Split main.rs: extract HTTP handlers into src/handlers/
main.rs drops from 2935 → 1200 lines, freed for startup wiring + the watcher. The 16 route handlers move into three domain-grouped files under src/handlers/:

- handlers/favorites.rs (128 lines): favorites, put_add_favorite, delete_favorite.
- handlers/video.rs (665 lines): generate_video, stream_video, get_video_part, get_video_preview, get_preview_status. The 5 pre-existing get_preview_status integration tests move with the handler (they still pass against TestPreviewDao + AppState::test_state).
- handlers/image.rs (1003 lines): get_image (with the hash/library-scoped/bare-legacy thumb lookup), upload_image, get_file_metadata, set_image_gps, get_full_exif, set_image_date, clear_image_date. Helpers (create_circular_thumbnail, build_metadata_response_for_date_mutation) and request structs (SetGpsRequest, SetDateRequest, ClearDateRequest, UploadQuery) travel with them.

main.rs's import block shrinks from ~50 lines to ~22 as everything HTTP-specific (NamedFile, mp::Multipart, BytesMut, Span, KeyValue, StreamExt, …) moves with the handlers. The is_video_file wrapper also goes — the remaining callers in watch_files / cleanup use file_types::is_video_file directly.

cargo test --bin image-api: 325 passing (no regression).

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
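Note: the diff below only contains the new files; the main.rs side of the split (shrunken imports plus service registration against the new modules) is not part of this excerpt. A minimal sketch of what that registration plausibly looks like after the move — the module path and builder chain here are assumptions, not taken from the diff:

    mod handlers; // src/handlers/mod.rs declares favorites, image, video

    // Hypothetical excerpt: the real main.rs wiring (middleware, app_data,
    // watcher startup) is not shown in this commit page.
    fn configure(cfg: &mut actix_web::web::ServiceConfig) {
        cfg.service(handlers::favorites::favorites)
            .service(handlers::favorites::put_add_favorite)
            .service(handlers::favorites::delete_favorite)
            .service(handlers::image::get_image)
            .service(handlers::image::upload_image)
            .service(handlers::video::generate_video)
            .service(handlers::video::stream_video);
        // ...the remaining handlers register the same way.
    }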
src/handlers/favorites.rs (new file, 128 lines)
@@ -0,0 +1,128 @@
//! User-favorites endpoints. Favorites are keyed on `(user_id, rel_path)`
//! and shared across libraries — a favorite created in lib1 is visible
//! under lib2 if the same rel_path resolves there too.

use std::sync::Mutex;

use actix_web::{
    HttpRequest, HttpResponse, Responder, delete, get, put,
    web::{self, Data},
};
use log::{error, info, warn};
use opentelemetry::trace::{Span, Status, Tracer};

use crate::data::{AddFavoriteRequest, Claims, PhotosResponse};
use crate::database::{DbError, DbErrorKind, FavoriteDao};
use crate::otel::{extract_context_from_request, global_tracer};

#[get("image/favorites")]
pub async fn favorites(
    claims: Claims,
    request: HttpRequest,
    favorites_dao: Data<Mutex<Box<dyn FavoriteDao>>>,
) -> impl Responder {
    let tracer = global_tracer();
    let context = extract_context_from_request(&request);
    let mut span = tracer.start_with_context("get favorites", &context);

    match web::block(move || {
        favorites_dao
            .lock()
            .expect("Unable to get FavoritesDao")
            .get_favorites(claims.sub.parse::<i32>().unwrap())
    })
    .await
    {
        Ok(Ok(favorites)) => {
            let favorites = favorites
                .into_iter()
                .map(|favorite| favorite.path)
                .collect::<Vec<String>>();

            span.set_status(Status::Ok);
            // Favorites are library-agnostic (shared by rel_path), so we
            // intentionally leave photo_libraries empty to signal "no badge".
            HttpResponse::Ok().json(PhotosResponse {
                photos: favorites,
                dirs: Vec::new(),
                photo_libraries: Vec::new(),
                total_count: None,
                has_more: None,
                next_offset: None,
            })
        }
        Ok(Err(e)) => {
            span.set_status(Status::error(format!("Error getting favorites: {:?}", e)));
            error!("Error getting favorites: {:?}", e);
            HttpResponse::InternalServerError().finish()
        }
        Err(_) => HttpResponse::InternalServerError().finish(),
    }
}

#[put("image/favorites")]
pub async fn put_add_favorite(
    claims: Claims,
    body: web::Json<AddFavoriteRequest>,
    favorites_dao: Data<Mutex<Box<dyn FavoriteDao>>>,
) -> impl Responder {
    if let Ok(user_id) = claims.sub.parse::<i32>() {
        let path = body.path.clone();
        match web::block::<_, Result<usize, DbError>>(move || {
            favorites_dao
                .lock()
                .expect("Unable to get FavoritesDao")
                .add_favorite(user_id, &path)
        })
        .await
        {
            Ok(Err(e)) if e.kind == DbErrorKind::AlreadyExists => {
                warn!("Favorite: {} exists for user: {}", &body.path, user_id);
                HttpResponse::Ok()
            }
            Ok(Err(e)) => {
                error!("{:?} {}. for user: {}", e, body.path, user_id);
                HttpResponse::BadRequest()
            }
            Ok(Ok(_)) => {
                info!("Adding favorite \"{}\" for userid: {}", body.path, user_id);
                HttpResponse::Created()
            }
            Err(e) => {
                error!("Blocking error while inserting favorite: {:?}", e);
                HttpResponse::InternalServerError()
            }
        }
    } else {
        error!("Unable to parse sub as i32: {}", claims.sub);
        HttpResponse::BadRequest()
    }
}

#[delete("image/favorites")]
pub async fn delete_favorite(
    claims: Claims,
    body: web::Query<AddFavoriteRequest>,
    favorites_dao: Data<Mutex<Box<dyn FavoriteDao>>>,
) -> impl Responder {
    if let Ok(user_id) = claims.sub.parse::<i32>() {
        let path = body.path.clone();
        web::block(move || {
            favorites_dao
                .lock()
                .expect("Unable to get favorites dao")
                .remove_favorite(user_id, path);
        })
        .await
        .unwrap();

        info!(
            "Removing favorite \"{}\" for userid: {}",
            body.path, user_id
        );
        HttpResponse::Ok()
    } else {
        error!("Unable to parse sub as i32: {}", claims.sub);
        HttpResponse::BadRequest()
    }
}
src/handlers/image.rs (new file, 999 lines)
@@ -0,0 +1,999 @@
//! `/image*` endpoints: image serving (with hash/library-scoped/bare
//! legacy thumbnail lookup), upload, EXIF metadata read + GPS / date
//! mutation, and the full exiftool dump used by Apollo's details modal.

use std::error::Error;
use std::fs::File;
use std::io::ErrorKind;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::sync::Mutex;

use actix_files::NamedFile;
use actix_multipart as mp;
use actix_web::{
    HttpRequest, HttpResponse, Responder, get, post,
    web::{self, BufMut, BytesMut, Data},
};
use chrono::Utc;
use futures::stream::StreamExt;
use log::{debug, error, info, trace, warn};
use opentelemetry::KeyValue;
use opentelemetry::trace::{Span, Status, TraceContextExt, Tracer};
use urlencoding::decode;

use crate::content_hash;
use crate::data::{
    Claims, MetadataResponse, PhotoSize, ThumbnailFormat, ThumbnailRequest, ThumbnailShape,
};
use crate::database::models::{ImageExif, InsertImageExif};
use crate::database::{DbErrorKind, ExifDao};
use crate::date_resolver;
use crate::exif;
use crate::file_types;
use crate::files::{RefreshThumbnailsMessage, is_image_or_video, is_valid_full_path};
use crate::libraries;
use crate::memories;
use crate::otel::{extract_context_from_request, global_tracer};
use crate::perceptual_hash;
use crate::state::AppState;

#[get("/image")]
pub async fn get_image(
    _claims: Claims,
    request: HttpRequest,
    req: web::Query<ThumbnailRequest>,
    app_state: Data<AppState>,
    exif_dao: Data<Mutex<Box<dyn ExifDao>>>,
) -> impl Responder {
    let tracer = global_tracer();
    let context = extract_context_from_request(&request);

    let mut span = tracer.start_with_context("get_image", &context);

    // Resolve library from query param; default to primary so clients that
    // don't yet send `library=` continue to work.
    let library = match libraries::resolve_library_param(&app_state, req.library.as_deref()) {
        Ok(Some(lib)) => lib,
        Ok(None) => app_state.primary_library(),
        Err(msg) => {
            span.set_status(Status::error(msg.clone()));
            return HttpResponse::BadRequest().body(msg);
        }
    };

    // Union-mode search returns flat rel_paths with no library attribution,
    // so clients may request a file under the wrong library. Try the
    // resolved library first; if the file isn't there, fall back to any
    // other library holding that rel_path on disk.
    let resolved = is_valid_full_path(&library.root_path, &req.path, false)
        .filter(|p| p.exists())
        .map(|p| (library, p))
        .or_else(|| {
            app_state.libraries.iter().find_map(|lib| {
                if lib.id == library.id {
                    return None;
                }
                is_valid_full_path(&lib.root_path, &req.path, false)
                    .filter(|p| p.exists())
                    .map(|p| (lib, p))
            })
        });

    if let Some((library, path)) = resolved {
        let image_size = req.size.unwrap_or(PhotoSize::Full);
        if image_size == PhotoSize::Thumb {
            let relative_path = path
                .strip_prefix(&library.root_path)
                .expect("Error stripping library root prefix from thumbnail");
            let relative_path_str = relative_path.to_string_lossy().replace('\\', "/");

            let thumbs = &app_state.thumbnail_path;
            let bare_legacy_thumb_path = Path::new(&thumbs).join(relative_path);
            let scoped_legacy_thumb_path = content_hash::library_scoped_legacy_path(
                Path::new(&thumbs),
                library.id,
                relative_path,
            );

            // Gif thumbnails are a separate lookup (video GIF previews).
            // Dual-lookup for gif is out of scope; preserve existing flow.
            if req.format == Some(ThumbnailFormat::Gif) && file_types::is_video_file(&path) {
                let mut gif_path = Path::new(&app_state.gif_path).join(relative_path);
                gif_path.set_extension("gif");
                trace!("Gif thumbnail path: {:?}", gif_path);
                if let Ok(file) = NamedFile::open(&gif_path) {
                    span.set_status(Status::Ok);
                    return file
                        .use_etag(true)
                        .use_last_modified(true)
                        .prefer_utf8(true)
                        .into_response(&request);
                }
            }

            // Lookup chain (most-specific first, falling back as we miss):
            //   1. hash-keyed (`<thumbs>/<hash[..2]>/<hash>.jpg`) — content
            //      identity, shared across libraries;
            //   2. library-scoped legacy (`<thumbs>/<lib_id>/<rel_path>`) —
            //      written by current generation when hash isn't known;
            //   3. bare legacy (`<thumbs>/<rel_path>`) — pre-multi-library
            //      thumbs from the days before library prefixing existed.
            // Stage (3) goes away once a one-time migration lifts every
            // bare-legacy file under a library prefix; until then it
            // prevents needless 404s for already-warmed deployments.
            let hash_thumb_path: Option<PathBuf> = {
                let mut dao = exif_dao.lock().expect("Unable to lock ExifDao");
                match dao.get_exif(&context, &relative_path_str) {
                    Ok(Some(row)) => row
                        .content_hash
                        .as_deref()
                        .map(|h| content_hash::thumbnail_path(Path::new(thumbs), h)),
                    _ => None,
                }
            };
            let thumb_path = hash_thumb_path
                .as_ref()
                .filter(|p| p.exists())
                .cloned()
                .or_else(|| {
                    if scoped_legacy_thumb_path.exists() {
                        Some(scoped_legacy_thumb_path.clone())
                    } else {
                        None
                    }
                })
                .unwrap_or_else(|| bare_legacy_thumb_path.clone());

            // Handle circular thumbnail request
            if req.shape == Some(ThumbnailShape::Circle) {
                match create_circular_thumbnail(&thumb_path, thumbs).await {
                    Ok(circular_path) => {
                        if let Ok(file) = NamedFile::open(&circular_path) {
                            span.set_status(Status::Ok);
                            return file
                                .use_etag(true)
                                .use_last_modified(true)
                                .prefer_utf8(true)
                                .into_response(&request);
                        }
                    }
                    Err(e) => {
                        warn!("Failed to create circular thumbnail: {:?}", e);
                        // Fall through to serve square thumbnail
                    }
                }
            }

            trace!("Thumbnail path: {:?}", thumb_path);
            if let Ok(file) = NamedFile::open(&thumb_path) {
                span.set_status(Status::Ok);
                return file
                    .use_etag(true)
                    .use_last_modified(true)
                    .prefer_utf8(true)
                    .into_response(&request);
            }
        }

        // Full-size requests for RAW formats (NEF/CR2/ARW/etc.) can't just
        // NamedFile-stream the original bytes — browsers won't decode the
        // RAW container, so a `<img src=...>` lands as a broken image. Serve
        // the embedded JPEG preview instead (typically the camera's in-body
        // review JPEG, ~1–2 MP). Falls through to NamedFile if no preview is
        // available, which preserves the historical behavior for callers
        // that genuinely want the original bytes.
        if image_size == PhotoSize::Full && exif::is_tiff_raw(&path) {
            if let Some(preview) = exif::extract_embedded_jpeg_preview(&path) {
                span.set_status(Status::Ok);
                return HttpResponse::Ok()
                    .content_type("image/jpeg")
                    .insert_header(("Cache-Control", "public, max-age=3600"))
                    .body(preview);
            }
        }

        if let Ok(file) = NamedFile::open(&path) {
            span.set_status(Status::Ok);
            // Enable ETag and set cache headers for full images (1 hour cache)
            return file
                .use_etag(true)
                .use_last_modified(true)
                .prefer_utf8(true)
                .into_response(&request);
        }

        span.set_status(Status::error("Not found"));
        HttpResponse::NotFound().finish()
    } else {
        span.set_status(Status::error("Not found"));
        error!("Path does not exist in any library: {}", req.path);
        HttpResponse::NotFound().finish()
    }
}

async fn create_circular_thumbnail(
    thumb_path: &Path,
    thumbs_dir: &str,
) -> Result<PathBuf, Box<dyn Error>> {
    use image::{GenericImageView, ImageBuffer, Rgba};

    // Create circular thumbnails directory
    let circular_dir = Path::new(thumbs_dir).join("_circular");

    // Get relative path from thumbs_dir to create same structure
    let relative_to_thumbs = thumb_path.strip_prefix(thumbs_dir)?;
    let circular_path = circular_dir.join(relative_to_thumbs).with_extension("png");

    // Check if circular thumbnail already exists
    if circular_path.exists() {
        return Ok(circular_path);
    }

    // Create parent directory if needed
    if let Some(parent) = circular_path.parent() {
        std::fs::create_dir_all(parent)?;
    }

    // Load the square thumbnail
    let img = image::open(thumb_path)?;
    let (width, height) = img.dimensions();

    // Fixed output size for consistency
    let output_size = 80u32;
    let radius = output_size as f32 / 2.0;

    // Calculate crop area to get square center of original image
    let crop_size = width.min(height);
    let crop_x = (width - crop_size) / 2;
    let crop_y = (height - crop_size) / 2;

    // Create a new RGBA image with transparency
    let output = ImageBuffer::from_fn(output_size, output_size, |x, y| {
        let dx = x as f32 - radius;
        let dy = y as f32 - radius;
        let distance = (dx * dx + dy * dy).sqrt();

        if distance <= radius {
            // Inside circle - map to cropped source area
            // Scale from output coordinates to crop coordinates
            let scale = crop_size as f32 / output_size as f32;
            let src_x = crop_x + (x as f32 * scale) as u32;
            let src_y = crop_y + (y as f32 * scale) as u32;
            let pixel = img.get_pixel(src_x, src_y);
            Rgba([pixel[0], pixel[1], pixel[2], 255])
        } else {
            // Outside circle - transparent
            Rgba([0, 0, 0, 0])
        }
    });

    // Save as PNG (supports transparency)
    output.save(&circular_path)?;

    Ok(circular_path)
}

#[get("/image/metadata")]
pub async fn get_file_metadata(
    _: Claims,
    request: HttpRequest,
    path: web::Query<ThumbnailRequest>,
    app_state: Data<AppState>,
    exif_dao: Data<Mutex<Box<dyn ExifDao>>>,
) -> impl Responder {
    let tracer = global_tracer();
    let context = extract_context_from_request(&request);
    let mut span = tracer.start_with_context("get_file_metadata", &context);
    let span_context =
        opentelemetry::Context::new().with_remote_span_context(span.span_context().clone());

    let library = libraries::resolve_library_param(&app_state, path.library.as_deref())
        .ok()
        .flatten()
        .unwrap_or_else(|| app_state.primary_library());

    // Fall back to other libraries if the file isn't under the resolved one,
    // matching the `/image` handler so union-mode search results resolve.
    let resolved = is_valid_full_path(&library.root_path, &path.path, false)
        .filter(|p| p.exists())
        .map(|p| (library, p))
        .or_else(|| {
            app_state.libraries.iter().find_map(|lib| {
                if lib.id == library.id {
                    return None;
                }
                is_valid_full_path(&lib.root_path, &path.path, false)
                    .filter(|p| p.exists())
                    .map(|p| (lib, p))
            })
        });

    match resolved
        .ok_or_else(|| ErrorKind::InvalidData.into())
        .and_then(|(lib, full_path)| {
            File::open(&full_path)
                .and_then(|file| file.metadata())
                .map(|metadata| (lib, metadata))
        }) {
        Ok((resolved_library, metadata)) => {
            let mut response: MetadataResponse = metadata.into();
            response.library_id = Some(resolved_library.id);
            response.library_name = Some(resolved_library.name.clone());

            // Extract date from filename if possible
            response.filename_date =
                memories::extract_date_from_filename(&path.path).map(|dt| dt.timestamp());

            // Query EXIF data if available
            if let Ok(mut dao) = exif_dao.lock()
                && let Ok(Some(exif)) = dao.get_exif(&span_context, &path.path)
            {
                response.exif = Some(exif.into());
            }

            span.add_event(
                "Metadata fetched",
                vec![KeyValue::new("file", path.path.clone())],
            );
            span.set_status(Status::Ok);

            HttpResponse::Ok().json(response)
        }
        Err(e) => {
            let message = format!("Error getting metadata for file '{}': {:?}", path.path, e);
            error!("{}", message);
            span.set_status(Status::error(message));

            HttpResponse::InternalServerError().finish()
        }
    }
}

/// Body for `POST /image/exif/gps` — write GPS coordinates into a file's
/// EXIF in place. Only `path` + `latitude` + `longitude` are required.
/// `library` is optional (falls back to the primary library) and matches
/// the convention of the other path-keyed routes.
#[derive(serde::Deserialize)]
struct SetGpsRequest {
    path: String,
    library: Option<String>,
    latitude: f64,
    longitude: f64,
}

#[post("/image/exif/gps")]
pub async fn set_image_gps(
    _: Claims,
    request: HttpRequest,
    body: web::Json<SetGpsRequest>,
    app_state: Data<AppState>,
    exif_dao: Data<Mutex<Box<dyn ExifDao>>>,
) -> impl Responder {
    let tracer = global_tracer();
    let context = extract_context_from_request(&request);
    let mut span = tracer.start_with_context("set_image_gps", &context);
    let span_context =
        opentelemetry::Context::new().with_remote_span_context(span.span_context().clone());

    let library = libraries::resolve_library_param(&app_state, body.library.as_deref())
        .ok()
        .flatten()
        .unwrap_or_else(|| app_state.primary_library());

    // Same fallback as get_file_metadata: union-mode means a file may
    // resolve under a sibling library.
    let resolved = is_valid_full_path(&library.root_path, &body.path, false)
        .filter(|p| p.exists())
        .map(|p| (library, p))
        .or_else(|| {
            app_state.libraries.iter().find_map(|lib| {
                if lib.id == library.id {
                    return None;
                }
                is_valid_full_path(&lib.root_path, &body.path, false)
                    .filter(|p| p.exists())
                    .map(|p| (lib, p))
            })
        });

    let (resolved_library, full_path) = match resolved {
        Some(v) => v,
        None => {
            span.set_status(Status::error("file not found"));
            return HttpResponse::NotFound().body("File not found");
        }
    };

    if !exif::supports_exif(&full_path) {
        return HttpResponse::BadRequest().body("File format does not support EXIF GPS write");
    }

    if let Err(e) = exif::write_gps(&full_path, body.latitude, body.longitude) {
        let msg = format!("exiftool write failed: {}", e);
        error!("{}", msg);
        span.set_status(Status::error(msg.clone()));
        return HttpResponse::InternalServerError().body(msg);
    }

    // Re-read EXIF from disk (the write path doesn't tell us the rest of
    // the parsed fields back, and we want the DB row to match what
    // extract_exif_from_path would now produce). Update the existing row
    // rather than insert — this endpoint is invoked on already-indexed
    // files only.
    let extracted = match exif::extract_exif_from_path(&full_path) {
        Ok(d) => d,
        Err(e) => {
            // GPS was written successfully but re-extraction failed; surface
            // a 500 because the DB will now disagree with disk until the
            // next file scan rewrites it.
            let msg = format!("EXIF re-read failed after write: {}", e);
            error!("{}", msg);
            return HttpResponse::InternalServerError().body(msg);
        }
    };
    let now = Utc::now().timestamp();
    let normalized_path = body.path.replace('\\', "/");
    // Re-run the canonical-date waterfall on every GPS write — exiftool
    // writing GPS doesn't change the capture date, but if the row was
    // previously sourced from `fs_time` the re-read may have given us a
    // real EXIF date this time, and we want to upgrade the source.
    let resolved_date = date_resolver::resolve_date_taken(&full_path, extracted.date_taken);
    let insert_exif = InsertImageExif {
        library_id: resolved_library.id,
        file_path: normalized_path.clone(),
        camera_make: extracted.camera_make,
        camera_model: extracted.camera_model,
        lens_model: extracted.lens_model,
        width: extracted.width,
        height: extracted.height,
        orientation: extracted.orientation,
        gps_latitude: extracted.gps_latitude.map(|v| v as f32),
        gps_longitude: extracted.gps_longitude.map(|v| v as f32),
        gps_altitude: extracted.gps_altitude.map(|v| v as f32),
        focal_length: extracted.focal_length.map(|v| v as f32),
        aperture: extracted.aperture.map(|v| v as f32),
        shutter_speed: extracted.shutter_speed,
        iso: extracted.iso,
        date_taken: resolved_date.map(|r| r.timestamp),
        // Created_time is preserved by update_exif (it doesn't touch the
        // column); pass any int — it's ignored in the UPDATE statement.
        created_time: now,
        last_modified: now,
        // Hash + size aren't touched in update_exif either, but the file
        // bytes did change — best-effort recompute so the new hash lands
        // on the next call to get_exif. Failure here just leaves the old
        // values in place.
        content_hash: content_hash::compute(&full_path)
            .ok()
            .map(|c| c.content_hash),
        size_bytes: content_hash::compute(&full_path).ok().map(|c| c.size_bytes),
        // GPS-update path doesn't touch perceptual hashes either; columns
        // ignored by update_exif. Compute best-effort so a new file lands
        // with a usable signal; failure just leaves prior values in place.
        phash_64: perceptual_hash::compute(&full_path).map(|h| h.phash_64),
        dhash_64: perceptual_hash::compute(&full_path).map(|h| h.dhash_64),
        date_taken_source: resolved_date.map(|r| r.source.as_str().to_string()),
    };

    let updated = {
        let mut dao = exif_dao.lock().expect("Unable to lock ExifDao");
        // If the row doesn't exist yet (file isn't indexed for some reason),
        // insert instead so the GPS write is at least visible the moment
        // the watcher catches up.
        match dao.get_exif(&span_context, &normalized_path) {
            Ok(Some(_)) => dao.update_exif(&span_context, insert_exif),
            Ok(None) => dao.store_exif(&span_context, insert_exif),
            Err(_) => dao.update_exif(&span_context, insert_exif),
        }
    };

    match updated {
        Ok(row) => {
            // Mirror the file metadata so the client gets the new size /
            // mtime in the same response and can refresh its cached
            // metadata block in one round-trip.
            let fs_meta = std::fs::metadata(&full_path).ok();
            let mut response: MetadataResponse = match fs_meta {
                Some(m) => m.into(),
                None => MetadataResponse {
                    created: None,
                    modified: None,
                    size: 0,
                    exif: None,
                    filename_date: None,
                    library_id: None,
                    library_name: None,
                },
            };
            response.exif = Some(row.into());
            response.library_id = Some(resolved_library.id);
            response.library_name = Some(resolved_library.name.clone());
            response.filename_date =
                memories::extract_date_from_filename(&body.path).map(|dt| dt.timestamp());
            span.set_status(Status::Ok);
            HttpResponse::Ok().json(response)
        }
        Err(e) => {
            let msg = format!("EXIF DB update failed: {:?}", e);
            error!("{}", msg);
            span.set_status(Status::error(msg.clone()));
            HttpResponse::InternalServerError().body(msg)
        }
    }
}

/// `GET /image/exif/full?path=&library=` — full per-file EXIF dump via
/// exiftool, for the DETAILS modal's "FULL EXIF" pane. Strictly richer
/// than `/image/metadata`'s curated subset (every group exiftool can
/// see: EXIF, File, MakerNotes, Composite, ICC_Profile, IPTC, …).
///
/// On-demand only — the watcher / indexer never calls this. Falls back
/// to 503 when exiftool isn't installed (deployer guidance is the same
/// as for the RAW preview pipeline: install exiftool for full coverage).
#[get("/image/exif/full")]
pub async fn get_full_exif(
    _: Claims,
    request: HttpRequest,
    path: web::Query<ThumbnailRequest>,
    app_state: Data<AppState>,
) -> impl Responder {
    let tracer = global_tracer();
    let context = extract_context_from_request(&request);
    let mut span = tracer.start_with_context("get_full_exif", &context);

    let library = libraries::resolve_library_param(&app_state, path.library.as_deref())
        .ok()
        .flatten()
        .unwrap_or_else(|| app_state.primary_library());

    // Same union-mode fallback as get_file_metadata — the file may live
    // under a sibling library when the requested one's path resolves but
    // doesn't actually contain the bytes.
    let resolved = is_valid_full_path(&library.root_path, &path.path, false)
        .filter(|p| p.exists())
        .map(|p| (library, p))
        .or_else(|| {
            app_state.libraries.iter().find_map(|lib| {
                if lib.id == library.id {
                    return None;
                }
                is_valid_full_path(&lib.root_path, &path.path, false)
                    .filter(|p| p.exists())
                    .map(|p| (lib, p))
            })
        });

    let (resolved_library, full_path) = match resolved {
        Some(v) => v,
        None => {
            span.set_status(Status::error("file not found"));
            return HttpResponse::NotFound().body("File not found");
        }
    };

    // exiftool spawn is blocking — keep it off the actix worker by
    // running on the blocking pool. ~50–200 ms typical for a JPEG;
    // longer for RAW with rich MakerNotes.
    let exif_result =
        web::block(move || crate::exif::read_full_exif_via_exiftool(&full_path)).await;

    match exif_result {
        Ok(Ok(Some(tags))) => {
            span.set_status(Status::Ok);
            HttpResponse::Ok().json(serde_json::json!({
                "library_id": resolved_library.id,
                "library_name": resolved_library.name,
                "tags": tags,
            }))
        }
        Ok(Ok(None)) => {
            // exiftool ran but produced no output for this file — treat as
            // empty rather than an error so the modal renders "no tags"
            // gracefully.
            HttpResponse::Ok().json(serde_json::json!({
                "library_id": resolved_library.id,
                "library_name": resolved_library.name,
                "tags": serde_json::Value::Object(Default::default()),
            }))
        }
        Ok(Err(e)) => {
            let msg = format!("exiftool failed: {}", e);
            error!("{}", msg);
            span.set_status(Status::error(msg.clone()));
            // 503 — typically "exiftool isn't on PATH" or a transient spawn
            // failure. Apollo surfaces a hint in the modal.
            HttpResponse::ServiceUnavailable().body(msg)
        }
        Err(e) => {
            let msg = format!("blocking-pool error: {}", e);
            error!("{}", msg);
            span.set_status(Status::error(msg.clone()));
            HttpResponse::InternalServerError().body(msg)
        }
    }
}

/// Body for `POST /image/exif/date` — operator-driven date_taken override.
/// `date_taken` is unix seconds (matches `image_exif.date_taken`'s convention
/// — naive local reinterpreted as UTC, not real UTC; the Apollo client passes
/// through the same value the photo carousel rendered before edit).
#[derive(serde::Deserialize)]
struct SetDateRequest {
    path: String,
    library: Option<String>,
    date_taken: i64,
}

/// Body for `POST /image/exif/date/clear` — revert a manual override and
/// restore the resolver-derived `(date_taken, date_taken_source)` pair from
/// the snapshot.
#[derive(serde::Deserialize)]
struct ClearDateRequest {
    path: String,
    library: Option<String>,
}

/// Build a `MetadataResponse` for the date endpoints. Mirrors
/// `get_file_metadata`'s shape so the client gets a single source of truth
/// after every mutation. Filesystem metadata is best-effort: if the file is
/// on a stale mount or moved, the DB-side override still succeeds and the
/// response carries `created=None, modified=None, size=0`. The DB row's
/// updated EXIF is what matters here.
fn build_metadata_response_for_date_mutation(
    library: &libraries::Library,
    rel_path: &str,
    exif: ImageExif,
) -> MetadataResponse {
    let full_path = is_valid_full_path(&library.root_path, &rel_path.to_string(), false);
    let fs_meta = full_path
        .as_ref()
        .filter(|p| p.exists())
        .and_then(|p| std::fs::metadata(p).ok());
    let mut response: MetadataResponse = match fs_meta {
        Some(m) => m.into(),
        None => MetadataResponse {
            created: None,
            modified: None,
            size: 0,
            exif: None,
            filename_date: None,
            library_id: None,
            library_name: None,
        },
    };
    response.exif = Some(exif.into());
    response.library_id = Some(library.id);
    response.library_name = Some(library.name.clone());
    response.filename_date =
        memories::extract_date_from_filename(rel_path).map(|dt| dt.timestamp());
    response
}

#[post("/image/exif/date")]
pub async fn set_image_date(
    _: Claims,
    request: HttpRequest,
    body: web::Json<SetDateRequest>,
    app_state: Data<AppState>,
    exif_dao: Data<Mutex<Box<dyn ExifDao>>>,
) -> impl Responder {
    let tracer = global_tracer();
    let context = extract_context_from_request(&request);
    let mut span = tracer.start_with_context("set_image_date", &context);
    let span_context =
        opentelemetry::Context::new().with_remote_span_context(span.span_context().clone());

    let library = match libraries::resolve_library_param(&app_state, body.library.as_deref()) {
        Ok(Some(lib)) => lib,
        Ok(None) => app_state.primary_library(),
        Err(msg) => {
            span.set_status(Status::error(msg.clone()));
            return HttpResponse::BadRequest().body(msg);
        }
    };

    // Path normalization matches set_image_gps so a Windows-import client
    // doesn't end up with a backslash variant that misses the row.
    let normalized_path = body.path.replace('\\', "/");

    let updated = {
        let mut dao = exif_dao.lock().expect("Unable to lock ExifDao");
        dao.set_manual_date_taken(&span_context, library.id, &normalized_path, body.date_taken)
    };

    match updated {
        Ok(row) => {
            span.set_status(Status::Ok);
            HttpResponse::Ok().json(build_metadata_response_for_date_mutation(
                &library,
                &normalized_path,
                row,
            ))
        }
        Err(e) => {
            let msg = format!("set_manual_date_taken failed: {:?}", e);
            error!("{}", msg);
            span.set_status(Status::error(msg.clone()));
            match e.kind {
                DbErrorKind::NotFound => HttpResponse::NotFound().body(msg),
                _ => HttpResponse::InternalServerError().body(msg),
            }
        }
    }
}

#[post("/image/exif/date/clear")]
pub async fn clear_image_date(
    _: Claims,
    request: HttpRequest,
    body: web::Json<ClearDateRequest>,
    app_state: Data<AppState>,
    exif_dao: Data<Mutex<Box<dyn ExifDao>>>,
) -> impl Responder {
    let tracer = global_tracer();
    let context = extract_context_from_request(&request);
    let mut span = tracer.start_with_context("clear_image_date", &context);
    let span_context =
        opentelemetry::Context::new().with_remote_span_context(span.span_context().clone());

    let library = match libraries::resolve_library_param(&app_state, body.library.as_deref()) {
        Ok(Some(lib)) => lib,
        Ok(None) => app_state.primary_library(),
        Err(msg) => {
            span.set_status(Status::error(msg.clone()));
            return HttpResponse::BadRequest().body(msg);
        }
    };

    let normalized_path = body.path.replace('\\', "/");

    let updated = {
        let mut dao = exif_dao.lock().expect("Unable to lock ExifDao");
        dao.clear_manual_date_taken(&span_context, library.id, &normalized_path)
    };

    match updated {
        Ok(row) => {
            span.set_status(Status::Ok);
            HttpResponse::Ok().json(build_metadata_response_for_date_mutation(
                &library,
                &normalized_path,
                row,
            ))
        }
        Err(e) => {
            let msg = format!("clear_manual_date_taken failed: {:?}", e);
            error!("{}", msg);
            span.set_status(Status::error(msg.clone()));
            match e.kind {
                DbErrorKind::NotFound => HttpResponse::NotFound().body(msg),
                _ => HttpResponse::InternalServerError().body(msg),
            }
        }
    }
}

#[derive(serde::Deserialize)]
struct UploadQuery {
    library: Option<String>,
}

#[post("/image")]
pub async fn upload_image(
    _: Claims,
    request: HttpRequest,
    query: web::Query<UploadQuery>,
    mut payload: mp::Multipart,
    app_state: Data<AppState>,
    exif_dao: Data<Mutex<Box<dyn ExifDao>>>,
) -> impl Responder {
    let tracer = global_tracer();
    let context = extract_context_from_request(&request);
    let mut span = tracer.start_with_context("upload_image", &context);
    let span_context =
        opentelemetry::Context::new().with_remote_span_context(span.span_context().clone());

    // Resolve the optional library selector. Absent → primary library
    // (backwards-compatible with clients that don't yet send `library=`).
    let target_library =
        match libraries::resolve_library_param(&app_state, query.library.as_deref()) {
            Ok(Some(lib)) => lib,
            Ok(None) => app_state.primary_library(),
            Err(msg) => {
                span.set_status(Status::error(msg.clone()));
                return HttpResponse::BadRequest().body(msg);
            }
        };

    let mut file_content: BytesMut = BytesMut::new();
    let mut file_name: Option<String> = None;
    let mut file_path: Option<String> = None;

    while let Some(Ok(mut part)) = payload.next().await {
        if let Some(content_type) = part.content_disposition() {
            debug!("{:?}", content_type);
            if let Some(filename) = content_type.get_filename() {
                debug!("Name (raw): {:?}", filename);
                // Decode URL-encoded filename (e.g., "file%20name.jpg" -> "file name.jpg")
                let decoded_filename = decode(filename)
                    .map(|s| s.to_string())
                    .unwrap_or_else(|_| filename.to_string());
                debug!("Name (decoded): {:?}", decoded_filename);
                file_name = Some(decoded_filename);

                while let Some(Ok(data)) = part.next().await {
                    file_content.put(data);
                }
            } else if content_type.get_name() == Some("path") {
                while let Some(Ok(data)) = part.next().await {
                    if let Ok(path) = std::str::from_utf8(&data) {
                        file_path = Some(path.to_string())
                    }
                }
            }
        }
    }

    let path = file_path.unwrap_or_else(|| target_library.root_path.clone());
    if !file_content.is_empty() {
        if file_name.is_none() {
            span.set_status(Status::error("No filename provided"));
            return HttpResponse::BadRequest().body("No filename provided");
        }
        let full_path = PathBuf::from(&path).join(file_name.unwrap());
        if let Some(full_path) = is_valid_full_path(
            &target_library.root_path,
            &full_path.to_str().unwrap().to_string(),
            true,
        ) {
            // Pre-write content-hash check: if these exact bytes already
            // exist anywhere in any library (and aren't themselves
            // soft-marked as duplicates), don't write the file. Return
            // 409 with the canonical sibling so the mobile app can show
            // a friendly "already in your library" toast.
            let upload_hash = blake3::Hasher::new()
                .update(&file_content)
                .finalize()
                .to_hex()
                .to_string();
            {
                let mut dao = exif_dao.lock().expect("Unable to lock ExifDao");
                if let Ok(Some(existing)) = dao.find_by_content_hash(&span_context, &upload_hash)
                    && existing.duplicate_of_hash.is_none()
                {
                    let library_name = libraries::load_all(&mut crate::database::connect())
                        .into_iter()
                        .find(|l| l.id == existing.library_id)
                        .map(|l| l.name);
                    span.set_status(Status::Ok);
                    return HttpResponse::Conflict().json(serde_json::json!({
                        "duplicate_of": {
                            "library_id": existing.library_id,
                            "rel_path": existing.file_path,
                        },
                        "content_hash": upload_hash,
                        "library_name": library_name,
                    }));
                }
            }

            let context =
                opentelemetry::Context::new().with_remote_span_context(span.span_context().clone());
            tracer
                .span_builder("file write")
                .start_with_context(&tracer, &context);

            let uploaded_path = if !full_path.is_file() && is_image_or_video(&full_path) {
                let mut file = File::create(&full_path).unwrap();
                file.write_all(&file_content).unwrap();

                info!("Uploaded: {:?}", full_path);
                full_path
            } else {
                warn!("File already exists: {:?}", full_path);

                let new_path = format!(
                    "{}/{}_{}.{}",
                    full_path.parent().unwrap().to_str().unwrap(),
                    full_path.file_stem().unwrap().to_str().unwrap(),
                    Utc::now().timestamp(),
                    full_path
                        .extension()
                        .expect("Uploaded file should have an extension")
                        .to_str()
                        .unwrap()
                );
                info!("Uploaded: {}", new_path);

                let new_path_buf = PathBuf::from(&new_path);
                let mut file = File::create(&new_path_buf).unwrap();
                file.write_all(&file_content).unwrap();
                new_path_buf
            };

            // Extract and store EXIF data if file supports it
            if exif::supports_exif(&uploaded_path) {
                let relative_path = uploaded_path
                    .strip_prefix(&target_library.root_path)
                    .expect("Error stripping library root prefix")
                    .to_str()
                    .unwrap()
                    .replace('\\', "/");

                match exif::extract_exif_from_path(&uploaded_path) {
                    Ok(exif_data) => {
                        let timestamp = Utc::now().timestamp();
                        let (content_hash, size_bytes) = match content_hash::compute(&uploaded_path)
                        {
                            Ok(id) => (Some(id.content_hash), Some(id.size_bytes)),
                            Err(e) => {
                                warn!(
                                    "Failed to hash uploaded {}: {:?}",
                                    uploaded_path.display(),
                                    e
                                );
                                (None, None)
                            }
                        };
                        let perceptual = perceptual_hash::compute(&uploaded_path);
                        let resolved_date =
                            date_resolver::resolve_date_taken(&uploaded_path, exif_data.date_taken);
                        let insert_exif = InsertImageExif {
                            library_id: target_library.id,
                            file_path: relative_path.clone(),
                            camera_make: exif_data.camera_make,
                            camera_model: exif_data.camera_model,
                            lens_model: exif_data.lens_model,
                            width: exif_data.width,
                            height: exif_data.height,
                            orientation: exif_data.orientation,
                            gps_latitude: exif_data.gps_latitude.map(|v| v as f32),
                            gps_longitude: exif_data.gps_longitude.map(|v| v as f32),
                            gps_altitude: exif_data.gps_altitude.map(|v| v as f32),
                            focal_length: exif_data.focal_length.map(|v| v as f32),
                            aperture: exif_data.aperture.map(|v| v as f32),
                            shutter_speed: exif_data.shutter_speed,
                            iso: exif_data.iso,
                            date_taken: resolved_date.map(|r| r.timestamp),
                            created_time: timestamp,
                            last_modified: timestamp,
                            content_hash,
                            size_bytes,
                            phash_64: perceptual.map(|h| h.phash_64),
                            dhash_64: perceptual.map(|h| h.dhash_64),
                            date_taken_source: resolved_date.map(|r| r.source.as_str().to_string()),
                        };

                        if let Ok(mut dao) = exif_dao.lock() {
                            if let Err(e) = dao.store_exif(&span_context, insert_exif) {
                                error!("Failed to store EXIF data for {}: {:?}", relative_path, e);
                            } else {
                                debug!("EXIF data stored for {}", relative_path);
                            }
                        }
                    }
                    Err(e) => {
                        debug!(
                            "No EXIF data or error extracting from {}: {:?}",
                            uploaded_path.display(),
                            e
                        );
                    }
                }
            }
        } else {
            error!("Invalid path for upload: {:?}", full_path);
            span.set_status(Status::error("Invalid path for upload"));
            return HttpResponse::BadRequest().body("Path was not valid");
        }
    } else {
        span.set_status(Status::error("No file body read"));
        return HttpResponse::BadRequest().body("No file body read");
    }

    app_state.stream_manager.do_send(RefreshThumbnailsMessage);
    span.set_status(Status::Ok);

    HttpResponse::Ok().finish()
}
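Aside: the three-stage lookup documented in get_image collapses to a small pure fallback. A sketch under the path shapes named in that comment — the two-hex-char shard prefix is inferred from `<thumbs>/<hash[..2]>/<hash>.jpg`, and the real helpers in content_hash may differ:

    use std::path::{Path, PathBuf};

    // Illustrative only — mirrors get_image's fallback order; not the
    // actual content_hash implementation from this commit.
    fn pick_thumb(thumbs: &Path, hash: Option<&str>, lib_id: i32, rel: &str) -> PathBuf {
        let hash_keyed = hash.map(|h| thumbs.join(&h[..2]).join(format!("{h}.jpg")));
        let scoped = thumbs.join(lib_id.to_string()).join(rel); // library-scoped legacy
        let bare = thumbs.join(rel); // bare legacy
        hash_keyed
            .filter(|p| p.exists())
            .or_else(|| Some(scoped).filter(|p| p.exists()))
            .unwrap_or(bare)
    }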
src/handlers/mod.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
//! HTTP route handlers, grouped by domain.
//!
//! These were previously inlined in `main.rs`; moving them out keeps
//! `main()` focused on startup wiring and makes each domain
//! independently testable with `actix_web::test::init_service`.

pub mod favorites;
pub mod image;
pub mod video;
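The doc comment's testability claim, made concrete — a minimal sketch assuming a `MockFavoriteDao` test double (hypothetical here; the repo's own doubles such as TestPreviewDao live outside this diff):

    // Smoke-test that the favorites route is wired: without a JWT the
    // Claims extractor should reject the call, but anything except 404
    // proves registration worked. MockFavoriteDao is a stand-in.
    #[actix_web::test]
    async fn favorites_route_is_registered() {
        use std::sync::Mutex;
        use actix_web::{App, test, web::Data};

        let dao: Box<dyn FavoriteDao> = Box::new(MockFavoriteDao::default());
        let app = test::init_service(
            App::new()
                .app_data(Data::new(Mutex::new(dao)))
                .service(handlers::favorites::favorites),
        )
        .await;

        let req = test::TestRequest::get().uri("/image/favorites").to_request();
        let resp = test::try_call_service(&app, req).await;
        assert!(
            resp.map(|r| r.status() != actix_web::http::StatusCode::NOT_FOUND)
                .unwrap_or(true)
        );
    }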
src/handlers/video.rs (new file, 665 lines)
@@ -0,0 +1,665 @@
|
|||||||
|
//! Video-related endpoints: HLS playlist generation, segment streaming,
|
||||||
|
//! and the short-clip preview pipeline.
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::sync::Mutex;
|
||||||
|
|
||||||
|
use actix_files::NamedFile;
|
||||||
|
use actix_web::{
|
||||||
|
HttpRequest, HttpResponse, Responder, get, post,
|
||||||
|
web::{self, Data},
|
||||||
|
};
|
||||||
|
use log::{debug, error, info, warn};
|
||||||
|
use opentelemetry::trace::{Span, Status, Tracer};
|
||||||
|
use opentelemetry::{KeyValue, global};
|
||||||
|
|
||||||
|
use crate::data::{
|
||||||
|
Claims, PreviewClipRequest, PreviewStatusItem, PreviewStatusRequest, PreviewStatusResponse,
|
||||||
|
ThumbnailRequest,
|
||||||
|
};
|
||||||
|
use crate::database::PreviewDao;
|
||||||
|
use crate::files::is_valid_full_path;
|
||||||
|
use crate::libraries;
|
||||||
|
use crate::otel::{extract_context_from_request, global_tracer};
|
||||||
|
use crate::state::AppState;
|
||||||
|
use crate::video::actors::{GeneratePreviewClipMessage, ProcessMessage, create_playlist};
|
||||||
|
|
||||||
|
#[post("/video/generate")]
|
||||||
|
pub async fn generate_video(
|
||||||
|
_claims: Claims,
|
||||||
|
request: HttpRequest,
|
||||||
|
app_state: Data<AppState>,
|
||||||
|
body: web::Json<ThumbnailRequest>,
|
||||||
|
) -> impl Responder {
|
||||||
|
let tracer = global_tracer();
|
||||||
|
|
||||||
|
let context = extract_context_from_request(&request);
|
||||||
|
let mut span = tracer.start_with_context("generate_video", &context);
|
||||||
|
|
||||||
|
let filename = PathBuf::from(&body.path);
|
||||||
|
|
||||||
|
if let Some(name) = filename.file_name() {
|
||||||
|
let filename = name.to_str().expect("Filename should convert to string");
|
||||||
|
// KNOWN ISSUE (multi-library): playlist filename is the basename
|
||||||
|
// alone, so two source files with the same basename — whether in
|
||||||
|
// different libraries or different subdirs of one library —
|
||||||
|
// overwrite each other's playlists while ffmpeg runs. The
|
||||||
|
// hash-keyed `content_hash::hls_dir` is the long-term answer
|
||||||
|
// (see CLAUDE.md "Multi-library data model"); rewiring the
|
||||||
|
// actor pipeline to use it is out of scope for this branch.
|
||||||
|
// The orphan-cleanup job above already walks every library so
|
||||||
|
// it doesn't false-delete archive playlists.
|
||||||
|
let playlist = format!("{}/{}.m3u8", app_state.video_path, filename);
|
||||||
|
|
||||||
|
let library = libraries::resolve_library_param(&app_state, body.library.as_deref())
|
||||||
|
.ok()
|
||||||
|
.flatten()
|
||||||
|
.unwrap_or_else(|| app_state.primary_library());
|
||||||
|
|
||||||
|
// Try the resolved library first, then fall back to any other library
|
||||||
|
// that actually contains the file — handles union-mode requests where
|
||||||
|
// the mobile client passes no library but the file lives in a
|
||||||
|
// non-primary library.
|
||||||
|
let resolved = is_valid_full_path(&library.root_path, &body.path, false)
|
||||||
|
.filter(|p| p.exists())
|
||||||
|
.or_else(|| {
|
||||||
|
app_state.libraries.iter().find_map(|lib| {
|
||||||
|
if lib.id == library.id {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
is_valid_full_path(&lib.root_path, &body.path, false).filter(|p| p.exists())
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
if let Some(path) = resolved {
|
||||||
|
if let Ok(child) = create_playlist(path.to_str().unwrap(), &playlist).await {
|
||||||
|
span.add_event(
|
||||||
|
"playlist_created".to_string(),
|
||||||
|
vec![KeyValue::new("playlist-name", filename.to_string())],
|
||||||
|
);
|
||||||
|
|
||||||
|
span.set_status(Status::Ok);
|
||||||
|
app_state.stream_manager.do_send(ProcessMessage(
|
||||||
|
playlist.clone(),
|
||||||
|
child,
|
||||||
|
// opentelemetry::Context::new().with_span(span),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
span.set_status(Status::error(format!("invalid path {:?}", &body.path)));
|
||||||
|
return HttpResponse::BadRequest().finish();
|
||||||
|
}
|
||||||
|
|
||||||
|
HttpResponse::Ok().json(playlist)
|
||||||
|
} else {
|
||||||
|
let message = format!("Unable to get file name: {:?}", filename);
|
||||||
|
error!("{}", message);
|
||||||
|
span.set_status(Status::error(message));
|
||||||
|
|
||||||
|
HttpResponse::BadRequest().finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/video/stream")]
|
||||||
|
pub async fn stream_video(
|
||||||
|
request: HttpRequest,
|
||||||
|
_: Claims,
|
||||||
|
path: web::Query<ThumbnailRequest>,
|
||||||
|
app_state: Data<AppState>,
|
||||||
|
) -> impl Responder {
|
||||||
|
let tracer = global::tracer("image-server");
|
||||||
|
let context = extract_context_from_request(&request);
|
||||||
|
let mut span = tracer.start_with_context("stream_video", &context);
|
||||||
|
|
||||||
|
let playlist = &path.path;
|
||||||
|
debug!("Playlist: {}", playlist);
|
||||||
|
|
||||||
|
// Only serve files under video_path (HLS playlists) or base_path (source videos)
|
||||||
|
if playlist.starts_with(&app_state.video_path)
|
||||||
|
|| is_valid_full_path(&app_state.base_path, playlist, false).is_some()
|
||||||
|
{
|
||||||
|
match NamedFile::open(playlist) {
|
||||||
|
Ok(file) => {
|
||||||
|
span.set_status(Status::Ok);
|
||||||
|
file.into_response(&request)
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
span.set_status(Status::error(format!("playlist not found {}", playlist)));
|
||||||
|
HttpResponse::NotFound().finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
span.set_status(Status::error(format!("playlist not valid {}", playlist)));
|
||||||
|
HttpResponse::BadRequest().finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/video/{path}")]
|
||||||
|
pub async fn get_video_part(
|
||||||
|
request: HttpRequest,
|
||||||
|
_: Claims,
|
||||||
|
path: web::Path<ThumbnailRequest>,
|
||||||
|
app_state: Data<AppState>,
|
||||||
|
) -> impl Responder {
|
||||||
|
let tracer = global_tracer();
|
||||||
|
let context = extract_context_from_request(&request);
|
||||||
|
let mut span = tracer.start_with_context("get_video_part", &context);
|
||||||
|
|
||||||
|
let part = &path.path;
|
||||||
|
debug!("Video part: {}", part);
|
||||||
|
|
||||||
|
let mut file_part = PathBuf::new();
|
||||||
|
file_part.push(app_state.video_path.clone());
|
||||||
|
file_part.push(part);
|
||||||
|
|
||||||
|
// Guard against directory traversal attacks
|
||||||
|
let canonical_base = match std::fs::canonicalize(&app_state.video_path) {
|
||||||
|
Ok(path) => path,
|
||||||
|
Err(e) => {
|
||||||
|
error!("Failed to canonicalize video path: {:?}", e);
|
||||||
|
span.set_status(Status::error("Invalid video path configuration"));
|
||||||
|
return HttpResponse::InternalServerError().finish();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let canonical_file = match std::fs::canonicalize(&file_part) {
|
||||||
|
Ok(path) => path,
|
||||||
|
Err(_) => {
|
||||||
|
warn!("Video part not found or invalid: {:?}", file_part);
|
||||||
|
span.set_status(Status::error(format!("Video part not found '{}'", part)));
|
||||||
|
return HttpResponse::NotFound().finish();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Ensure the resolved path is still within the video directory
|
||||||
|
if !canonical_file.starts_with(&canonical_base) {
|
||||||
|
warn!("Directory traversal attempt detected: {:?}", part);
|
||||||
|
span.set_status(Status::error("Invalid video path"));
|
||||||
|
return HttpResponse::Forbidden().finish();
|
||||||
|
}
|
||||||
|
|
||||||
|
match NamedFile::open(&canonical_file) {
|
||||||
|
Ok(file) => {
|
||||||
|
span.set_status(Status::Ok);
|
||||||
|
file.into_response(&request)
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
error!("Video part not found: {:?}", file_part);
|
||||||
|
span.set_status(Status::error(format!(
|
||||||
|
"Video part not found '{}'",
|
||||||
|
file_part.to_str().unwrap()
|
||||||
|
)));
|
||||||
|
HttpResponse::NotFound().finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
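
The canonicalize-then-starts_with sequence above is the escape-proof variant of a prefix check: both paths are resolved through the filesystem first, so ".." segments and symlinks cannot move the request outside video_path. The same guard can be expressed compactly as a standalone helper; resolve_under_base is a hypothetical name for illustration and is not part of this commit.

// Hypothetical helper, equivalent to the inline guard in get_video_part:
// returns the resolved path only when it stays inside the resolved base.
fn resolve_under_base(base: &str, candidate: &std::path::Path) -> Option<std::path::PathBuf> {
    let canonical_base = std::fs::canonicalize(base).ok()?;
    let canonical_file = std::fs::canonicalize(candidate).ok()?;
    canonical_file
        .starts_with(&canonical_base)
        .then_some(canonical_file)
}

One nuance of canonicalize is that it fails for files that do not exist, which is why the handler maps that error to 404 rather than 403.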
#[get("/video/preview")]
|
||||||
|
pub async fn get_video_preview(
|
||||||
|
_claims: Claims,
|
||||||
|
request: HttpRequest,
|
||||||
|
req: web::Query<PreviewClipRequest>,
|
||||||
|
app_state: Data<AppState>,
|
||||||
|
preview_dao: Data<Mutex<Box<dyn PreviewDao>>>,
|
||||||
|
) -> impl Responder {
|
||||||
|
let tracer = global_tracer();
|
||||||
|
let context = extract_context_from_request(&request);
|
||||||
|
let mut span = tracer.start_with_context("get_video_preview", &context);
|
||||||
|
|
||||||
|
// Validate path
|
||||||
|
let full_path = match is_valid_full_path(&app_state.base_path, &req.path, true) {
|
||||||
|
Some(path) => path,
|
||||||
|
None => {
|
||||||
|
span.set_status(Status::error("Invalid path"));
|
||||||
|
return HttpResponse::BadRequest().json(serde_json::json!({"error": "Invalid path"}));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let full_path_str = full_path.to_string_lossy().to_string();
|
||||||
|
|
||||||
|
// Use relative path (from BASE_PATH) for DB storage, consistent with EXIF convention
|
||||||
|
let relative_path = full_path_str
|
||||||
|
.strip_prefix(&app_state.base_path)
|
||||||
|
.unwrap_or(&full_path_str)
|
||||||
|
.trim_start_matches(['/', '\\'])
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
// Check preview status in DB
|
||||||
|
let preview = {
|
||||||
|
let mut dao = preview_dao.lock().expect("Unable to lock PreviewDao");
|
||||||
|
dao.get_preview(&context, &relative_path)
|
||||||
|
};
|
||||||
|
|
||||||
|
match preview {
|
||||||
|
Ok(Some(clip)) => match clip.status.as_str() {
|
||||||
|
"complete" => {
|
||||||
|
let preview_path = PathBuf::from(&app_state.preview_clips_path)
|
||||||
|
.join(&relative_path)
|
||||||
|
.with_extension("mp4");
|
||||||
|
|
||||||
|
match NamedFile::open(&preview_path) {
|
||||||
|
Ok(file) => {
|
||||||
|
span.set_status(Status::Ok);
|
||||||
|
file.into_response(&request)
|
||||||
|
}
|
||||||
|
Err(_) => {
|
||||||
|
// File missing on disk but DB says complete - reset and regenerate
|
||||||
|
let mut dao = preview_dao.lock().expect("Unable to lock PreviewDao");
|
||||||
|
let _ = dao.update_status(
|
||||||
|
&context,
|
||||||
|
&relative_path,
|
||||||
|
"pending",
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
app_state
|
||||||
|
.preview_clip_generator
|
||||||
|
.do_send(GeneratePreviewClipMessage {
|
||||||
|
video_path: full_path_str,
|
||||||
|
});
|
||||||
|
span.set_status(Status::Ok);
|
||||||
|
HttpResponse::Accepted().json(serde_json::json!({
|
||||||
|
"status": "processing",
|
||||||
|
"path": req.path
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"processing" => {
|
||||||
|
span.set_status(Status::Ok);
|
||||||
|
HttpResponse::Accepted().json(serde_json::json!({
|
||||||
|
"status": "processing",
|
||||||
|
"path": req.path
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
"failed" => {
|
||||||
|
let error_msg = clip
|
||||||
|
.error_message
|
||||||
|
.unwrap_or_else(|| "Unknown error".to_string());
|
||||||
|
span.set_status(Status::error(format!("Generation failed: {}", error_msg)));
|
||||||
|
HttpResponse::InternalServerError().json(serde_json::json!({
|
||||||
|
"error": format!("Generation failed: {}", error_msg)
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
// pending or unknown status - trigger generation
|
||||||
|
app_state
|
||||||
|
.preview_clip_generator
|
||||||
|
.do_send(GeneratePreviewClipMessage {
|
||||||
|
video_path: full_path_str,
|
||||||
|
});
|
||||||
|
span.set_status(Status::Ok);
|
||||||
|
HttpResponse::Accepted().json(serde_json::json!({
|
||||||
|
"status": "processing",
|
||||||
|
"path": req.path
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Ok(None) => {
|
||||||
|
// No record exists - insert as pending and trigger generation
|
||||||
|
{
|
||||||
|
let mut dao = preview_dao.lock().expect("Unable to lock PreviewDao");
|
||||||
|
let _ = dao.insert_preview(&context, &relative_path, "pending");
|
||||||
|
}
|
||||||
|
app_state
|
||||||
|
.preview_clip_generator
|
||||||
|
.do_send(GeneratePreviewClipMessage {
|
||||||
|
video_path: full_path_str,
|
||||||
|
});
|
||||||
|
span.set_status(Status::Ok);
|
||||||
|
HttpResponse::Accepted().json(serde_json::json!({
|
||||||
|
"status": "processing",
|
||||||
|
"path": req.path
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
Err(_) => {
|
||||||
|
span.set_status(Status::error("Database error"));
|
||||||
|
HttpResponse::InternalServerError().json(serde_json::json!({"error": "Database error"}))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
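
From a client's perspective this endpoint is a poll-until-ready protocol: 202 with "status": "processing" while the actor works, 200 with the MP4 bytes once complete, 500 once generation has failed. The client sketch below is a hedged illustration only; it assumes the reqwest and tokio crates, which are not dependencies of this server, and a fetch_preview name invented for the example.

// Hypothetical client-side poll loop (not part of the commit): retry the
// preview endpoint a few times, sleeping between 202 responses.
async fn fetch_preview(base_url: &str, path: &str, token: &str) -> Option<Vec<u8>> {
    let client = reqwest::Client::new();
    let url = format!("{}/video/preview?path={}", base_url, urlencoding::encode(path));
    for _ in 0..10 {
        let resp = client.get(&url).bearer_auth(token).send().await.ok()?;
        match resp.status().as_u16() {
            // Complete: the body is the MP4 clip itself.
            200 => return resp.bytes().await.ok().map(|b| b.to_vec()),
            // Still generating: back off and poll again.
            202 => tokio::time::sleep(std::time::Duration::from_secs(2)).await,
            // Failed generation or invalid path.
            _ => return None,
        }
    }
    None
}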
#[post("/video/preview/status")]
|
||||||
|
pub async fn get_preview_status(
|
||||||
|
_claims: Claims,
|
||||||
|
request: HttpRequest,
|
||||||
|
body: web::Json<PreviewStatusRequest>,
|
||||||
|
app_state: Data<AppState>,
|
||||||
|
preview_dao: Data<Mutex<Box<dyn PreviewDao>>>,
|
||||||
|
) -> impl Responder {
|
||||||
|
let tracer = global_tracer();
|
||||||
|
let context = extract_context_from_request(&request);
|
||||||
|
let mut span = tracer.start_with_context("get_preview_status", &context);
|
||||||
|
|
||||||
|
// Limit to 200 paths per request
|
||||||
|
if body.paths.len() > 200 {
|
||||||
|
span.set_status(Status::error("Too many paths"));
|
||||||
|
return HttpResponse::BadRequest()
|
||||||
|
.json(serde_json::json!({"error": "Maximum 200 paths per request"}));
|
||||||
|
}
|
||||||
|
|
||||||
|
let previews = {
|
||||||
|
let mut dao = preview_dao.lock().expect("Unable to lock PreviewDao");
|
||||||
|
dao.get_previews_batch(&context, &body.paths)
|
||||||
|
};
|
||||||
|
|
||||||
|
match previews {
|
||||||
|
Ok(clips) => {
|
||||||
|
// Build a map of file_path -> VideoPreviewClip for quick lookup
|
||||||
|
let clip_map: HashMap<String, _> = clips
|
||||||
|
.into_iter()
|
||||||
|
.map(|clip| (clip.file_path.clone(), clip))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let mut items: Vec<PreviewStatusItem> = Vec::with_capacity(body.paths.len());
|
||||||
|
|
||||||
|
for path in &body.paths {
|
||||||
|
if let Some(clip) = clip_map.get(path) {
|
||||||
|
// Re-queue generation for stale pending/failed records
|
||||||
|
if clip.status == "pending" || clip.status == "failed" {
|
||||||
|
let full_path = format!(
|
||||||
|
"{}/{}",
|
||||||
|
app_state.base_path.trim_end_matches(['/', '\\']),
|
||||||
|
path.trim_start_matches(['/', '\\'])
|
||||||
|
);
|
||||||
|
app_state
|
||||||
|
.preview_clip_generator
|
||||||
|
.do_send(GeneratePreviewClipMessage {
|
||||||
|
video_path: full_path,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
items.push(PreviewStatusItem {
|
||||||
|
path: path.clone(),
|
||||||
|
status: clip.status.clone(),
|
||||||
|
preview_url: if clip.status == "complete" {
|
||||||
|
Some(format!("/video/preview?path={}", urlencoding::encode(path)))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// No record exists — insert as pending and trigger generation
|
||||||
|
{
|
||||||
|
let mut dao = preview_dao.lock().expect("Unable to lock PreviewDao");
|
||||||
|
let _ = dao.insert_preview(&context, path, "pending");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build full path for ffmpeg (actor needs the absolute path for input)
|
||||||
|
let full_path = format!(
|
||||||
|
"{}/{}",
|
||||||
|
app_state.base_path.trim_end_matches(['/', '\\']),
|
||||||
|
path.trim_start_matches(['/', '\\'])
|
||||||
|
);
|
||||||
|
|
||||||
|
info!("Triggering preview generation for '{}'", path);
|
||||||
|
app_state
|
||||||
|
.preview_clip_generator
|
||||||
|
.do_send(GeneratePreviewClipMessage {
|
||||||
|
video_path: full_path,
|
||||||
|
});
|
||||||
|
|
||||||
|
items.push(PreviewStatusItem {
|
||||||
|
path: path.clone(),
|
||||||
|
status: "pending".to_string(),
|
||||||
|
preview_url: None,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
span.set_status(Status::Ok);
|
||||||
|
HttpResponse::Ok().json(PreviewStatusResponse { previews: items })
|
||||||
|
}
|
||||||
|
Err(_) => {
|
||||||
|
span.set_status(Status::error("Database error"));
|
||||||
|
HttpResponse::InternalServerError().json(serde_json::json!({"error": "Database error"}))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
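
The base/relative join via format! with separator trimming appears twice in get_preview_status. A small sketch of its behavior, using a hypothetical join_base helper name not present in this commit, shows why both trims matter:

// Hypothetical extraction of the repeated join logic above: trailing
// separators on the base and leading separators on the relative path are
// stripped so the join never produces "//" or loses a separator.
fn join_base(base: &str, rel: &str) -> String {
    format!(
        "{}/{}",
        base.trim_end_matches(['/', '\\']),
        rel.trim_start_matches(['/', '\\'])
    )
}

#[test]
fn join_base_normalizes_separators() {
    assert_eq!(join_base("/media/", "/photos/a.mp4"), "/media/photos/a.mp4");
    assert_eq!(join_base("/media", "photos/a.mp4"), "/media/photos/a.mp4");
}

Pulling the duplicate into one helper like this would also keep the re-queue path and the insert path from drifting apart.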
#[cfg(test)]
mod tests {
    use super::*;
    use crate::data::Claims;
    use crate::database::PreviewDao;
    use crate::testhelpers::TestPreviewDao;
    use actix_web::App;

    fn make_token() -> String {
        let claims = Claims::valid_user("1".to_string());
        jsonwebtoken::encode(
            &jsonwebtoken::Header::default(),
            &claims,
            &jsonwebtoken::EncodingKey::from_secret(b"test_key"),
        )
        .unwrap()
    }

    fn make_preview_dao(dao: TestPreviewDao) -> Data<Mutex<Box<dyn PreviewDao>>> {
        Data::new(Mutex::new(Box::new(dao) as Box<dyn PreviewDao>))
    }

    #[actix_rt::test]
    async fn test_get_preview_status_returns_pending_for_unknown() {
        let dao = TestPreviewDao::new();
        let preview_dao = make_preview_dao(dao);
        let app_state = Data::new(AppState::test_state());
        let token = make_token();

        let app = actix_web::test::init_service(
            App::new()
                .service(get_preview_status)
                .app_data(app_state)
                .app_data(preview_dao.clone()),
        )
        .await;

        let req = actix_web::test::TestRequest::post()
            .uri("/video/preview/status")
            .insert_header(("Authorization", format!("Bearer {}", token)))
            .set_json(serde_json::json!({"paths": ["photos/new_video.mp4"]}))
            .to_request();

        let resp = actix_web::test::call_service(&app, req).await;
        assert_eq!(resp.status(), 200);

        let body: serde_json::Value = actix_web::test::read_body_json(resp).await;
        let previews = body["previews"].as_array().unwrap();
        assert_eq!(previews.len(), 1);
        assert_eq!(previews[0]["status"], "pending");

        // Verify the DAO now has a pending record
        let mut dao_lock = preview_dao.lock().unwrap();
        let ctx = opentelemetry::Context::new();
        let clip = dao_lock.get_preview(&ctx, "photos/new_video.mp4").unwrap();
        assert!(clip.is_some());
        assert_eq!(clip.unwrap().status, "pending");
    }

    #[actix_rt::test]
    async fn test_get_preview_status_returns_complete_with_url() {
        let mut dao = TestPreviewDao::new();
        let ctx = opentelemetry::Context::new();
        dao.insert_preview(&ctx, "photos/done.mp4", "pending")
            .unwrap();
        dao.update_status(
            &ctx,
            "photos/done.mp4",
            "complete",
            Some(9.5),
            Some(500000),
            None,
        )
        .unwrap();

        let preview_dao = make_preview_dao(dao);
        let app_state = Data::new(AppState::test_state());
        let token = make_token();

        let app = actix_web::test::init_service(
            App::new()
                .service(get_preview_status)
                .app_data(app_state)
                .app_data(preview_dao),
        )
        .await;

        let req = actix_web::test::TestRequest::post()
            .uri("/video/preview/status")
            .insert_header(("Authorization", format!("Bearer {}", token)))
            .set_json(serde_json::json!({"paths": ["photos/done.mp4"]}))
            .to_request();

        let resp = actix_web::test::call_service(&app, req).await;
        assert_eq!(resp.status(), 200);

        let body: serde_json::Value = actix_web::test::read_body_json(resp).await;
        let previews = body["previews"].as_array().unwrap();
        assert_eq!(previews.len(), 1);
        assert_eq!(previews[0]["status"], "complete");
        assert!(
            previews[0]["preview_url"]
                .as_str()
                .unwrap()
                .contains("photos%2Fdone.mp4")
        );
    }

    #[actix_rt::test]
    async fn test_get_preview_status_rejects_over_200_paths() {
        let dao = TestPreviewDao::new();
        let preview_dao = make_preview_dao(dao);
        let app_state = Data::new(AppState::test_state());
        let token = make_token();

        let app = actix_web::test::init_service(
            App::new()
                .service(get_preview_status)
                .app_data(app_state)
                .app_data(preview_dao),
        )
        .await;

        let paths: Vec<String> = (0..201).map(|i| format!("video_{}.mp4", i)).collect();
        let req = actix_web::test::TestRequest::post()
            .uri("/video/preview/status")
            .insert_header(("Authorization", format!("Bearer {}", token)))
            .set_json(serde_json::json!({"paths": paths}))
            .to_request();

        let resp = actix_web::test::call_service(&app, req).await;
        assert_eq!(resp.status(), 400);
    }

    #[actix_rt::test]
    async fn test_get_preview_status_mixed_statuses() {
        let mut dao = TestPreviewDao::new();
        let ctx = opentelemetry::Context::new();
        dao.insert_preview(&ctx, "a.mp4", "pending").unwrap();
        dao.insert_preview(&ctx, "b.mp4", "pending").unwrap();
        dao.update_status(&ctx, "b.mp4", "complete", Some(10.0), Some(100000), None)
            .unwrap();

        let preview_dao = make_preview_dao(dao);
        let app_state = Data::new(AppState::test_state());
        let token = make_token();

        let app = actix_web::test::init_service(
            App::new()
                .service(get_preview_status)
                .app_data(app_state)
                .app_data(preview_dao),
        )
        .await;

        let req = actix_web::test::TestRequest::post()
            .uri("/video/preview/status")
            .insert_header(("Authorization", format!("Bearer {}", token)))
            .set_json(serde_json::json!({"paths": ["a.mp4", "b.mp4", "c.mp4"]}))
            .to_request();

        let resp = actix_web::test::call_service(&app, req).await;
        assert_eq!(resp.status(), 200);

        let body: serde_json::Value = actix_web::test::read_body_json(resp).await;
        let previews = body["previews"].as_array().unwrap();
        assert_eq!(previews.len(), 3);

        // a.mp4 is pending
        assert_eq!(previews[0]["path"], "a.mp4");
        assert_eq!(previews[0]["status"], "pending");

        // b.mp4 is complete with URL
        assert_eq!(previews[1]["path"], "b.mp4");
        assert_eq!(previews[1]["status"], "complete");
        assert!(previews[1]["preview_url"].is_string());

        // c.mp4 was not found — handler inserts pending
        assert_eq!(previews[2]["path"], "c.mp4");
        assert_eq!(previews[2]["status"], "pending");
    }

    /// Verifies that the status endpoint re-queues generation for stale
    /// "pending" and "failed" records (e.g., after a server restart or
    /// when clip files were deleted). The do_send to the actor exercises
    /// the re-queue code path; the actor runs against temp dirs so it
    /// won't panic.
    #[actix_rt::test]
    async fn test_get_preview_status_requeues_pending_and_failed() {
        let mut dao = TestPreviewDao::new();
        let ctx = opentelemetry::Context::new();

        // Simulate stale records left from a previous server run
        dao.insert_preview(&ctx, "stale/pending.mp4", "pending")
            .unwrap();
        dao.insert_preview(&ctx, "stale/failed.mp4", "pending")
            .unwrap();
        dao.update_status(
            &ctx,
            "stale/failed.mp4",
            "failed",
            None,
            None,
            Some("ffmpeg error"),
        )
        .unwrap();

        let preview_dao = make_preview_dao(dao);
        let app_state = Data::new(AppState::test_state());
        let token = make_token();

        let app = actix_web::test::init_service(
            App::new()
                .service(get_preview_status)
                .app_data(app_state)
                .app_data(preview_dao),
        )
        .await;

        let req = actix_web::test::TestRequest::post()
            .uri("/video/preview/status")
            .insert_header(("Authorization", format!("Bearer {}", token)))
            .set_json(serde_json::json!({
                "paths": ["stale/pending.mp4", "stale/failed.mp4"]
            }))
            .to_request();

        let resp = actix_web::test::call_service(&app, req).await;
        assert_eq!(resp.status(), 200);

        let body: serde_json::Value = actix_web::test::read_body_json(resp).await;
        let previews = body["previews"].as_array().unwrap();
        assert_eq!(previews.len(), 2);

        // Both records are returned with their current status
        assert_eq!(previews[0]["path"], "stale/pending.mp4");
        assert_eq!(previews[0]["status"], "pending");
        assert!(previews[0].get("preview_url").is_none());

        assert_eq!(previews[1]["path"], "stale/failed.mp4");
        assert_eq!(previews[1]["status"], "failed");
        assert!(previews[1].get("preview_url").is_none());
    }
}
src/main.rs: 1794 lines changed (diff suppressed because it is too large)