feature/exif-endpoint #44

Merged
cameron merged 29 commits from feature/exif-endpoint into master 2025-12-27 03:25:19 +00:00
4 changed files with 109 additions and 85 deletions
Showing only changes of commit e4d988a9fd - Show all commits

View File

@@ -2,11 +2,11 @@ use std::path::PathBuf;
 use std::sync::{Arc, Mutex};
 use chrono::Utc;
-use walkdir::WalkDir;
 use rayon::prelude::*;
+use walkdir::WalkDir;
-use image_api::database::{ExifDao, SqliteExifDao};
 use image_api::database::models::InsertImageExif;
+use image_api::database::{ExifDao, SqliteExifDao};
 use image_api::exif;
 fn main() -> anyhow::Result<()> {
@@ -51,7 +51,10 @@ fn main() -> anyhow::Result<()> {
         let relative_path = match path.strip_prefix(&base) {
             Ok(p) => p.to_str().unwrap().to_string(),
             Err(_) => {
-                eprintln!("Error: Could not create relative path for {}", path.display());
+                eprintln!(
+                    "Error: Could not create relative path for {}",
+                    path.display()
+                );
                 return Err(anyhow::anyhow!("Path error"));
             }
         };
@@ -110,10 +113,17 @@ fn main() -> anyhow::Result<()> {
     println!();
     println!("===================");
     println!("Migration complete!");
-    println!("Successfully extracted EXIF from {}/{} images", success_count, image_files.len());
+    println!(
+        "Successfully extracted EXIF from {}/{} images",
+        success_count,
+        image_files.len()
+    );
     if error_count > 0 {
-        println!("{} images had no EXIF data or encountered errors", error_count);
+        println!(
+            "{} images had no EXIF data or encountered errors",
+            error_count
+        );
     }
     Ok(())

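The hunk above only shows the migration binary's imports and its summary output. For orientation, here is a minimal sketch of how a WalkDir + rayon loop of this shape is typically wired up; the `process` helper, the `/data/images` root, and the atomic counters are illustrative assumptions, not code from this PR:

```rust
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicUsize, Ordering};

use rayon::prelude::*;
use walkdir::WalkDir;

// Hypothetical stand-in for the real per-file work (EXIF extraction + DB insert);
// returns Err when a file has no EXIF data or cannot be read.
fn process(_path: &Path) -> anyhow::Result<()> {
    Ok(())
}

fn main() -> anyhow::Result<()> {
    let base = PathBuf::from("/data/images"); // hypothetical image root

    // Collect every regular file under the base directory.
    let image_files: Vec<PathBuf> = WalkDir::new(&base)
        .into_iter()
        .filter_map(|entry| entry.ok())
        .filter(|entry| entry.file_type().is_file())
        .map(|entry| entry.into_path())
        .collect();

    let success_count = AtomicUsize::new(0);
    let error_count = AtomicUsize::new(0);

    // rayon parallelises the per-file work, matching the `rayon::prelude::*` import above.
    image_files.par_iter().for_each(|path| match process(path) {
        Ok(()) => {
            success_count.fetch_add(1, Ordering::Relaxed);
        }
        Err(_) => {
            error_count.fetch_add(1, Ordering::Relaxed);
        }
    });

    println!(
        "Successfully extracted EXIF from {}/{} images",
        success_count.load(Ordering::Relaxed),
        image_files.len()
    );
    Ok(())
}
```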
View File

@@ -1,7 +1,7 @@
 use std::{fs, str::FromStr};
-use anyhow::{Context, anyhow};
 use crate::database::models::ImageExif;
+use anyhow::{Context, anyhow};
 use chrono::{DateTime, Utc};
 use log::error;
@@ -234,10 +234,17 @@ pub struct CaptureSettings {
 impl From<ImageExif> for ExifMetadata {
     fn from(exif: ImageExif) -> Self {
-        let has_camera_info = exif.camera_make.is_some() || exif.camera_model.is_some() || exif.lens_model.is_some();
-        let has_image_properties = exif.width.is_some() || exif.height.is_some() || exif.orientation.is_some();
-        let has_gps = exif.gps_latitude.is_some() || exif.gps_longitude.is_some() || exif.gps_altitude.is_some();
-        let has_capture_settings = exif.focal_length.is_some() || exif.aperture.is_some() || exif.shutter_speed.is_some() || exif.iso.is_some();
+        let has_camera_info =
+            exif.camera_make.is_some() || exif.camera_model.is_some() || exif.lens_model.is_some();
+        let has_image_properties =
+            exif.width.is_some() || exif.height.is_some() || exif.orientation.is_some();
+        let has_gps = exif.gps_latitude.is_some()
+            || exif.gps_longitude.is_some()
+            || exif.gps_altitude.is_some();
+        let has_capture_settings = exif.focal_length.is_some()
+            || exif.aperture.is_some()
+            || exif.shutter_speed.is_some()
+            || exif.iso.is_some();
         ExifMetadata {
             camera: if has_camera_info {

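The `From<ImageExif>` impl above collapses a flat row of nullable EXIF columns into optional sub-structs, gated on whether any column in the group is set. A stripped-down sketch of the same pattern, using simplified hypothetical types in place of the PR's real `ImageExif`/`ExifMetadata`:

```rust
// Simplified, hypothetical stand-ins for the real ImageExif / ExifMetadata types,
// which carry many more columns and groups in this PR.
struct FlatExif {
    camera_make: Option<String>,
    camera_model: Option<String>,
    width: Option<u32>,
    height: Option<u32>,
}

struct CameraInfo {
    make: Option<String>,
    model: Option<String>,
}

struct Grouped {
    camera: Option<CameraInfo>,
    dimensions: Option<(u32, u32)>,
}

impl From<FlatExif> for Grouped {
    fn from(exif: FlatExif) -> Self {
        // Only emit a group when at least one of its columns is populated,
        // mirroring the `has_camera_info` / `has_image_properties` flags above.
        let has_camera_info = exif.camera_make.is_some() || exif.camera_model.is_some();
        Grouped {
            camera: if has_camera_info {
                Some(CameraInfo {
                    make: exif.camera_make,
                    model: exif.camera_model,
                })
            } else {
                None
            },
            dimensions: exif.width.zip(exif.height),
        }
    }
}

fn main() {
    let grouped = Grouped::from(FlatExif {
        camera_make: Some("Canon".into()),
        camera_model: None,
        width: Some(4000),
        height: Some(3000),
    });
    assert!(grouped.camera.is_some());
    assert_eq!(grouped.dimensions, Some((4000, 3000)));
}
```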
View File

@@ -65,83 +65,84 @@ pub async fn list_photos<TagD: TagDao, FS: FileSystemAccess>(
     let search_recursively = req.recursive.unwrap_or(false);
     if let Some(tag_ids) = &req.tag_ids
-        && search_recursively {
-            let filter_mode = &req.tag_filter_mode.unwrap_or(FilterMode::Any);
-            info!(
-                "Searching for tags: {}. With path: '{}' and filter mode: {:?}",
-                tag_ids, search_path, filter_mode
-            );
+        && search_recursively
+    {
+        let filter_mode = &req.tag_filter_mode.unwrap_or(FilterMode::Any);
+        info!(
+            "Searching for tags: {}. With path: '{}' and filter mode: {:?}",
+            tag_ids, search_path, filter_mode
+        );

-            let mut dao = tag_dao.lock().expect("Unable to get TagDao");
-            let tag_ids = tag_ids
-                .split(',')
-                .filter_map(|t| t.parse().ok())
-                .collect::<Vec<i32>>();
+        let mut dao = tag_dao.lock().expect("Unable to get TagDao");
+        let tag_ids = tag_ids
+            .split(',')
+            .filter_map(|t| t.parse().ok())
+            .collect::<Vec<i32>>();

-            let exclude_tag_ids = req
-                .exclude_tag_ids
-                .clone()
-                .unwrap_or_default()
-                .split(',')
-                .filter_map(|t| t.parse().ok())
-                .collect::<Vec<i32>>();
+        let exclude_tag_ids = req
+            .exclude_tag_ids
+            .clone()
+            .unwrap_or_default()
+            .split(',')
+            .filter_map(|t| t.parse().ok())
+            .collect::<Vec<i32>>();

-            return match filter_mode {
-                FilterMode::Any => {
-                    dao.get_files_with_any_tag_ids(tag_ids.clone(), exclude_tag_ids, &span_context)
-                }
-                FilterMode::All => {
-                    dao.get_files_with_all_tag_ids(tag_ids.clone(), exclude_tag_ids, &span_context)
-                }
-            }
+        return match filter_mode {
+            FilterMode::Any => {
+                dao.get_files_with_any_tag_ids(tag_ids.clone(), exclude_tag_ids, &span_context)
+            }
+            FilterMode::All => {
+                dao.get_files_with_all_tag_ids(tag_ids.clone(), exclude_tag_ids, &span_context)
+            }
+        }
-            .context(format!(
-                "Failed to get files with tag_ids: {:?} with filter_mode: {:?}",
-                tag_ids, filter_mode
-            ))
-            .inspect(|files| {
-                info!(
-                    "Found {:?} tagged files, filtering down by search path {:?}",
-                    files.len(),
-                    search_path
-                )
-            })
-            .map(|tagged_files| {
-                tagged_files
-                    .into_iter()
-                    .filter(|f| {
-                        // When searching at the root, everything matches recursively
-                        if search_path.trim() == "" {
-                            return true;
-                        }
-                        f.file_name.starts_with(&format!(
-                            "{}/",
-                            search_path.strip_suffix('/').unwrap_or_else(|| search_path)
-                        ))
-                    })
-                    .collect::<Vec<FileWithTagCount>>()
-            })
-            .map(|files| sort(files, req.sort.unwrap_or(NameAsc)))
-            .inspect(|files| debug!("Found {:?} files", files.len()))
-            .map(|tagged_files: Vec<String>| {
-                info!(
-                    "Found {:?} tagged files: {:?}",
-                    tagged_files.len(),
-                    tagged_files
-                );
-                span_context
-                    .span()
-                    .set_attribute(KeyValue::new("file_count", tagged_files.len().to_string()));
-                span_context.span().set_status(Status::Ok);
-                HttpResponse::Ok().json(PhotosResponse {
-                    photos: tagged_files,
-                    dirs: vec![],
-                })
-            })
-            .into_http_internal_err()
-            .unwrap_or_else(|e| e.error_response());
-        }
+        .context(format!(
+            "Failed to get files with tag_ids: {:?} with filter_mode: {:?}",
+            tag_ids, filter_mode
+        ))
+        .inspect(|files| {
+            info!(
+                "Found {:?} tagged files, filtering down by search path {:?}",
+                files.len(),
+                search_path
+            )
+        })
+        .map(|tagged_files| {
+            tagged_files
+                .into_iter()
+                .filter(|f| {
+                    // When searching at the root, everything matches recursively
+                    if search_path.trim() == "" {
+                        return true;
+                    }
+                    f.file_name.starts_with(&format!(
+                        "{}/",
+                        search_path.strip_suffix('/').unwrap_or_else(|| search_path)
+                    ))
+                })
+                .collect::<Vec<FileWithTagCount>>()
+        })
+        .map(|files| sort(files, req.sort.unwrap_or(NameAsc)))
+        .inspect(|files| debug!("Found {:?} files", files.len()))
+        .map(|tagged_files: Vec<String>| {
+            info!(
+                "Found {:?} tagged files: {:?}",
+                tagged_files.len(),
+                tagged_files
+            );
+            span_context
+                .span()
+                .set_attribute(KeyValue::new("file_count", tagged_files.len().to_string()));
+            span_context.span().set_status(Status::Ok);
+            HttpResponse::Ok().json(PhotosResponse {
+                photos: tagged_files,
+                dirs: vec![],
+            })
+        })
+        .into_http_internal_err()
+        .unwrap_or_else(|e| e.error_response());
+    }
     match file_system.get_files_for_path(search_path) {
         Ok(files) => {

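The filter inside the tag-search branch above reduces to a small predicate: an empty search path matches everything, otherwise the stored file name must start with the search path followed by a slash. A standalone sketch of that check (file names are made up):

```rust
// Mirrors the filter closure in the hunk above: normalise the trailing slash,
// then do a plain prefix check on the stored file name.
fn matches_search_path(file_name: &str, search_path: &str) -> bool {
    // When searching at the root, everything matches recursively.
    if search_path.trim().is_empty() {
        return true;
    }
    let prefix = format!("{}/", search_path.strip_suffix('/').unwrap_or(search_path));
    file_name.starts_with(&prefix)
}

fn main() {
    assert!(matches_search_path("2024/trip/IMG_0001.jpg", ""));
    assert!(matches_search_path("2024/trip/IMG_0001.jpg", "2024/trip"));
    assert!(matches_search_path("2024/trip/IMG_0001.jpg", "2024/trip/"));
    assert!(!matches_search_path("2024/other/IMG_0002.jpg", "2024/trip"));
}
```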
View File

@@ -33,8 +33,8 @@ use rayon::prelude::*;
 use crate::auth::login;
 use crate::data::*;
-use crate::database::*;
 use crate::database::models::InsertImageExif;
+use crate::database::*;
 use crate::files::{
     RealFileSystem, RefreshThumbnailsMessage, is_image_or_video, is_valid_full_path, move_file,
 };
@@ -308,7 +308,11 @@ async fn upload_image(
                 }
             }
             Err(e) => {
-                debug!("No EXIF data or error extracting from {}: {:?}", uploaded_path.display(), e);
+                debug!(
+                    "No EXIF data or error extracting from {}: {:?}",
+                    uploaded_path.display(),
+                    e
+                );
             }
         }
     }
@@ -738,7 +742,9 @@ fn main() -> std::io::Result<()> {
                 favorites_dao,
             ))))
             .app_data::<Data<Mutex<SqliteTagDao>>>(Data::new(Mutex::new(tag_dao)))
-            .app_data::<Data<Mutex<Box<dyn ExifDao>>>>(Data::new(Mutex::new(Box::new(exif_dao))))
+            .app_data::<Data<Mutex<Box<dyn ExifDao>>>>(Data::new(Mutex::new(Box::new(
+                exif_dao,
+            ))))
             .wrap(prometheus.clone())
     })
     .bind(dotenv::var("BIND_URL").unwrap())?
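The new `Data<Mutex<Box<dyn ExifDao>>>` registration is consumed by extracting the same type in a handler and locking the mutex, as the existing handlers do with `tag_dao`. A self-contained sketch below; the `ExifDao` trait shown here, its `count` method, and the `/exif/count` route are invented for illustration and are not the PR's actual trait or endpoints:

```rust
use std::sync::Mutex;

use actix_web::{App, HttpResponse, HttpServer, Responder, get, web::Data};

// Hypothetical stand-ins for the real ExifDao trait and its SQLite implementation.
trait ExifDao {
    fn count(&mut self) -> i64;
}

struct SqliteExifDao;

impl ExifDao for SqliteExifDao {
    fn count(&mut self) -> i64 {
        0
    }
}

#[get("/exif/count")]
async fn exif_count(exif_dao: Data<Mutex<Box<dyn ExifDao>>>) -> impl Responder {
    // Lock the shared DAO for the duration of the query, the same way the
    // handlers in this repo lock `tag_dao`.
    let mut dao = exif_dao.lock().expect("Unable to get ExifDao");
    HttpResponse::Ok().json(dao.count())
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Same registration shape as the hunk above, with the hypothetical DAO.
            .app_data::<Data<Mutex<Box<dyn ExifDao>>>>(Data::new(Mutex::new(Box::new(
                SqliteExifDao,
            ))))
            .service(exif_count)
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```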