chore(bins): retire unused migrate_exif
Single-library hardcoded (library_id=1) and missing content_hash/size_bytes backfill, so the watcher's full-scan path subsumes everything it does. Removed the binary and its CLAUDE.md reference. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -69,9 +69,6 @@ cargo fix
 ```bash
 # Two-phase cleanup: resolve missing files and validate file types
 cargo run --bin cleanup_files -- --base-path /path/to/media --database-url ./database.db
-
-# Batch extract EXIF for existing files
-cargo run --bin migrate_exif
 ```

 ## Architecture Overview
@@ -1,198 +0,0 @@
use std::path::PathBuf;
|
|
||||||
use std::sync::{Arc, Mutex};
|
|
||||||
|
|
||||||
use chrono::Utc;
|
|
||||||
use clap::Parser;
|
|
||||||
use rayon::prelude::*;
|
|
||||||
use walkdir::WalkDir;
|
|
||||||
|
|
||||||
use image_api::database::models::InsertImageExif;
|
|
||||||
use image_api::database::{ExifDao, SqliteExifDao};
|
|
||||||
use image_api::exif;
|
|
||||||
|
|
||||||
// Command-line arguments for the one-shot EXIF migration binary.
// Help text is supplied explicitly via `help`/`about` attributes rather than
// doc comments, so the rendered `--help` output is exactly what is written here.
#[derive(Parser, Debug)]
#[command(name = "migrate_exif")]
#[command(about = "Extract and store EXIF data from images", long_about = None)]
struct Args {
    // When set, files that already have an EXIF row in the database are
    // skipped entirely (incremental run); otherwise existing rows are updated.
    #[arg(long, help = "Skip files that already have EXIF data in database")]
    skip_existing: bool,
}
|
|
||||||
|
|
||||||
fn main() -> anyhow::Result<()> {
|
|
||||||
env_logger::init();
|
|
||||||
dotenv::dotenv()?;
|
|
||||||
|
|
||||||
let args = Args::parse();
|
|
||||||
let base_path = dotenv::var("BASE_PATH")?;
|
|
||||||
let base = PathBuf::from(&base_path);
|
|
||||||
|
|
||||||
println!("EXIF Migration Tool");
|
|
||||||
println!("===================");
|
|
||||||
println!("Base path: {}", base.display());
|
|
||||||
if args.skip_existing {
|
|
||||||
println!("Mode: Skip existing (incremental)");
|
|
||||||
} else {
|
|
||||||
println!("Mode: Upsert (insert new, update existing)");
|
|
||||||
}
|
|
||||||
println!();
|
|
||||||
|
|
||||||
// Collect all image files that support EXIF
|
|
||||||
println!("Scanning for images...");
|
|
||||||
let image_files: Vec<PathBuf> = WalkDir::new(&base)
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|e| e.ok())
|
|
||||||
.filter(|e| e.file_type().is_file())
|
|
||||||
.filter(|e| exif::supports_exif(e.path()))
|
|
||||||
.map(|e| e.path().to_path_buf())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
println!("Found {} images to process", image_files.len());
|
|
||||||
|
|
||||||
if image_files.is_empty() {
|
|
||||||
println!("No EXIF-supporting images found. Exiting.");
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
println!();
|
|
||||||
println!("Extracting EXIF data...");
|
|
||||||
|
|
||||||
// Create a thread-safe DAO
|
|
||||||
let dao = Arc::new(Mutex::new(SqliteExifDao::new()));
|
|
||||||
|
|
||||||
// Process in parallel using rayon
|
|
||||||
let results: Vec<_> = image_files
|
|
||||||
.par_iter()
|
|
||||||
.map(|path| {
|
|
||||||
// Create context for this processing iteration
|
|
||||||
let context = opentelemetry::Context::new();
|
|
||||||
|
|
||||||
let relative_path = match path.strip_prefix(&base) {
|
|
||||||
Ok(p) => p.to_str().unwrap().replace('\\', "/"),
|
|
||||||
Err(_) => {
|
|
||||||
eprintln!(
|
|
||||||
"Error: Could not create relative path for {}",
|
|
||||||
path.display()
|
|
||||||
);
|
|
||||||
return Err(anyhow::anyhow!("Path error"));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Check if EXIF data already exists
|
|
||||||
let existing = if let Ok(mut dao_lock) = dao.lock() {
|
|
||||||
dao_lock.get_exif(&context, &relative_path).ok().flatten()
|
|
||||||
} else {
|
|
||||||
eprintln!("✗ {} - Failed to acquire database lock", relative_path);
|
|
||||||
return Err(anyhow::anyhow!("Lock error"));
|
|
||||||
};
|
|
||||||
|
|
||||||
// Skip if exists and skip_existing flag is set
|
|
||||||
if args.skip_existing && existing.is_some() {
|
|
||||||
return Ok(("skip".to_string(), relative_path));
|
|
||||||
}
|
|
||||||
|
|
||||||
match exif::extract_exif_from_path(path) {
|
|
||||||
Ok(exif_data) => {
|
|
||||||
let timestamp = Utc::now().timestamp();
|
|
||||||
let insert_exif = InsertImageExif {
|
|
||||||
library_id: image_api::libraries::PRIMARY_LIBRARY_ID,
|
|
||||||
file_path: relative_path.clone(),
|
|
||||||
camera_make: exif_data.camera_make,
|
|
||||||
camera_model: exif_data.camera_model,
|
|
||||||
lens_model: exif_data.lens_model,
|
|
||||||
width: exif_data.width,
|
|
||||||
height: exif_data.height,
|
|
||||||
orientation: exif_data.orientation,
|
|
||||||
gps_latitude: exif_data.gps_latitude.map(|v| v as f32),
|
|
||||||
gps_longitude: exif_data.gps_longitude.map(|v| v as f32),
|
|
||||||
gps_altitude: exif_data.gps_altitude.map(|v| v as f32),
|
|
||||||
focal_length: exif_data.focal_length.map(|v| v as f32),
|
|
||||||
aperture: exif_data.aperture.map(|v| v as f32),
|
|
||||||
shutter_speed: exif_data.shutter_speed,
|
|
||||||
iso: exif_data.iso,
|
|
||||||
date_taken: exif_data.date_taken,
|
|
||||||
created_time: existing
|
|
||||||
.as_ref()
|
|
||||||
.map(|e| e.created_time)
|
|
||||||
.unwrap_or(timestamp),
|
|
||||||
last_modified: timestamp,
|
|
||||||
content_hash: None,
|
|
||||||
size_bytes: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Store or update in database
|
|
||||||
if let Ok(mut dao_lock) = dao.lock() {
|
|
||||||
let result = if existing.is_some() {
|
|
||||||
// Update existing record
|
|
||||||
dao_lock
|
|
||||||
.update_exif(&context, insert_exif)
|
|
||||||
.map(|_| "update")
|
|
||||||
} else {
|
|
||||||
// Insert new record
|
|
||||||
dao_lock.store_exif(&context, insert_exif).map(|_| "insert")
|
|
||||||
};
|
|
||||||
|
|
||||||
match result {
|
|
||||||
Ok(action) => {
|
|
||||||
if action == "update" {
|
|
||||||
println!("↻ {} (updated)", relative_path);
|
|
||||||
} else {
|
|
||||||
println!("✓ {} (inserted)", relative_path);
|
|
||||||
}
|
|
||||||
Ok((action.to_string(), relative_path))
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
eprintln!("✗ {} - Database error: {:?}", relative_path, e);
|
|
||||||
Err(anyhow::anyhow!("Database error"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
eprintln!("✗ {} - Failed to acquire database lock", relative_path);
|
|
||||||
Err(anyhow::anyhow!("Lock error"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
eprintln!("✗ {} - No EXIF data: {:?}", relative_path, e);
|
|
||||||
Err(e)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
// Count results
|
|
||||||
let mut success_count = 0;
|
|
||||||
let mut inserted_count = 0;
|
|
||||||
let mut updated_count = 0;
|
|
||||||
let mut skipped_count = 0;
|
|
||||||
|
|
||||||
for (action, _) in results.iter().flatten() {
|
|
||||||
success_count += 1;
|
|
||||||
match action.as_str() {
|
|
||||||
"insert" => inserted_count += 1,
|
|
||||||
"update" => updated_count += 1,
|
|
||||||
"skip" => skipped_count += 1,
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let error_count = results.len() - success_count - skipped_count;
|
|
||||||
|
|
||||||
println!();
|
|
||||||
println!("===================");
|
|
||||||
println!("Migration complete!");
|
|
||||||
println!("Total images processed: {}", image_files.len());
|
|
||||||
|
|
||||||
if inserted_count > 0 {
|
|
||||||
println!(" New EXIF records inserted: {}", inserted_count);
|
|
||||||
}
|
|
||||||
if updated_count > 0 {
|
|
||||||
println!(" Existing records updated: {}", updated_count);
|
|
||||||
}
|
|
||||||
if skipped_count > 0 {
|
|
||||||
println!(" Skipped (already exists): {}", skipped_count);
|
|
||||||
}
|
|
||||||
if error_count > 0 {
|
|
||||||
println!(" Errors (no EXIF data or failures): {}", error_count);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
Reference in New Issue
Block a user