EXIF GPS write: POST /image/exif/gps via exiftool
New endpoint accepts {path, library, latitude, longitude} and shells
out to exiftool to write GPSLatitude/GPSLongitude (with N/S, E/W refs)
into the file's EXIF in place. After the write, the handler
re-extracts EXIF and updates the image_exif row so the DB stays in
sync — the response carries the updated metadata block in one
round-trip. Falls through to store_exif if the row is missing.
`exif::write_gps` is the small helper. `-overwrite_original` so no
`_original` backup copy is left behind. Validates lat/lon range + supports_exif
before spawning exiftool. Format support matches the existing read
path (JPEG / TIFF / RAW / HEIF / PNG / WebP) — videos still need a
different writer and aren't covered.
Apollo's "+ PIN" carousel button (separate commit on the Apollo side)
calls this through /api/photos/exif/gps. Drive-by: cargo fmt one-line
collapse on apollo_client.rs.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
163
src/main.rs
163
src/main.rs
@@ -394,6 +394,168 @@ async fn get_file_metadata(
|
||||
}
|
||||
}
|
||||
|
||||
/// Body for `POST /image/exif/gps` — write GPS coordinates into a file's
|
||||
/// EXIF in place. Only `path` + `latitude` + `longitude` are required.
|
||||
/// `library` is optional (falls back to the primary library) and matches
|
||||
/// the convention of the other path-keyed routes.
|
||||
#[derive(serde::Deserialize)]
|
||||
struct SetGpsRequest {
|
||||
path: String,
|
||||
library: Option<String>,
|
||||
latitude: f64,
|
||||
longitude: f64,
|
||||
}
|
||||
|
||||
#[post("/image/exif/gps")]
|
||||
async fn set_image_gps(
|
||||
_: Claims,
|
||||
request: HttpRequest,
|
||||
body: web::Json<SetGpsRequest>,
|
||||
app_state: Data<AppState>,
|
||||
exif_dao: Data<Mutex<Box<dyn ExifDao>>>,
|
||||
) -> impl Responder {
|
||||
let tracer = global_tracer();
|
||||
let context = extract_context_from_request(&request);
|
||||
let mut span = tracer.start_with_context("set_image_gps", &context);
|
||||
let span_context =
|
||||
opentelemetry::Context::new().with_remote_span_context(span.span_context().clone());
|
||||
|
||||
let library = libraries::resolve_library_param(&app_state, body.library.as_deref())
|
||||
.ok()
|
||||
.flatten()
|
||||
.unwrap_or_else(|| app_state.primary_library());
|
||||
|
||||
// Same fallback as get_file_metadata: union-mode means a file may
|
||||
// resolve under a sibling library.
|
||||
let resolved = is_valid_full_path(&library.root_path, &body.path, false)
|
||||
.filter(|p| p.exists())
|
||||
.map(|p| (library, p))
|
||||
.or_else(|| {
|
||||
app_state.libraries.iter().find_map(|lib| {
|
||||
if lib.id == library.id {
|
||||
return None;
|
||||
}
|
||||
is_valid_full_path(&lib.root_path, &body.path, false)
|
||||
.filter(|p| p.exists())
|
||||
.map(|p| (lib, p))
|
||||
})
|
||||
});
|
||||
|
||||
let (resolved_library, full_path) = match resolved {
|
||||
Some(v) => v,
|
||||
None => {
|
||||
span.set_status(Status::error("file not found"));
|
||||
return HttpResponse::NotFound().body("File not found");
|
||||
}
|
||||
};
|
||||
|
||||
if !exif::supports_exif(&full_path) {
|
||||
return HttpResponse::BadRequest().body("File format does not support EXIF GPS write");
|
||||
}
|
||||
|
||||
if let Err(e) = exif::write_gps(&full_path, body.latitude, body.longitude) {
|
||||
let msg = format!("exiftool write failed: {}", e);
|
||||
error!("{}", msg);
|
||||
span.set_status(Status::error(msg.clone()));
|
||||
return HttpResponse::InternalServerError().body(msg);
|
||||
}
|
||||
|
||||
// Re-read EXIF from disk (the write path doesn't tell us the rest of
|
||||
// the parsed fields back, and we want the DB row to match what
|
||||
// extract_exif_from_path would now produce). Update the existing row
|
||||
// rather than insert — this endpoint is invoked on already-indexed
|
||||
// files only.
|
||||
let extracted = match exif::extract_exif_from_path(&full_path) {
|
||||
Ok(d) => d,
|
||||
Err(e) => {
|
||||
// GPS was written successfully but re-extraction failed; surface
|
||||
// a 500 because the DB will now disagree with disk until the
|
||||
// next file scan rewrites it.
|
||||
let msg = format!("EXIF re-read failed after write: {}", e);
|
||||
error!("{}", msg);
|
||||
return HttpResponse::InternalServerError().body(msg);
|
||||
}
|
||||
};
|
||||
let now = Utc::now().timestamp();
|
||||
let normalized_path = body.path.replace('\\', "/");
|
||||
let insert_exif = InsertImageExif {
|
||||
library_id: resolved_library.id,
|
||||
file_path: normalized_path.clone(),
|
||||
camera_make: extracted.camera_make,
|
||||
camera_model: extracted.camera_model,
|
||||
lens_model: extracted.lens_model,
|
||||
width: extracted.width,
|
||||
height: extracted.height,
|
||||
orientation: extracted.orientation,
|
||||
gps_latitude: extracted.gps_latitude.map(|v| v as f32),
|
||||
gps_longitude: extracted.gps_longitude.map(|v| v as f32),
|
||||
gps_altitude: extracted.gps_altitude.map(|v| v as f32),
|
||||
focal_length: extracted.focal_length.map(|v| v as f32),
|
||||
aperture: extracted.aperture.map(|v| v as f32),
|
||||
shutter_speed: extracted.shutter_speed,
|
||||
iso: extracted.iso,
|
||||
date_taken: extracted.date_taken,
|
||||
// Created_time is preserved by update_exif (it doesn't touch the
|
||||
// column); pass any int — it's ignored in the UPDATE statement.
|
||||
created_time: now,
|
||||
last_modified: now,
|
||||
// Hash + size aren't touched in update_exif either, but the file
|
||||
// bytes did change — best-effort recompute so the new hash lands
|
||||
// on the next call to get_exif. Failure here just leaves the old
|
||||
// values in place.
|
||||
content_hash: content_hash::compute(&full_path)
|
||||
.ok()
|
||||
.map(|c| c.content_hash),
|
||||
size_bytes: content_hash::compute(&full_path).ok().map(|c| c.size_bytes),
|
||||
};
|
||||
|
||||
let updated = {
|
||||
let mut dao = exif_dao.lock().expect("Unable to lock ExifDao");
|
||||
// If the row doesn't exist yet (file isn't indexed for some reason),
|
||||
// insert instead so the GPS write is at least visible the moment
|
||||
// the watcher catches up.
|
||||
match dao.get_exif(&span_context, &normalized_path) {
|
||||
Ok(Some(_)) => dao.update_exif(&span_context, insert_exif),
|
||||
Ok(None) => dao.store_exif(&span_context, insert_exif),
|
||||
Err(_) => dao.update_exif(&span_context, insert_exif),
|
||||
}
|
||||
};
|
||||
|
||||
match updated {
|
||||
Ok(row) => {
|
||||
// Mirror the file metadata so the client gets the new size /
|
||||
// mtime in the same response and can refresh its cached
|
||||
// metadata block in one round-trip.
|
||||
let fs_meta = std::fs::metadata(&full_path).ok();
|
||||
let mut response: MetadataResponse = match fs_meta {
|
||||
Some(m) => m.into(),
|
||||
None => MetadataResponse {
|
||||
created: None,
|
||||
modified: None,
|
||||
size: 0,
|
||||
exif: None,
|
||||
filename_date: None,
|
||||
library_id: None,
|
||||
library_name: None,
|
||||
},
|
||||
};
|
||||
response.exif = Some(row.into());
|
||||
response.library_id = Some(resolved_library.id);
|
||||
response.library_name = Some(resolved_library.name.clone());
|
||||
response.filename_date =
|
||||
memories::extract_date_from_filename(&body.path).map(|dt| dt.timestamp());
|
||||
span.set_status(Status::Ok);
|
||||
HttpResponse::Ok().json(response)
|
||||
}
|
||||
Err(e) => {
|
||||
let msg = format!("EXIF DB update failed: {:?}", e);
|
||||
error!("{}", msg);
|
||||
span.set_status(Status::error(msg.clone()));
|
||||
HttpResponse::InternalServerError().body(msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize)]
|
||||
struct UploadQuery {
|
||||
library: Option<String>,
|
||||
@@ -1415,6 +1577,7 @@ fn main() -> std::io::Result<()> {
|
||||
.service(put_add_favorite)
|
||||
.service(delete_favorite)
|
||||
.service(get_file_metadata)
|
||||
.service(set_image_gps)
|
||||
.service(memories::list_memories)
|
||||
.service(ai::generate_insight_handler)
|
||||
.service(ai::generate_agentic_insight_handler)
|
||||
|
||||
Reference in New Issue
Block a user