Merge pull request 'feature/manual-date-override' (#79) from feature/manual-date-override into master
Reviewed-on: #79
This commit was merged in pull request #79.
This commit is contained in:
@@ -0,0 +1,2 @@
|
||||
ALTER TABLE image_exif DROP COLUMN original_date_taken_source;
|
||||
ALTER TABLE image_exif DROP COLUMN original_date_taken;
|
||||
15
migrations/2026-05-06-000200_add_manual_date_override/up.sql
Normal file
15
migrations/2026-05-06-000200_add_manual_date_override/up.sql
Normal file
@@ -0,0 +1,15 @@
|
||||
-- Manual date_taken override: when an operator overrides a row's date via
|
||||
-- POST /image/exif/date, the prior `(date_taken, date_taken_source)` is
|
||||
-- snapshotted into these columns and the live columns hold the new value
|
||||
-- with `date_taken_source = 'manual'`. POST /image/exif/date/clear restores
|
||||
-- the pair and nulls the originals.
|
||||
--
|
||||
-- The waterfall source-name set is now:
|
||||
-- 'exif' | 'exiftool' | 'filename' | 'fs_time' | 'manual'
|
||||
--
|
||||
-- The `idx_image_exif_date_backfill` partial index already filters to
|
||||
-- `date_taken IS NULL OR date_taken_source = 'fs_time'`, so 'manual' rows
|
||||
-- are naturally excluded from the per-tick backfill drain — no index
|
||||
-- change needed.
|
||||
ALTER TABLE image_exif ADD COLUMN original_date_taken BIGINT;
|
||||
ALTER TABLE image_exif ADD COLUMN original_date_taken_source TEXT;
|
||||
@@ -286,6 +286,16 @@ pub struct ExifMetadata {
|
||||
pub gps: Option<GpsCoordinates>,
|
||||
pub capture_settings: Option<CaptureSettings>,
|
||||
pub date_taken: Option<i64>,
|
||||
/// Which step of the canonical-date waterfall populated `date_taken`:
|
||||
/// `"exif" | "exiftool" | "filename" | "fs_time" | "manual"`. NULL when
|
||||
/// `date_taken` itself is NULL.
|
||||
pub date_taken_source: Option<String>,
|
||||
/// When `date_taken_source = "manual"`, the prior `date_taken` snapshot.
|
||||
/// Used by the UI's revert affordance and to label "manually overridden;
|
||||
/// originally X" in the details modal.
|
||||
pub original_date_taken: Option<i64>,
|
||||
/// When `date_taken_source = "manual"`, the prior source.
|
||||
pub original_date_taken_source: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
@@ -370,6 +380,9 @@ impl From<ImageExif> for ExifMetadata {
|
||||
None
|
||||
},
|
||||
date_taken: exif.date_taken,
|
||||
date_taken_source: exif.date_taken_source,
|
||||
original_date_taken: exif.original_date_taken,
|
||||
original_date_taken_source: exif.original_date_taken_source,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -439,6 +439,32 @@ pub trait ExifDao: Sync + Send {
|
||||
source: &str,
|
||||
) -> Result<(), DbError>;
|
||||
|
||||
/// Operator-driven date_taken override (POST /image/exif/date). Snapshots
|
||||
/// the prior `(date_taken, date_taken_source)` into the `original_*`
|
||||
/// pair on first override, then writes the new value with
|
||||
/// `date_taken_source = 'manual'`. Subsequent overrides keep the
|
||||
/// original snapshot intact so a single revert restores the resolver
|
||||
/// result, not whatever override was set just before. Returns the
|
||||
/// post-update row.
|
||||
fn set_manual_date_taken(
|
||||
&mut self,
|
||||
context: &opentelemetry::Context,
|
||||
library_id: i32,
|
||||
rel_path: &str,
|
||||
date_taken: i64,
|
||||
) -> Result<ImageExif, DbError>;
|
||||
|
||||
/// Revert a manual override (POST /image/exif/date/clear): restore
|
||||
/// `date_taken` + `date_taken_source` from the `original_*` snapshot,
|
||||
/// then null both originals. No-op (returns current row unchanged) when
|
||||
/// no override is active.
|
||||
fn clear_manual_date_taken(
|
||||
&mut self,
|
||||
context: &opentelemetry::Context,
|
||||
library_id: i32,
|
||||
rel_path: &str,
|
||||
) -> Result<ImageExif, DbError>;
|
||||
|
||||
/// Single-query backend for `/memories`. Returns
|
||||
/// `(rel_path, date_taken, last_modified)` for rows in `library_id`
|
||||
/// whose `date_taken` falls within `[now - years_back y, now]` and
|
||||
@@ -1172,15 +1198,152 @@ impl ExifDao for SqliteExifDao {
|
||||
|
||||
let mut connection = self.connection.lock().expect("Unable to get ExifDao");
|
||||
|
||||
diesel::update(
|
||||
let result = diesel::update(
|
||||
image_exif
|
||||
.filter(library_id.eq(library_id_val))
|
||||
.filter(rel_path.eq(rel_path_val)),
|
||||
)
|
||||
.set((date_taken.eq(date_taken_val), date_taken_source.eq(source)))
|
||||
.execute(connection.deref_mut());
|
||||
|
||||
match result {
|
||||
Ok(rows) => {
|
||||
// Surface zero-row updates as a warning rather than a
|
||||
// silent success. They mean the (library_id, rel_path)
|
||||
// row was deleted between the `get_rows_needing_date_
|
||||
// backfill` query and this update — rare but possible
|
||||
// when the file watcher is racing the drain. The drain
|
||||
// shouldn't treat that as a hard error, so still
|
||||
// return Ok(()).
|
||||
if rows == 0 {
|
||||
log::debug!(
|
||||
"backfill_date_taken: 0 rows matched lib={} {} \
|
||||
(row likely retired by missing-file scan)",
|
||||
library_id_val,
|
||||
rel_path_val
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
// Preserve the diesel error in the chain so warnings at
|
||||
// the call site can articulate the cause (a flat "Update
|
||||
// error" was useless for triage).
|
||||
Err(e) => Err(anyhow::anyhow!(
|
||||
"diesel update failed (lib={}, rel_path={}, date_taken={}, source={}): {}",
|
||||
library_id_val,
|
||||
rel_path_val,
|
||||
date_taken_val,
|
||||
source,
|
||||
e
|
||||
)),
|
||||
}
|
||||
})
|
||||
.map_err(|e| {
|
||||
// Log before the anyhow message gets stripped by the
|
||||
// DbError-only return type.
|
||||
log::warn!("backfill_date_taken: {}", e);
|
||||
DbError::new(DbErrorKind::UpdateError)
|
||||
})
|
||||
}
|
||||
|
||||
fn set_manual_date_taken(
|
||||
&mut self,
|
||||
context: &opentelemetry::Context,
|
||||
library_id_val: i32,
|
||||
rel_path_val: &str,
|
||||
date_taken_val: i64,
|
||||
) -> Result<ImageExif, DbError> {
|
||||
trace_db_call(context, "update", "set_manual_date_taken", |_span| {
|
||||
use schema::image_exif::dsl::*;
|
||||
|
||||
let mut connection = self.connection.lock().expect("Unable to get ExifDao");
|
||||
|
||||
// Read-modify-write under the dao mutex so the snapshot is
|
||||
// consistent with the value being overwritten. The mutex holds
|
||||
// for the duration of this closure — no other writer can race.
|
||||
let current: ImageExif = image_exif
|
||||
.filter(library_id.eq(library_id_val))
|
||||
.filter(rel_path.eq(rel_path_val))
|
||||
.first(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("row not found"))?;
|
||||
|
||||
// Snapshot only on first override. Subsequent overrides keep
|
||||
// the original snapshot intact so a single revert restores
|
||||
// the resolver-derived value, not the prior override.
|
||||
let (orig_dt, orig_src) = if current.original_date_taken.is_none() {
|
||||
(current.date_taken, current.date_taken_source.clone())
|
||||
} else {
|
||||
(
|
||||
current.original_date_taken,
|
||||
current.original_date_taken_source.clone(),
|
||||
)
|
||||
};
|
||||
|
||||
diesel::update(
|
||||
image_exif
|
||||
.filter(library_id.eq(library_id_val))
|
||||
.filter(rel_path.eq(rel_path_val)),
|
||||
)
|
||||
.set((
|
||||
date_taken.eq(Some(date_taken_val)),
|
||||
date_taken_source.eq(Some("manual".to_string())),
|
||||
original_date_taken.eq(orig_dt),
|
||||
original_date_taken_source.eq(orig_src),
|
||||
))
|
||||
.execute(connection.deref_mut())
|
||||
.map(|_| ())
|
||||
.map_err(|_| anyhow::anyhow!("Update error"))
|
||||
.map_err(|_| anyhow::anyhow!("Update error"))?;
|
||||
|
||||
image_exif
|
||||
.filter(library_id.eq(library_id_val))
|
||||
.filter(rel_path.eq(rel_path_val))
|
||||
.first::<ImageExif>(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("Re-read error"))
|
||||
})
|
||||
.map_err(|_| DbError::new(DbErrorKind::UpdateError))
|
||||
}
|
||||
|
||||
fn clear_manual_date_taken(
|
||||
&mut self,
|
||||
context: &opentelemetry::Context,
|
||||
library_id_val: i32,
|
||||
rel_path_val: &str,
|
||||
) -> Result<ImageExif, DbError> {
|
||||
trace_db_call(context, "update", "clear_manual_date_taken", |_span| {
|
||||
use schema::image_exif::dsl::*;
|
||||
|
||||
let mut connection = self.connection.lock().expect("Unable to get ExifDao");
|
||||
|
||||
let current: ImageExif = image_exif
|
||||
.filter(library_id.eq(library_id_val))
|
||||
.filter(rel_path.eq(rel_path_val))
|
||||
.first(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("row not found"))?;
|
||||
|
||||
// No override active — nothing to revert. Return the current
|
||||
// row unchanged so the endpoint is idempotent.
|
||||
if current.original_date_taken.is_none() {
|
||||
return Ok(current);
|
||||
}
|
||||
|
||||
diesel::update(
|
||||
image_exif
|
||||
.filter(library_id.eq(library_id_val))
|
||||
.filter(rel_path.eq(rel_path_val)),
|
||||
)
|
||||
.set((
|
||||
date_taken.eq(current.original_date_taken),
|
||||
date_taken_source.eq(current.original_date_taken_source.clone()),
|
||||
original_date_taken.eq::<Option<i64>>(None),
|
||||
original_date_taken_source.eq::<Option<String>>(None),
|
||||
))
|
||||
.execute(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("Update error"))?;
|
||||
|
||||
image_exif
|
||||
.filter(library_id.eq(library_id_val))
|
||||
.filter(rel_path.eq(rel_path_val))
|
||||
.first::<ImageExif>(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("Re-read error"))
|
||||
})
|
||||
.map_err(|_| DbError::new(DbErrorKind::UpdateError))
|
||||
}
|
||||
|
||||
@@ -105,7 +105,15 @@ pub struct ImageExif {
|
||||
/// Unix seconds at which the resolve was committed.
|
||||
pub duplicate_decided_at: Option<i64>,
|
||||
/// Which step of the canonical-date waterfall populated `date_taken`.
|
||||
/// Plus `"manual"` when the operator has set it via POST /image/exif/date.
|
||||
pub date_taken_source: Option<String>,
|
||||
/// Snapshot of the prior `date_taken` taken on first manual override.
|
||||
/// NULL when no override is active. POST /image/exif/date/clear restores
|
||||
/// `date_taken` from this column and nulls it back out.
|
||||
pub original_date_taken: Option<i64>,
|
||||
/// Snapshot of the prior `date_taken_source` taken on first manual
|
||||
/// override. NULL when no override is active.
|
||||
pub original_date_taken_source: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Insertable)]
|
||||
|
||||
@@ -126,6 +126,8 @@ diesel::table! {
|
||||
duplicate_of_hash -> Nullable<Text>,
|
||||
duplicate_decided_at -> Nullable<BigInt>,
|
||||
date_taken_source -> Nullable<Text>,
|
||||
original_date_taken -> Nullable<BigInt>,
|
||||
original_date_taken_source -> Nullable<Text>,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
47
src/exif.rs
47
src/exif.rs
@@ -71,6 +71,53 @@ fn read_jpeg_at_ifd(exif: &exif::Exif, path: &Path, ifd: In) -> Option<Vec<u8>>
|
||||
Some(buf)
|
||||
}
|
||||
|
||||
/// Shell out to `exiftool -j -G -n <path>` and return the per-file tag map.
|
||||
///
|
||||
/// `-j` requests JSON; the response is always an array of one element per
|
||||
/// input path. `-G` prefixes each key with the group name (`EXIF:Make`,
|
||||
/// `MakerNotes:LensInfo`, `File:FileSize`, …) so a UI can group the dump.
|
||||
/// `-n` returns numeric / raw values rather than exiftool's pretty-printed
|
||||
/// human strings, which keeps the output stable for clients that want to
|
||||
/// reformat (e.g. divide a focal-length numerator/denominator).
|
||||
///
|
||||
/// Returns:
|
||||
/// - `Ok(Some(value))` — the parsed object for this file.
|
||||
/// - `Ok(None)` — exiftool ran but the array was empty / not an object.
|
||||
/// - `Err(_)` — exiftool isn't on PATH, the spawn failed, or its stderr
|
||||
/// indicates an unsupported file. Caller surfaces a 503 / 422.
|
||||
///
|
||||
/// Used by `GET /image/exif/full` to power Apollo's DETAILS modal "FULL
|
||||
/// EXIF" pane. Per-file shell-out is fine for this on-demand surface;
|
||||
/// the indexer does NOT call this on the hot path (kamadak-exif covers
|
||||
/// the indexed columns; exiftool is the slow-path preview helper).
|
||||
pub fn read_full_exif_via_exiftool(path: &Path) -> Result<Option<serde_json::Value>> {
|
||||
let output = Command::new("exiftool")
|
||||
.arg("-j")
|
||||
.arg("-G")
|
||||
.arg("-n")
|
||||
.arg(path)
|
||||
.output()
|
||||
.map_err(|e| anyhow!("exiftool spawn failed (is it on PATH?): {}", e))?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
return Err(anyhow!(
|
||||
"exiftool exited with {}: {}",
|
||||
output.status,
|
||||
stderr.trim()
|
||||
));
|
||||
}
|
||||
|
||||
let parsed: serde_json::Value = serde_json::from_slice(&output.stdout)
|
||||
.map_err(|e| anyhow!("exiftool returned non-JSON output: {}", e))?;
|
||||
|
||||
// `-j` always wraps the result in an array — pull out the first object.
|
||||
let arr = parsed
|
||||
.as_array()
|
||||
.ok_or_else(|| anyhow!("expected JSON array from exiftool -j"))?;
|
||||
Ok(arr.first().cloned())
|
||||
}
|
||||
|
||||
/// Tags exiftool exposes for embedded JPEG previews, in priority order. The
|
||||
/// largest valid JPEG returned by any of them wins. Different camera makers
|
||||
/// stash their largest preview under different names: Nikon's full-res
|
||||
|
||||
64
src/files.rs
64
src/files.rs
@@ -1475,6 +1475,44 @@ mod tests {
|
||||
|
||||
struct MockExifDao;
|
||||
|
||||
/// Build an `ImageExif` test fixture: every optional column is `None`
/// except the date pair the caller supplies. Used by the MockExifDao
/// date-override stubs below.
fn mock_exif_row(
    library_id: i32,
    rel_path: &str,
    date_taken: Option<i64>,
    date_taken_source: Option<String>,
) -> crate::database::models::ImageExif {
    crate::database::models::ImageExif {
        // Identity.
        id: 1,
        library_id,
        file_path: rel_path.to_string(),
        // Camera / optics — irrelevant to these tests.
        camera_make: None,
        camera_model: None,
        lens_model: None,
        focal_length: None,
        aperture: None,
        shutter_speed: None,
        iso: None,
        // Geometry.
        width: None,
        height: None,
        orientation: None,
        // GPS.
        gps_latitude: None,
        gps_longitude: None,
        gps_altitude: None,
        // Dates — the only caller-controlled fields.
        date_taken,
        date_taken_source,
        created_time: 0,
        last_modified: 0,
        // Hashing / dedupe.
        content_hash: None,
        size_bytes: None,
        phash_64: None,
        dhash_64: None,
        duplicate_of_hash: None,
        duplicate_decided_at: None,
        // Manual-override snapshot — never active in a fresh fixture.
        original_date_taken: None,
        original_date_taken_source: None,
    }
}
|
||||
|
||||
impl ExifDao for MockExifDao {
|
||||
fn store_exif(
|
||||
&mut self,
|
||||
@@ -1509,6 +1547,8 @@ mod tests {
|
||||
duplicate_of_hash: None,
|
||||
duplicate_decided_at: None,
|
||||
date_taken_source: data.date_taken_source.clone(),
|
||||
original_date_taken: None,
|
||||
original_date_taken_source: None,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1553,6 +1593,8 @@ mod tests {
|
||||
duplicate_of_hash: None,
|
||||
duplicate_decided_at: None,
|
||||
date_taken_source: data.date_taken_source.clone(),
|
||||
original_date_taken: None,
|
||||
original_date_taken_source: None,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1666,6 +1708,28 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_manual_date_taken(
|
||||
&mut self,
|
||||
_context: &opentelemetry::Context,
|
||||
library_id: i32,
|
||||
rel_path: &str,
|
||||
date_taken: i64,
|
||||
) -> Result<crate::database::models::ImageExif, DbError> {
|
||||
// Mock — files.rs tests don't exercise the date-override endpoints.
|
||||
// Returning a synthetic row keeps the trait satisfied without
|
||||
// depending on private DbError constructors.
|
||||
Ok(mock_exif_row(library_id, rel_path, Some(date_taken), Some("manual".to_string())))
|
||||
}
|
||||
|
||||
fn clear_manual_date_taken(
|
||||
&mut self,
|
||||
_context: &opentelemetry::Context,
|
||||
library_id: i32,
|
||||
rel_path: &str,
|
||||
) -> Result<crate::database::models::ImageExif, DbError> {
|
||||
Ok(mock_exif_row(library_id, rel_path, None, None))
|
||||
}
|
||||
|
||||
fn get_memories_in_window(
|
||||
&mut self,
|
||||
_context: &opentelemetry::Context,
|
||||
|
||||
244
src/main.rs
244
src/main.rs
@@ -41,7 +41,7 @@ use urlencoding::decode;
|
||||
use crate::ai::InsightGenerator;
|
||||
use crate::auth::login;
|
||||
use crate::data::*;
|
||||
use crate::database::models::InsertImageExif;
|
||||
use crate::database::models::{ImageExif, InsertImageExif};
|
||||
use crate::database::*;
|
||||
use crate::files::{
|
||||
RealFileSystem, RefreshThumbnailsMessage, is_image_or_video, is_valid_full_path, move_file,
|
||||
@@ -593,6 +593,245 @@ async fn set_image_gps(
|
||||
}
|
||||
}
|
||||
|
||||
/// `GET /image/exif/full?path=&library=` — full per-file EXIF dump via
|
||||
/// exiftool, for the DETAILS modal's "FULL EXIF" pane. Strictly richer
|
||||
/// than `/image/metadata`'s curated subset (every group exiftool can
|
||||
/// see: EXIF, File, MakerNotes, Composite, ICC_Profile, IPTC, …).
|
||||
///
|
||||
/// On-demand only — the watcher / indexer never calls this. Falls back
|
||||
/// to 503 when exiftool isn't installed (deployer guidance is the same
|
||||
/// as for the RAW preview pipeline: install exiftool for full coverage).
|
||||
#[get("/image/exif/full")]
|
||||
async fn get_full_exif(
|
||||
_: Claims,
|
||||
request: HttpRequest,
|
||||
path: web::Query<ThumbnailRequest>,
|
||||
app_state: Data<AppState>,
|
||||
) -> impl Responder {
|
||||
let tracer = global_tracer();
|
||||
let context = extract_context_from_request(&request);
|
||||
let mut span = tracer.start_with_context("get_full_exif", &context);
|
||||
|
||||
let library = libraries::resolve_library_param(&app_state, path.library.as_deref())
|
||||
.ok()
|
||||
.flatten()
|
||||
.unwrap_or_else(|| app_state.primary_library());
|
||||
|
||||
// Same union-mode fallback as get_file_metadata — the file may live
|
||||
// under a sibling library when the requested one's path resolves but
|
||||
// doesn't actually contain the bytes.
|
||||
let resolved = is_valid_full_path(&library.root_path, &path.path, false)
|
||||
.filter(|p| p.exists())
|
||||
.map(|p| (library, p))
|
||||
.or_else(|| {
|
||||
app_state.libraries.iter().find_map(|lib| {
|
||||
if lib.id == library.id {
|
||||
return None;
|
||||
}
|
||||
is_valid_full_path(&lib.root_path, &path.path, false)
|
||||
.filter(|p| p.exists())
|
||||
.map(|p| (lib, p))
|
||||
})
|
||||
});
|
||||
|
||||
let (resolved_library, full_path) = match resolved {
|
||||
Some(v) => v,
|
||||
None => {
|
||||
span.set_status(Status::error("file not found"));
|
||||
return HttpResponse::NotFound().body("File not found");
|
||||
}
|
||||
};
|
||||
|
||||
// exiftool spawn is blocking — keep it off the actix worker by
|
||||
// running on the blocking pool. ~50–200 ms typical for a JPEG;
|
||||
// longer for RAW with rich MakerNotes.
|
||||
let exif_result =
|
||||
web::block(move || crate::exif::read_full_exif_via_exiftool(&full_path)).await;
|
||||
|
||||
match exif_result {
|
||||
Ok(Ok(Some(tags))) => {
|
||||
span.set_status(Status::Ok);
|
||||
HttpResponse::Ok().json(serde_json::json!({
|
||||
"library_id": resolved_library.id,
|
||||
"library_name": resolved_library.name,
|
||||
"tags": tags,
|
||||
}))
|
||||
}
|
||||
Ok(Ok(None)) => {
|
||||
// exiftool ran but produced no output for this file — treat as
|
||||
// empty rather than an error so the modal renders "no tags"
|
||||
// gracefully.
|
||||
HttpResponse::Ok().json(serde_json::json!({
|
||||
"library_id": resolved_library.id,
|
||||
"library_name": resolved_library.name,
|
||||
"tags": serde_json::Value::Object(Default::default()),
|
||||
}))
|
||||
}
|
||||
Ok(Err(e)) => {
|
||||
let msg = format!("exiftool failed: {}", e);
|
||||
error!("{}", msg);
|
||||
span.set_status(Status::error(msg.clone()));
|
||||
// 503 — typically "exiftool isn't on PATH" or a transient spawn
|
||||
// failure. Apollo surfaces a hint in the modal.
|
||||
HttpResponse::ServiceUnavailable().body(msg)
|
||||
}
|
||||
Err(e) => {
|
||||
let msg = format!("blocking-pool error: {}", e);
|
||||
error!("{}", msg);
|
||||
span.set_status(Status::error(msg.clone()));
|
||||
HttpResponse::InternalServerError().body(msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Body for `POST /image/exif/date` — operator-driven date_taken override.
|
||||
/// `date_taken` is unix seconds (matches `image_exif.date_taken`'s convention
|
||||
/// — naive local reinterpreted as UTC, not real UTC; the Apollo client passes
|
||||
/// through the same value the photo carousel rendered before edit).
|
||||
#[derive(serde::Deserialize)]
|
||||
struct SetDateRequest {
|
||||
path: String,
|
||||
library: Option<String>,
|
||||
date_taken: i64,
|
||||
}
|
||||
|
||||
/// Body for `POST /image/exif/date/clear` — revert a manual override and
|
||||
/// restore the resolver-derived `(date_taken, date_taken_source)` pair from
|
||||
/// the snapshot.
|
||||
#[derive(serde::Deserialize)]
|
||||
struct ClearDateRequest {
|
||||
path: String,
|
||||
library: Option<String>,
|
||||
}
|
||||
|
||||
/// Build a `MetadataResponse` for the date endpoints. Mirrors
|
||||
/// `get_file_metadata`'s shape so the client gets a single source of truth
|
||||
/// after every mutation. Filesystem metadata is best-effort: if the file is
|
||||
/// on a stale mount or moved, the DB-side override still succeeds and the
|
||||
/// response carries `created=None, modified=None, size=0`. The DB row's
|
||||
/// updated EXIF is what matters here.
|
||||
fn build_metadata_response_for_date_mutation(
|
||||
library: &libraries::Library,
|
||||
rel_path: &str,
|
||||
exif: ImageExif,
|
||||
) -> MetadataResponse {
|
||||
let full_path = is_valid_full_path(&library.root_path, &rel_path.to_string(), false);
|
||||
let fs_meta = full_path
|
||||
.as_ref()
|
||||
.filter(|p| p.exists())
|
||||
.and_then(|p| std::fs::metadata(p).ok());
|
||||
let mut response: MetadataResponse = match fs_meta {
|
||||
Some(m) => m.into(),
|
||||
None => MetadataResponse {
|
||||
created: None,
|
||||
modified: None,
|
||||
size: 0,
|
||||
exif: None,
|
||||
filename_date: None,
|
||||
library_id: None,
|
||||
library_name: None,
|
||||
},
|
||||
};
|
||||
response.exif = Some(exif.into());
|
||||
response.library_id = Some(library.id);
|
||||
response.library_name = Some(library.name.clone());
|
||||
response.filename_date =
|
||||
memories::extract_date_from_filename(rel_path).map(|dt| dt.timestamp());
|
||||
response
|
||||
}
|
||||
|
||||
#[post("/image/exif/date")]
|
||||
async fn set_image_date(
|
||||
_: Claims,
|
||||
request: HttpRequest,
|
||||
body: web::Json<SetDateRequest>,
|
||||
app_state: Data<AppState>,
|
||||
exif_dao: Data<Mutex<Box<dyn ExifDao>>>,
|
||||
) -> impl Responder {
|
||||
let tracer = global_tracer();
|
||||
let context = extract_context_from_request(&request);
|
||||
let mut span = tracer.start_with_context("set_image_date", &context);
|
||||
let span_context =
|
||||
opentelemetry::Context::new().with_remote_span_context(span.span_context().clone());
|
||||
|
||||
let library = libraries::resolve_library_param(&app_state, body.library.as_deref())
|
||||
.ok()
|
||||
.flatten()
|
||||
.unwrap_or_else(|| app_state.primary_library());
|
||||
|
||||
// Path normalization matches set_image_gps so a Windows-import client
|
||||
// doesn't end up with a backslash variant that misses the row.
|
||||
let normalized_path = body.path.replace('\\', "/");
|
||||
|
||||
let updated = {
|
||||
let mut dao = exif_dao.lock().expect("Unable to lock ExifDao");
|
||||
dao.set_manual_date_taken(&span_context, library.id, &normalized_path, body.date_taken)
|
||||
};
|
||||
|
||||
match updated {
|
||||
Ok(row) => {
|
||||
span.set_status(Status::Ok);
|
||||
HttpResponse::Ok().json(build_metadata_response_for_date_mutation(
|
||||
&library,
|
||||
&normalized_path,
|
||||
row,
|
||||
))
|
||||
}
|
||||
Err(e) => {
|
||||
let msg = format!("set_manual_date_taken failed: {:?}", e);
|
||||
error!("{}", msg);
|
||||
span.set_status(Status::error(msg.clone()));
|
||||
// Likely "row not found" — the file isn't indexed under this
|
||||
// (library, path). 404 lets the client distinguish from a 5xx.
|
||||
HttpResponse::NotFound().body(msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[post("/image/exif/date/clear")]
|
||||
async fn clear_image_date(
|
||||
_: Claims,
|
||||
request: HttpRequest,
|
||||
body: web::Json<ClearDateRequest>,
|
||||
app_state: Data<AppState>,
|
||||
exif_dao: Data<Mutex<Box<dyn ExifDao>>>,
|
||||
) -> impl Responder {
|
||||
let tracer = global_tracer();
|
||||
let context = extract_context_from_request(&request);
|
||||
let mut span = tracer.start_with_context("clear_image_date", &context);
|
||||
let span_context =
|
||||
opentelemetry::Context::new().with_remote_span_context(span.span_context().clone());
|
||||
|
||||
let library = libraries::resolve_library_param(&app_state, body.library.as_deref())
|
||||
.ok()
|
||||
.flatten()
|
||||
.unwrap_or_else(|| app_state.primary_library());
|
||||
|
||||
let normalized_path = body.path.replace('\\', "/");
|
||||
|
||||
let updated = {
|
||||
let mut dao = exif_dao.lock().expect("Unable to lock ExifDao");
|
||||
dao.clear_manual_date_taken(&span_context, library.id, &normalized_path)
|
||||
};
|
||||
|
||||
match updated {
|
||||
Ok(row) => {
|
||||
span.set_status(Status::Ok);
|
||||
HttpResponse::Ok().json(build_metadata_response_for_date_mutation(
|
||||
&library,
|
||||
&normalized_path,
|
||||
row,
|
||||
))
|
||||
}
|
||||
Err(e) => {
|
||||
let msg = format!("clear_manual_date_taken failed: {:?}", e);
|
||||
error!("{}", msg);
|
||||
span.set_status(Status::error(msg.clone()));
|
||||
HttpResponse::NotFound().body(msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize)]
|
||||
struct UploadQuery {
|
||||
library: Option<String>,
|
||||
@@ -1697,6 +1936,9 @@ fn main() -> std::io::Result<()> {
|
||||
.service(delete_favorite)
|
||||
.service(get_file_metadata)
|
||||
.service(set_image_gps)
|
||||
.service(set_image_date)
|
||||
.service(clear_image_date)
|
||||
.service(get_full_exif)
|
||||
.service(memories::list_memories)
|
||||
.service(ai::generate_insight_handler)
|
||||
.service(ai::generate_agentic_insight_handler)
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use actix_web::web::Data;
|
||||
use actix_web::{HttpRequest, HttpResponse, Responder, get, web};
|
||||
use chrono::LocalResult::{Ambiguous, Single};
|
||||
use chrono::{DateTime, FixedOffset, Local, LocalResult, NaiveDate, TimeZone};
|
||||
use chrono::{DateTime, FixedOffset, Local, NaiveDate, TimeZone, Utc};
|
||||
use log::{debug, trace, warn};
|
||||
use opentelemetry::KeyValue;
|
||||
use opentelemetry::trace::{Span, Status, TraceContextExt, Tracer};
|
||||
@@ -134,6 +133,15 @@ pub struct MemoriesResponse {
|
||||
}
|
||||
|
||||
pub fn extract_date_from_filename(filename: &str) -> Option<DateTime<FixedOffset>> {
|
||||
// Filenames carry only digits — no timezone. We deliberately interpret
|
||||
// them as UTC so `.timestamp()` returns the wall-clock-as-UTC unix
|
||||
// seconds, matching the "naive local reinterpreted as UTC" convention
|
||||
// image_exif.date_taken uses for kamadak-exif DateTimeOriginal (which
|
||||
// is also naive). Anything else (Local::from_local_datetime, the
|
||||
// previous behavior) shifted filename-sourced dates by the SERVER's
|
||||
// TZ offset relative to UTC, making them disagree with EXIF-sourced
|
||||
// dates by hours and double-shifting through Apollo's photo matcher
|
||||
// (which re-anchors naive-as-UTC via the browser TZ).
|
||||
let build_date_from_ymd_capture =
|
||||
|captures: ®ex::Captures| -> Option<DateTime<FixedOffset>> {
|
||||
let year = captures.get(1)?.as_str().parse::<i32>().ok()?;
|
||||
@@ -143,16 +151,8 @@ pub fn extract_date_from_filename(filename: &str) -> Option<DateTime<FixedOffset
|
||||
let min = captures.get(5)?.as_str().parse::<u32>().ok()?;
|
||||
let sec = captures.get(6)?.as_str().parse::<u32>().ok()?;
|
||||
|
||||
match Local.from_local_datetime(
|
||||
&NaiveDate::from_ymd_opt(year, month, day)?.and_hms_opt(hour, min, sec)?,
|
||||
) {
|
||||
Single(dt) => Some(dt.fixed_offset()),
|
||||
Ambiguous(early_dt, _) => Some(early_dt.fixed_offset()),
|
||||
LocalResult::None => {
|
||||
warn!("Weird local date: {:?}", filename);
|
||||
None
|
||||
}
|
||||
}
|
||||
let naive = NaiveDate::from_ymd_opt(year, month, day)?.and_hms_opt(hour, min, sec)?;
|
||||
Some(Utc.from_utc_datetime(&naive).fixed_offset())
|
||||
};
|
||||
|
||||
// 1. Screenshot format: Screenshot_2014-06-01-20-44-50.png
|
||||
|
||||
Reference in New Issue
Block a user