feature/date-backfill-null-only #93

Merged
cameron merged 2 commits from feature/date-backfill-null-only into master 2026-05-12 18:42:22 +00:00
4 changed files with 59 additions and 23 deletions
Showing only changes of commit 2d56047497 - Show all commits

View File

@@ -398,14 +398,21 @@ date) is unchanged.
The `backfill_missing_date_taken` drain (`src/backfill.rs`) runs every
watcher tick alongside `backfill_unhashed_backlog` (also `src/backfill.rs`). It loads up to
`DATE_BACKFILL_MAX_PER_TICK` rows (default 500) where
`date_taken IS NULL OR date_taken_source = 'fs_time'` (backed by the
`idx_image_exif_date_backfill` partial index), runs the waterfall
batch via `resolve_dates_batch`, and writes results via the
`backfill_date_taken` DAO method (touches only `date_taken` +
`date_taken_source` so EXIF / hash / perceptual columns are
preserved). `filename`-sourced rows are intentionally not re-resolved:
the regex is authoritative when it matches, and re-running exiftool
won't change the answer.
`date_taken IS NULL` (backed by the `idx_image_exif_date_backfill`
partial index), runs the waterfall batch via `resolve_dates_batch`,
and writes results via the `backfill_date_taken` DAO method (touches
only `date_taken` + `date_taken_source` so EXIF / hash / perceptual
columns are preserved). Resolved rows — including the ones the
waterfall could only resolve via `fs_time` — are not re-eligible:
the resolver is deterministic on file bytes + filename + fs metadata,
so re-running on the same inputs lands on the same source every time.
An earlier version included `date_taken_source = 'fs_time'` in the
eligibility predicate, but with `ORDER BY id ASC LIMIT 500` it spun on
the same lowest-id rows in perpetuity and held the SQLite write lock
long enough to starve face-PATCH writers (their 5s busy_timeout
expired, surfacing as HTTP 500 responses). If
a stronger tool comes online (exiftool install, new filename regex),
re-resolve out-of-band rather than re-introducing the steady-state
eligibility.
`/memories` is a single SQL query against this column
(`get_memories_in_window` in `src/database/mod.rs`), using

View File

@@ -0,0 +1,5 @@
-- Restore the original (wider) date-backfill partial index from
-- 2026-05-06-000000_add_date_taken_source, which also matched
-- `fs_time`-sourced rows so the drain would retry them.
-- NOTE(review): presumably this is the down.sql paired with the
-- NULL-only up migration in this change — confirm from the filename.
-- Rolling back to this predicate re-introduces the drain-spin /
-- writer-starvation behavior the up migration describes; that is
-- acceptable here because the pre-change code expects this index shape.
DROP INDEX IF EXISTS idx_image_exif_date_backfill;
CREATE INDEX idx_image_exif_date_backfill
ON image_exif (library_id, id)
WHERE date_taken IS NULL OR date_taken_source = 'fs_time';

View File

@@ -0,0 +1,18 @@
-- Narrow the date-backfill partial index to NULL-only rows.
--
-- The original index (2026-05-06-000000_add_date_taken_source) also matched
-- `date_taken_source = 'fs_time'` so the drain could "re-resolve weak
-- entries when better tools become available." In practice the resolver
-- is deterministic on file bytes + filename + fs metadata: a row that
-- landed on fs_time once will land on fs_time again on every subsequent
-- tick. With `ORDER BY id ASC LIMIT 500`, the drain spun on the same
-- lowest-id fs_time rows in perpetuity, never advancing, while hammering
-- the SQLite write lock once per row and starving other writers (face
-- PATCHes were hitting busy_timeout and returning 500). Drop fs_time
-- from the eligibility set; if exiftool / a new filename pattern ever
-- comes online, a one-shot operator command can re-resolve.
--
-- Column order matters: the drain query filters on library_id and
-- orders by id, so (library_id, id) lets the planner satisfy both the
-- filter and the ORDER BY straight off this partial index.
DROP INDEX IF EXISTS idx_image_exif_date_backfill;
CREATE INDEX idx_image_exif_date_backfill
ON image_exif (library_id, id)
WHERE date_taken IS NULL;

View File

@@ -414,14 +414,21 @@ pub trait ExifDao: Sync + Send {
size_bytes: i64,
) -> Result<(), DbError>;
/// Return image_exif rows that need their `date_taken` re-resolved by
/// the canonical-date waterfall (see `crate::date_resolver`):
/// either no source ever ran (`date_taken IS NULL`), or only the
/// weakest fallback resolved it (`date_taken_source = 'fs_time'`).
/// Returns `(library_id, rel_path)`. The caller filters to its own
/// library on the way through; rows from other libraries fall to the
/// next library's tick. Backed by the partial index
/// Return image_exif rows that need their `date_taken` resolved by the
/// canonical-date waterfall (see `crate::date_resolver`): `date_taken
/// IS NULL`. Returns `(library_id, rel_path)`. The caller filters to
/// its own library on the way through; rows from other libraries fall
/// to the next library's tick. Backed by the partial index
/// `idx_image_exif_date_backfill`.
///
/// `fs_time`-sourced rows are intentionally excluded even though they
/// represent the weakest resolution: the resolver is deterministic on
/// file bytes + filename + fs metadata, so a row that landed on
/// fs_time once will land there again on every retry. Including them
/// in the drain caused it to spin on the same lowest-id rows forever
/// and starve other SQLite writers (face PATCHes hitting busy_timeout).
/// If a stronger tool comes online (exiftool install, new filename
/// regex), an operator can issue a one-shot re-resolve out-of-band.
fn get_rows_needing_date_backfill(
&mut self,
context: &opentelemetry::Context,
@@ -1240,11 +1247,10 @@ impl ExifDao for SqliteExifDao {
let mut connection = self.connection.lock().expect("Unable to get ExifDao");
// The partial index is on `(library_id, id) WHERE date_taken
// IS NULL OR date_taken_source = 'fs_time'`, so the planner
// hits it directly when both predicates are present.
// IS NULL`, so the planner hits it directly.
image_exif
.filter(library_id.eq(library_id_val))
.filter(date_taken.is_null().or(date_taken_source.eq("fs_time")))
.filter(date_taken.is_null())
.select((library_id, rel_path))
.order(id.asc())
.limit(limit)
@@ -2395,10 +2401,11 @@ mod exif_dao_tests {
}
#[test]
fn get_rows_needing_date_backfill_returns_null_and_fs_time() {
fn get_rows_needing_date_backfill_returns_null_only() {
let mut dao = setup_two_libraries();
// Each row exercises a different source: null, fs_time (eligible),
// filename and exif (skipped).
// Each row exercises a different source. Only NULL is eligible;
// fs_time was removed from the drain because re-resolving it is
// deterministic no-op work that starves other writers.
insert_row_with_source(&mut dao, 1, "main/null.jpg", None, None);
insert_row_with_source(&mut dao, 1, "main/fs.jpg", Some(123), Some("fs_time"));
insert_row_with_source(&mut dao, 1, "main/name.jpg", Some(456), Some("filename"));
@@ -2408,9 +2415,8 @@ mod exif_dao_tests {
let rows = dao.get_rows_needing_date_backfill(&ctx(), 1, 100).unwrap();
let paths: Vec<String> = rows.into_iter().map(|(_, p)| p).collect();
assert_eq!(paths.len(), 2, "expected null + fs_time eligible only");
assert_eq!(paths.len(), 1, "expected only NULL-date rows");
assert!(paths.contains(&"main/null.jpg".to_string()));
assert!(paths.contains(&"main/fs.jpg".to_string()));
}
#[test]