Compare commits

..

1 Commits

Author SHA1 Message Date
Cameron Cordes
7e1c4ab318 backfill_date_taken: surface the actual diesel error in warnings
The DAO swallowed every diesel::update failure as a flat
`anyhow!("Update error")`, then trace_db_call further reduced it to
`DbError { kind: UpdateError }`. Operators saw "update failed for lib
2 Snapchat/foo.mp4: DbError { kind: UpdateError }" with no clue why
(constraint violation? type mismatch? row vanished mid-flight? DB
locked?).

Three changes:
- Preserve the diesel error in the anyhow chain along with the input
  params (lib, rel_path, date_taken, source) so the cause is visible.
- Log the chain at warn-level inside the DAO before the trace wrapper
  collapses it to DbErrorKind::UpdateError, so the warning at the
  call site finally has something diagnosable next to it.
- Treat zero-row updates as a debug-level "row likely retired by the
  missing-file scan" rather than a hard failure — that case is benign
  and shouldn't poison the drain's error tally.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-07 11:07:17 -04:00
7 changed files with 155 additions and 653 deletions

View File

@@ -48,11 +48,6 @@ pub struct GeneratePhotoInsightRequest {
/// falls back to `DEFAULT_FEWSHOT_INSIGHT_IDS`.
#[serde(default)]
pub fewshot_insight_ids: Option<Vec<i32>>,
/// When true, drop `store_entity` / `store_fact` from the tool palette
/// for this run. Use for one-off explorations (caption-style prompts,
/// experimentation) that shouldn't pollute the persistent knowledge KB.
#[serde(default)]
pub disable_writes: bool,
}
#[derive(Debug, Deserialize)]
@@ -395,7 +390,6 @@ pub async fn generate_agentic_insight_handler(
request.backend.clone(),
fewshot_examples,
fewshot_ids,
request.disable_writes,
)
.await;
@@ -648,10 +642,6 @@ pub struct ChatTurnHttpRequest {
pub max_iterations: Option<usize>,
#[serde(default)]
pub amend: bool,
/// Drop store_entity / store_fact from the tool palette for this turn —
/// useful for hypothetical/exploration chats that shouldn't pollute the KB.
#[serde(default)]
pub disable_writes: bool,
}
#[derive(Debug, Serialize)]
@@ -706,7 +696,6 @@ pub async fn chat_turn_handler(
min_p: request.min_p,
max_iterations: request.max_iterations,
amend: request.amend,
disable_writes: request.disable_writes,
};
match app_state.insight_chat.chat_turn(chat_req).await {
@@ -921,7 +910,6 @@ pub async fn chat_stream_handler(
min_p: request.min_p,
max_iterations: request.max_iterations,
amend: request.amend,
disable_writes: request.disable_writes,
};
let service = app_state.insight_chat.clone();

View File

@@ -48,10 +48,6 @@ pub struct ChatTurnRequest {
/// When true, write a new insight row (regenerating title) instead of
/// updating training_messages on the existing row.
pub amend: bool,
/// When true, drop `store_entity` / `store_fact` from the tool palette
/// for this turn. Use to explore alternate phrasings or run
/// hypothetical chats without polluting the persistent KB.
pub disable_writes: bool,
}
#[derive(Debug)]
@@ -366,7 +362,6 @@ impl InsightChatService {
let tools = InsightGenerator::build_tool_definitions(
offer_describe_tool,
self.generator.apollo_enabled(),
req.disable_writes,
);
// Image base64 only needed when describe_photo is on the menu. Load
@@ -402,9 +397,6 @@ impl InsightChatService {
// tighter and dispatching tools through the shared executor.
let loop_span = tracer.start_with_context("ai.chat.loop", &insight_cx);
let loop_cx = insight_cx.with_span(loop_span);
// Memoize describe_photo for this turn so repeated calls don't
// produce conflicting visual descriptions in the assistant transcript.
let describe_cache: tokio::sync::Mutex<Option<String>> = tokio::sync::Mutex::new(None);
let mut tool_calls_made = 0usize;
let mut iterations_used = 0usize;
let mut last_prompt_eval_count: Option<i32> = None;
@@ -453,7 +445,6 @@ impl InsightChatService {
&image_base64,
&normalized,
&loop_cx,
Some(&describe_cache),
)
.await;
messages.push(ChatMessage::tool_result(result));
@@ -802,7 +793,6 @@ impl InsightChatService {
let tools = InsightGenerator::build_tool_definitions(
offer_describe_tool,
self.generator.apollo_enabled(),
req.disable_writes,
);
let image_base64: Option<String> = if offer_describe_tool {
@@ -824,9 +814,6 @@ impl InsightChatService {
let original_system_content = annotate_system_with_budget(&mut messages, max_iterations);
// Per-turn describe_photo memo, same intent as the non-streaming
// path: avoid replaying conflicting visual descriptions in transcript.
let describe_cache: tokio::sync::Mutex<Option<String>> = tokio::sync::Mutex::new(None);
let mut tool_calls_made = 0usize;
let mut iterations_used = 0usize;
let mut last_prompt_eval_count: Option<i32> = None;
@@ -902,7 +889,6 @@ impl InsightChatService {
&image_base64,
&normalized,
&cx,
Some(&describe_cache),
)
.await;
let (result_preview, result_truncated) = truncate_tool_result(&result);
@@ -1148,12 +1134,8 @@ fn annotate_system_with_budget(
return None;
}
let original = first.content.clone();
// Formatted as its own section so small models don't skim past it the
// way they tend to with parenthetical asides at the bottom of a long prompt.
// Phrasing matches the base prompt: budget = capacity, not a constraint
// to conserve. Small models otherwise tend to stop early.
first.content = format!(
"{}\n\n## Budget for this chat turn\n\nYou have up to {} iterations available. Use as many as the question warrants for context-gathering, and reserve the last one for your reply.",
"{}\n\n(Budget for this chat turn: up to {} tool-calling iterations. Produce your final reply before the budget is exhausted.)",
first.content, max_iterations
);
Some(original)

File diff suppressed because it is too large Load Diff

View File

@@ -20,24 +20,22 @@ impl SmsApiClient {
}
}
/// Fetch messages for a specific contact within ±`days_radius` days of
/// the given timestamp (defaults to ±4 days when `None`). Falls back to
/// all contacts if no messages are found for the specified contact.
/// Messages are sorted by proximity to the center timestamp.
/// Fetch messages for a specific contact within ±4 days of the given timestamp
/// Falls back to all contacts if no messages found for the specific contact
/// Messages are sorted by proximity to the center timestamp
pub async fn fetch_messages_for_contact(
&self,
contact: Option<&str>,
center_timestamp: i64,
days_radius: Option<i64>,
) -> Result<Vec<SmsMessage>> {
use chrono::Duration;
let radius = days_radius.unwrap_or(4).clamp(1, 30);
// Calculate ±4 days range around the center timestamp
let center_dt = chrono::DateTime::from_timestamp(center_timestamp, 0)
.ok_or_else(|| anyhow::anyhow!("Invalid timestamp"))?;
let start_dt = center_dt - Duration::days(radius);
let end_dt = center_dt + Duration::days(radius);
let start_dt = center_dt - Duration::days(4);
let end_dt = center_dt + Duration::days(4);
let start_ts = start_dt.timestamp();
let end_ts = end_dt.timestamp();
@@ -45,9 +43,8 @@ impl SmsApiClient {
// If contact specified, try fetching for that contact first
if let Some(contact_name) = contact {
log::info!(
"Fetching SMS for contact: {} (±{} days from {})",
"Fetching SMS for contact: {} (±4 days from {})",
contact_name,
radius,
center_dt.format("%Y-%m-%d %H:%M:%S")
);
let messages = self
@@ -71,8 +68,7 @@ impl SmsApiClient {
// Fallback to all contacts
log::info!(
"Fetching all SMS messages (±{} days from {})",
radius,
"Fetching all SMS messages (±4 days from {})",
center_dt.format("%Y-%m-%d %H:%M:%S")
);
self.fetch_messages(start_ts, end_ts, None, Some(center_timestamp))

View File

@@ -331,7 +331,6 @@ async fn main() -> anyhow::Result<()> {
None,
Vec::new(),
Vec::new(),
false, // disable_writes — keep KB writes on for the population job
)
.await
{

View File

@@ -1718,12 +1718,7 @@ mod tests {
// Mock — files.rs tests don't exercise the date-override endpoints.
// Returning a synthetic row keeps the trait satisfied without
// depending on private DbError constructors.
Ok(mock_exif_row(
library_id,
rel_path,
Some(date_taken),
Some("manual".to_string()),
))
Ok(mock_exif_row(library_id, rel_path, Some(date_taken), Some("manual".to_string())))
}
fn clear_manual_date_taken(

View File

@@ -995,8 +995,10 @@ async fn upload_image(
}
};
let perceptual = perceptual_hash::compute(&uploaded_path);
let resolved_date =
date_resolver::resolve_date_taken(&uploaded_path, exif_data.date_taken);
let resolved_date = date_resolver::resolve_date_taken(
&uploaded_path,
exif_data.date_taken,
);
let insert_exif = InsertImageExif {
library_id: target_library.id,
file_path: relative_path.clone(),
@@ -1020,7 +1022,8 @@ async fn upload_image(
size_bytes,
phash_64: perceptual.map(|h| h.phash_64),
dhash_64: perceptual.map(|h| h.dhash_64),
date_taken_source: resolved_date.map(|r| r.source.as_str().to_string()),
date_taken_source: resolved_date
.map(|r| r.source.as_str().to_string()),
};
if let Ok(mut dao) = exif_dao.lock() {