From 9071d059320cd21a6f9c88ed9b3c7a87154e4dbf Mon Sep 17 00:00:00 2001 From: Cameron Cordes Date: Wed, 6 May 2026 22:37:32 -0400 Subject: [PATCH] =?UTF-8?q?ai:=20insight=20tools=20audit=20=E2=80=94=20bug?= =?UTF-8?q?=20fixes,=20new=20tools,=20prompt=20structure?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bug fixes: - get_sms_messages.days_radius is now actually honored (was hardcoded to ±4d in SmsApiClient::fetch_messages_for_contact). - describe_photo memoized for the lifetime of one agentic loop / one chat turn — re-running mid-loop produced conflicting visual descriptions in the transcript. Agentic user message: - Pre-resolve location via Apollo + Nominatim and emit one Location: line instead of bare GPS, mirroring the non-agentic flow. - Date now formats with weekday + canonical-date source so the model can hedge on fs_time-derived dates. - Hybrid mode visual block tells the model not to call describe_photo (the tool is already gated off in hybrid). System prompt structure: - custom_system_prompt now appends under an explicit "User overrides (these take precedence)" heading instead of prepending — so a custom voice/POV/format prompt actually beats the built-in defaults. - Numbered rules collapsed into bulleted "Tool-use guidance"; merged the contradictory "multiple tools BEFORE" / "after 5 calls" rules. - Chat budget annotation surfaces as its own ## heading. New tools: - recall_facts_for_entity(entity_id|name) — facts for one entity without needing a photo path. Fills the "tell me about Sarah" chat case where recall_facts_for_photo doesn't apply. - find_photos_with_entity(entity_id|name) — "when did I last see X / show me photos from the Tahoe trip" via entity_photo_links. - get_exif(file_path) — full EXIF row for any photo, for technical ("what camera was this on?") questions. 
Tools removed: - get_file_tags duplicated the inline Tags: line on the user message; exposing both gave models an excuse to "confirm" what they had. Tool descriptions tightened: - search_rag now correctly says "per-day, per-contact summaries" and explains the date is for time-decay weighting. - recall_entities warns about empty-filter dumps. - store_entity / store_fact document dedup return + snake_case predicate vocabulary. - reverse_geocode defers to the pre-resolved location and to get_personal_place_at for personal places. - get_current_datetime narrowed to time-since-photo use. Calendar / location: - get_calendar_events accepts query and embeds it for hybrid time + semantic ranking (was always passing None for the embedding). - get_location_history exposes limit; description tells the model there's no semantic ranking on this surface. New disable_writes flag: - POST /insights/generate/agentic and the chat endpoints accept disable_writes: bool. When true, drops store_entity / store_fact from the tool palette and rewrites the system prompt's knowledge- write line. Lets users explore alternate prompts (caption-style, third-person, haiku) without polluting the persistent KB. Co-Authored-By: Claude Opus 4.7 (1M context) --- src/ai/handlers.rs | 12 + src/ai/insight_chat.rs | 18 +- src/ai/insight_generator.rs | 685 ++++++++++++++++++++++++++++------ src/ai/sms_client.rs | 20 +- src/bin/populate_knowledge.rs | 1 + src/files.rs | 7 +- src/main.rs | 9 +- 7 files changed, 615 insertions(+), 137 deletions(-) diff --git a/src/ai/handlers.rs b/src/ai/handlers.rs index 7ec6ab7..bb980a5 100644 --- a/src/ai/handlers.rs +++ b/src/ai/handlers.rs @@ -48,6 +48,11 @@ pub struct GeneratePhotoInsightRequest { /// falls back to `DEFAULT_FEWSHOT_INSIGHT_IDS`. #[serde(default)] pub fewshot_insight_ids: Option>, + /// When true, drop `store_entity` / `store_fact` from the tool palette + /// for this run. 
Use for one-off explorations (caption-style prompts, + /// experimentation) that shouldn't pollute the persistent knowledge KB. + #[serde(default)] + pub disable_writes: bool, } #[derive(Debug, Deserialize)] @@ -390,6 +395,7 @@ pub async fn generate_agentic_insight_handler( request.backend.clone(), fewshot_examples, fewshot_ids, + request.disable_writes, ) .await; @@ -642,6 +648,10 @@ pub struct ChatTurnHttpRequest { pub max_iterations: Option, #[serde(default)] pub amend: bool, + /// Drop store_entity / store_fact from the tool palette for this turn — + /// useful for hypothetical/exploration chats that shouldn't pollute the KB. + #[serde(default)] + pub disable_writes: bool, } #[derive(Debug, Serialize)] @@ -696,6 +706,7 @@ pub async fn chat_turn_handler( min_p: request.min_p, max_iterations: request.max_iterations, amend: request.amend, + disable_writes: request.disable_writes, }; match app_state.insight_chat.chat_turn(chat_req).await { @@ -910,6 +921,7 @@ pub async fn chat_stream_handler( min_p: request.min_p, max_iterations: request.max_iterations, amend: request.amend, + disable_writes: request.disable_writes, }; let service = app_state.insight_chat.clone(); diff --git a/src/ai/insight_chat.rs b/src/ai/insight_chat.rs index 792f124..8739b48 100644 --- a/src/ai/insight_chat.rs +++ b/src/ai/insight_chat.rs @@ -48,6 +48,10 @@ pub struct ChatTurnRequest { /// When true, write a new insight row (regenerating title) instead of /// updating training_messages on the existing row. pub amend: bool, + /// When true, drop `store_entity` / `store_fact` from the tool palette + /// for this turn. Use to explore alternate phrasings or run + /// hypothetical chats without polluting the persistent KB. 
+ pub disable_writes: bool, } #[derive(Debug)] @@ -362,6 +366,7 @@ impl InsightChatService { let tools = InsightGenerator::build_tool_definitions( offer_describe_tool, self.generator.apollo_enabled(), + req.disable_writes, ); // Image base64 only needed when describe_photo is on the menu. Load @@ -397,6 +402,9 @@ impl InsightChatService { // tighter and dispatching tools through the shared executor. let loop_span = tracer.start_with_context("ai.chat.loop", &insight_cx); let loop_cx = insight_cx.with_span(loop_span); + // Memoize describe_photo for this turn so repeated calls don't + // produce conflicting visual descriptions in the assistant transcript. + let describe_cache: tokio::sync::Mutex> = tokio::sync::Mutex::new(None); let mut tool_calls_made = 0usize; let mut iterations_used = 0usize; let mut last_prompt_eval_count: Option = None; @@ -445,6 +453,7 @@ impl InsightChatService { &image_base64, &normalized, &loop_cx, + Some(&describe_cache), ) .await; messages.push(ChatMessage::tool_result(result)); @@ -793,6 +802,7 @@ impl InsightChatService { let tools = InsightGenerator::build_tool_definitions( offer_describe_tool, self.generator.apollo_enabled(), + req.disable_writes, ); let image_base64: Option = if offer_describe_tool { @@ -814,6 +824,9 @@ impl InsightChatService { let original_system_content = annotate_system_with_budget(&mut messages, max_iterations); + // Per-turn describe_photo memo, same intent as the non-streaming + // path: avoid replaying conflicting visual descriptions in transcript. 
+ let describe_cache: tokio::sync::Mutex> = tokio::sync::Mutex::new(None); let mut tool_calls_made = 0usize; let mut iterations_used = 0usize; let mut last_prompt_eval_count: Option = None; @@ -889,6 +902,7 @@ impl InsightChatService { &image_base64, &normalized, &cx, + Some(&describe_cache), ) .await; let (result_preview, result_truncated) = truncate_tool_result(&result); @@ -1134,8 +1148,10 @@ fn annotate_system_with_budget( return None; } let original = first.content.clone(); + // Formatted as its own section so small models don't skim past it the + // way they tend to with parenthetical asides at the bottom of a long prompt. first.content = format!( - "{}\n\n(Budget for this chat turn: up to {} tool-calling iterations. Produce your final reply before the budget is exhausted.)", + "{}\n\n## Budget for this chat turn\n\nYou have up to {} tool-calling iterations. Stop calling tools and write the final reply before the budget is exhausted.", first.content, max_iterations ); Some(original) diff --git a/src/ai/insight_generator.rs b/src/ai/insight_generator.rs index e03b1af..eed0949 100644 --- a/src/ai/insight_generator.rs +++ b/src/ai/insight_generator.rs @@ -983,7 +983,7 @@ impl InsightGenerator { // Step 1: Get FULL immediate temporal context (±4 days, ALL messages) let immediate_messages = self .sms_client - .fetch_messages_for_contact(contact.as_deref(), timestamp) + .fetch_messages_for_contact(contact.as_deref(), timestamp, None) .await .unwrap_or_else(|e| { log::error!("Failed to fetch immediate messages: {}", e); @@ -1129,7 +1129,7 @@ impl InsightGenerator { log::info!("Using traditional time-based message retrieval (±4 days)"); let sms_messages = self .sms_client - .fetch_messages_for_contact(contact.as_deref(), timestamp) + .fetch_messages_for_contact(contact.as_deref(), timestamp, None) .await .unwrap_or_else(|e| { log::error!("Failed to fetch SMS messages: {}", e); @@ -1459,7 +1459,14 @@ Return ONLY the summary, nothing else."#, // ── Tool executors for 
agentic loop ──────────────────────────────── - /// Dispatch a tool call to the appropriate executor + /// Dispatch a tool call to the appropriate executor. + /// + /// `describe_photo_cache` lets the agentic loop memoize the visual + /// description across iterations — vision describes are non-deterministic + /// and re-running mid-loop produces conflicting answers in the chat + /// history. Pass `None` to skip caching (chat continuation falls through + /// to the live call each turn — the description isn't replayed in chat). + #[allow(clippy::too_many_arguments)] pub(crate) async fn execute_tool( &self, tool_name: &str, @@ -1468,6 +1475,7 @@ Return ONLY the summary, nothing else."#, image_base64: &Option, file_path: &str, cx: &opentelemetry::Context, + describe_photo_cache: Option<&tokio::sync::Mutex>>, ) -> String { let result = match tool_name { "search_rag" => self.tool_search_rag(arguments, ollama, cx).await, @@ -1475,12 +1483,34 @@ Return ONLY the summary, nothing else."#, "get_sms_messages" => self.tool_get_sms_messages(arguments, cx).await, "get_calendar_events" => self.tool_get_calendar_events(arguments, cx).await, "get_location_history" => self.tool_get_location_history(arguments, cx).await, - "get_file_tags" => self.tool_get_file_tags(arguments, cx).await, - "describe_photo" => self.tool_describe_photo(ollama, image_base64).await, + "describe_photo" => match describe_photo_cache { + Some(cache) => { + let mut guard = cache.lock().await; + if let Some(ref cached) = *guard { + log::info!( + "tool_describe_photo: returning cached description ({} chars)", + cached.len() + ); + cached.clone() + } else { + let fresh = self.tool_describe_photo(ollama, image_base64).await; + // Only cache successful descriptions — error strings + // shouldn't lock in for the rest of the loop. 
+ if !fresh.starts_with("Error") && !fresh.starts_with("No image") { + *guard = Some(fresh.clone()); + } + fresh + } + } + None => self.tool_describe_photo(ollama, image_base64).await, + }, "reverse_geocode" => self.tool_reverse_geocode(arguments).await, "get_personal_place_at" => self.tool_get_personal_place_at(arguments).await, "recall_entities" => self.tool_recall_entities(arguments, cx).await, "recall_facts_for_photo" => self.tool_recall_facts_for_photo(arguments, cx).await, + "recall_facts_for_entity" => self.tool_recall_facts_for_entity(arguments, cx).await, + "find_photos_with_entity" => self.tool_find_photos_with_entity(arguments, cx).await, + "get_exif" => self.tool_get_exif(arguments, cx).await, "store_entity" => self.tool_store_entity(arguments, ollama, cx).await, "store_fact" => self.tool_store_fact(arguments, file_path, cx).await, "get_current_datetime" => Self::tool_get_current_datetime(), @@ -1835,7 +1865,7 @@ Return ONLY the summary, nothing else."#, match self .sms_client - .fetch_messages_for_contact(contact.as_deref(), timestamp) + .fetch_messages_for_contact(contact.as_deref(), timestamp, Some(days_radius)) .await { Ok(messages) if !messages.is_empty() => { @@ -1869,7 +1899,8 @@ Return ONLY the summary, nothing else."#, } } - /// Tool: get_calendar_events — fetch calendar events near a date + /// Tool: get_calendar_events — fetch calendar events near a date, + /// optionally ranked by semantic similarity to a query string. 
async fn tool_get_calendar_events( &self, args: &serde_json::Value, @@ -1879,6 +1910,11 @@ Return ONLY the summary, nothing else."#, Some(d) => d, None => return "Error: missing required parameter 'date'".to_string(), }; + let query = args + .get("query") + .and_then(|v| v.as_str()) + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()); let days_radius = args .get("days_radius") .and_then(|v| v.as_i64()) @@ -1896,19 +1932,41 @@ Return ONLY the summary, nothing else."#, let timestamp = date.and_hms_opt(12, 0, 0).unwrap().and_utc().timestamp(); log::info!( - "tool_get_calendar_events: date={}, days_radius={}, limit={}", + "tool_get_calendar_events: date={}, days_radius={}, limit={}, query={:?}", date, days_radius, - limit + limit, + query ); + // Embed the query (best-effort) so the DAO can do hybrid time + semantic ranking. + let query_embedding: Option> = match query.as_deref() { + Some(q) => match self.ollama.generate_embedding(q).await { + Ok(emb) => Some(emb), + Err(e) => { + log::warn!( + "Calendar query embedding failed, falling back to time-only: {}", + e + ); + None + } + }, + None => None, + }; + let events = { let mut dao = self .calendar_dao .lock() .expect("Unable to lock CalendarEventDao"); - dao.find_relevant_events_hybrid(cx, timestamp, days_radius, None, limit) - .ok() + dao.find_relevant_events_hybrid( + cx, + timestamp, + days_radius, + query_embedding.as_deref(), + limit, + ) + .ok() }; match events { @@ -1962,6 +2020,11 @@ Return ONLY the summary, nothing else."#, .get("days_radius") .and_then(|v| v.as_i64()) .unwrap_or(14); + let limit = args + .get("limit") + .and_then(|v| v.as_i64()) + .unwrap_or(20) + .clamp(1, 50) as usize; let date = match NaiveDate::parse_from_str(date_str, "%Y-%m-%d") { Ok(d) => d, @@ -1970,9 +2033,10 @@ Return ONLY the summary, nothing else."#, let timestamp = date.and_hms_opt(12, 0, 0).unwrap().and_utc().timestamp(); log::info!( - "tool_get_location_history: date={}, days_radius={}", + 
"tool_get_location_history: date={}, days_radius={}, limit={}", date, - days_radius + days_radius, + limit ); let start_ts = timestamp - (days_radius * 86400); @@ -1990,7 +2054,7 @@ Return ONLY the summary, nothing else."#, Some(locs) if !locs.is_empty() => { let formatted: Vec = locs .iter() - .take(20) + .take(limit) .map(|loc| { let dt = DateTime::from_timestamp(loc.timestamp, 0) .map(|dt| { @@ -2026,34 +2090,6 @@ Return ONLY the summary, nothing else."#, } } - /// Tool: get_file_tags — fetch tags for a file path - async fn tool_get_file_tags( - &self, - args: &serde_json::Value, - cx: &opentelemetry::Context, - ) -> String { - let file_path = match args.get("file_path").and_then(|v| v.as_str()) { - Some(p) => p.to_string(), - None => return "Error: missing required parameter 'file_path'".to_string(), - }; - - log::info!("tool_get_file_tags: file_path='{}'", file_path); - - let tags = { - let mut dao = self.tag_dao.lock().expect("Unable to lock TagDao"); - dao.get_tags_for_path(cx, &file_path).ok() - }; - - match tags { - Some(t) if !t.is_empty() => { - let names: Vec = t.into_iter().map(|tag| tag.name).collect(); - names.join(", ") - } - Some(_) => "No tags found.".to_string(), - None => "No tags found.".to_string(), - } - } - /// Tool: describe_photo — generate a visual description of the photo async fn tool_describe_photo( &self, @@ -2264,6 +2300,255 @@ Return ONLY the summary, nothing else."#, } } + /// Resolve `entity_id` directly when provided, otherwise look up by + /// `name` (+ optional `entity_type`). Returns the chosen entity, or an + /// error string suitable for use as a tool result. 
+ fn resolve_entity_arg( + &self, + args: &serde_json::Value, + cx: &opentelemetry::Context, + ) -> std::result::Result { + if let Some(eid) = args.get("entity_id").and_then(|v| v.as_i64()) { + let mut kdao = self + .knowledge_dao + .lock() + .expect("Unable to lock KnowledgeDao"); + return match kdao.get_entity_by_id(cx, eid as i32) { + Ok(Some(e)) => Ok(e), + Ok(None) => Err(format!("No entity found with id {}.", eid)), + Err(e) => Err(format!("Error looking up entity: {:?}", e)), + }; + } + let Some(name) = args.get("name").and_then(|v| v.as_str()) else { + return Err("Error: provide either 'entity_id' or 'name'".to_string()); + }; + let entity_type = args + .get("entity_type") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()); + let mut kdao = self + .knowledge_dao + .lock() + .expect("Unable to lock KnowledgeDao"); + match kdao.get_entity_by_name(cx, name, entity_type.as_deref()) { + Ok(matches) => { + // Prefer active rows; among active, highest confidence wins. + // Falls through to any match if no active rows exist. 
+ let mut active: Vec<_> = matches + .iter() + .filter(|e| e.status == "active") + .cloned() + .collect(); + active.sort_by(|a, b| { + b.confidence + .partial_cmp(&a.confidence) + .unwrap_or(std::cmp::Ordering::Equal) + }); + if let Some(e) = active.into_iter().next() { + return Ok(e); + } + if let Some(e) = matches.into_iter().next() { + return Ok(e); + } + Err(format!( + "No entity found matching name '{}'{}.", + name, + entity_type + .map(|t| format!(" of type '{}'", t)) + .unwrap_or_default() + )) + } + Err(e) => Err(format!("Error looking up entity by name: {:?}", e)), + } + } + + /// Tool: recall_facts_for_entity — list active facts for one entity + async fn tool_recall_facts_for_entity( + &self, + args: &serde_json::Value, + cx: &opentelemetry::Context, + ) -> String { + let entity = match self.resolve_entity_arg(args, cx) { + Ok(e) => e, + Err(msg) => return msg, + }; + log::info!( + "tool_recall_facts_for_entity: entity_id={}, name='{}'", + entity.id, + entity.name + ); + + let mut kdao = self + .knowledge_dao + .lock() + .expect("Unable to lock KnowledgeDao"); + let facts = match kdao.get_facts_for_entity(cx, entity.id) { + Ok(f) => f, + Err(e) => return format!("Error fetching facts: {:?}", e), + }; + + let mut lines: Vec = Vec::new(); + lines.push(format!( + "Entity: {} (id {}, {}, confidence {:.2})", + entity.name, entity.id, entity.entity_type, entity.confidence + )); + if !entity.description.is_empty() { + lines.push(format!(" description: {}", entity.description)); + } + let mut wrote_any_fact = false; + for f in facts.iter().filter(|f| f.status == "active") { + wrote_any_fact = true; + let obj = if let Some(ref v) = f.object_value { + v.clone() + } else if let Some(oid) = f.object_entity_id { + kdao.get_entity_by_id(cx, oid) + .ok() + .flatten() + .map(|e| format!("{} (id {})", e.name, e.id)) + .unwrap_or_else(|| format!("entity:{}", oid)) + } else { + "(unknown)".to_string() + }; + lines.push(format!( + " - {} {} (confidence {:.2})", + 
f.predicate, obj, f.confidence + )); + } + if !wrote_any_fact { + lines.push(" (no active facts)".to_string()); + } + lines.join("\n") + } + + /// Tool: find_photos_with_entity — list photo paths linked to an entity + async fn tool_find_photos_with_entity( + &self, + args: &serde_json::Value, + cx: &opentelemetry::Context, + ) -> String { + let entity = match self.resolve_entity_arg(args, cx) { + Ok(e) => e, + Err(msg) => return msg, + }; + let limit = args + .get("limit") + .and_then(|v| v.as_i64()) + .unwrap_or(20) + .clamp(1, 50) as usize; + + log::info!( + "tool_find_photos_with_entity: entity_id={}, name='{}', limit={}", + entity.id, + entity.name, + limit + ); + + let links = { + let mut kdao = self + .knowledge_dao + .lock() + .expect("Unable to lock KnowledgeDao"); + match kdao.get_links_for_entity(cx, entity.id) { + Ok(l) => l, + Err(e) => return format!("Error fetching links: {:?}", e), + } + }; + + if links.is_empty() { + return format!( + "No photos linked to entity '{}' (id {}).", + entity.name, entity.id + ); + } + + // Deduplicate by file_path so the same photo under multiple libraries + // shows once. Roles for the same path are unioned. 
+ use std::collections::BTreeMap; + let mut by_path: BTreeMap> = BTreeMap::new(); + for l in links { + by_path.entry(l.file_path).or_default().push(l.role); + } + + let total = by_path.len(); + let mut lines = Vec::with_capacity(limit + 1); + lines.push(format!( + "Found {} photo(s) linked to '{}' (id {}); showing up to {}:", + total, entity.name, entity.id, limit + )); + for (path, mut roles) in by_path.into_iter().take(limit) { + roles.sort(); + roles.dedup(); + lines.push(format!("- {} [roles: {}]", path, roles.join(", "))); + } + lines.join("\n") + } + + /// Tool: get_exif — return the stored EXIF row for a photo file path + async fn tool_get_exif(&self, args: &serde_json::Value, cx: &opentelemetry::Context) -> String { + let file_path = match args.get("file_path").and_then(|v| v.as_str()) { + Some(p) => p.to_string(), + None => return "Error: missing required parameter 'file_path'".to_string(), + }; + let normalized = normalize_path(&file_path); + log::info!("tool_get_exif: file_path='{}'", normalized); + + let exif = { + let mut dao = self.exif_dao.lock().expect("Unable to lock ExifDao"); + match dao.get_exif(cx, &normalized) { + Ok(e) => e, + Err(e) => return format!("Error fetching EXIF: {:?}", e), + } + }; + let Some(e) = exif else { + return format!("No EXIF row found for '{}'.", normalized); + }; + + let mut lines: Vec = Vec::new(); + lines.push(format!("EXIF for {}:", normalized)); + if let (Some(make), Some(model)) = (e.camera_make.as_deref(), e.camera_model.as_deref()) { + lines.push(format!(" camera: {} {}", make, model)); + } else if let Some(make) = e.camera_make.as_deref() { + lines.push(format!(" camera_make: {}", make)); + } else if let Some(model) = e.camera_model.as_deref() { + lines.push(format!(" camera_model: {}", model)); + } + if let Some(lens) = e.lens_model.as_deref() { + lines.push(format!(" lens: {}", lens)); + } + if let (Some(w), Some(h)) = (e.width, e.height) { + lines.push(format!(" dimensions: {}x{}", w, h)); + } + if let 
Some(fl) = e.focal_length { + lines.push(format!(" focal_length: {:.1} mm", fl)); + } + if let Some(ap) = e.aperture { + lines.push(format!(" aperture: f/{:.1}", ap)); + } + if let Some(ref ss) = e.shutter_speed { + lines.push(format!(" shutter_speed: {}", ss)); + } + if let Some(iso) = e.iso { + lines.push(format!(" iso: {}", iso)); + } + if let Some(dt) = e.date_taken { + let dt_str = chrono::DateTime::from_timestamp(dt, 0) + .map(|t| t.format("%Y-%m-%d %H:%M:%S UTC").to_string()) + .unwrap_or_else(|| dt.to_string()); + lines.push(format!( + " date_taken: {} (source: {})", + dt_str, + e.date_taken_source.as_deref().unwrap_or("unknown") + )); + } + if let (Some(lat), Some(lon)) = (e.gps_latitude, e.gps_longitude) { + lines.push(format!(" gps: {:.6}, {:.6}", lat, lon)); + } + if let Some(alt) = e.gps_altitude { + lines.push(format!(" gps_altitude: {:.1} m", alt)); + } + lines.join("\n") + } + /// Tool: store_entity — upsert an entity into the knowledge memory async fn tool_store_entity( &self, @@ -2485,26 +2770,30 @@ Return ONLY the summary, nothing else."#, // ── Agentic insight generation ────────────────────────────────────── /// Build the list of tool definitions for the agentic loop - pub(crate) fn build_tool_definitions(has_vision: bool, apollo_enabled: bool) -> Vec { + pub(crate) fn build_tool_definitions( + has_vision: bool, + apollo_enabled: bool, + disable_writes: bool, + ) -> Vec { let mut tools = vec![ Tool::function( "search_rag", - "Search conversation history using semantic search. Use this to find relevant past conversations about specific topics, people, or events.", + "Semantic search over per-day, per-contact CONVERSATION SUMMARIES (not raw messages). Each hit is one compressed paragraph for one (date, contact) day. Use for high-level themes around a date — for specific wording, call `search_messages`. 
The `date` argument biases ranking toward summaries near that date and is required even when searching across all time.", serde_json::json!({ "type": "object", "required": ["query", "date"], "properties": { "query": { "type": "string", - "description": "The search query to find relevant conversations" + "description": "Topic / theme query (semantic). 'wedding planning', 'job search stress', 'Tahoe trip'." }, "date": { "type": "string", - "description": "The reference date in YYYY-MM-DD format" + "description": "Reference date in YYYY-MM-DD format. Used for time-decay weighting (closer summaries rank higher)." }, "contact": { "type": "string", - "description": "Optional contact name to filter results" + "description": "Optional contact name to filter results. When you know the contact, also make at least one call WITHOUT this filter to surface what else was happening that week." }, "limit": { "type": "integer", @@ -2564,7 +2853,7 @@ Return ONLY the summary, nothing else."#, ), Tool::function( "get_calendar_events", - "Fetch calendar events near a specific date. Shows scheduled events, meetings, and activities.", + "Fetch calendar events near a specific date. Pass a `query` to rank by semantic similarity (e.g. 'wedding', 'doctor visit', 'work travel') in addition to time. Without a query, results are time-ordered.", serde_json::json!({ "type": "object", "required": ["date"], @@ -2573,6 +2862,10 @@ Return ONLY the summary, nothing else."#, "type": "string", "description": "The center date in YYYY-MM-DD format" }, + "query": { + "type": "string", + "description": "Optional topic / theme to rank events by (semantic). Pairs well with photo-relevant cues from the visual description." + }, "days_radius": { "type": "integer", "description": "Number of days before and after the date to search (default: 7)" @@ -2586,7 +2879,7 @@ Return ONLY the summary, nothing else."#, ), Tool::function( "get_location_history", - "Fetch location history records near a specific date. 
Shows places visited and activities.", + "Fetch location-history records (lat/lon / activity / place_name) within ±days_radius days of a date. Time-ordered; no semantic ranking. Useful for reconstructing where the user was around the photo's timestamp.", serde_json::json!({ "type": "object", "required": ["date"], @@ -2598,29 +2891,22 @@ Return ONLY the summary, nothing else."#, "days_radius": { "type": "integer", "description": "Number of days before and after the date to search (default: 14)" - } - } - }), - ), - Tool::function( - "get_file_tags", - "Get tags/labels that have been applied to a specific photo file.", - serde_json::json!({ - "type": "object", - "required": ["file_path"], - "properties": { - "file_path": { - "type": "string", - "description": "The file path of the photo to get tags for" + }, + "limit": { + "type": "integer", + "description": "Maximum number of records to return (default: 20, max: 50)" } } }), ), ]; + // (`get_file_tags` was removed — the tags for the current photo are + // already inlined in the user message, and exposing both gave models + // an excuse to spend an iteration "confirming" what they already had.) tools.push(Tool::function( "reverse_geocode", - "Convert GPS latitude/longitude coordinates to a human-readable place name (city, state). Use this when GPS coordinates are available in the photo metadata, or to resolve coordinates returned by get_location_history.", + "Convert GPS latitude/longitude to a city/state place name via Nominatim. The photo's primary location is already pre-resolved on the user message — only call this for *other* coordinates (e.g. those returned by get_location_history). 
For the user's personal places (Home, Work, Cabin) prefer `get_personal_place_at`.", serde_json::json!({ "type": "object", "required": ["latitude", "longitude"], @@ -2642,7 +2928,7 @@ Return ONLY the summary, nothing else."#, if apollo_enabled { tools.push(Tool::function( "get_personal_place_at", - "Get the user's personal, named place (e.g. Home, Work, Cabin) at a GPS coordinate, if any. Returns the place name, category, free-text description (the user's own notes about the location), and radius. More specific than reverse_geocode — prefer this when both apply.", + "Get the user's personal, named place (e.g. Home, Work, Cabin) at a GPS coordinate, if any. Returns the place name, category, free-text description (the user's own notes about the location), and radius. More specific than reverse_geocode — prefer this when both apply. The cheap default is to call this once with the photo's GPS before any other location reasoning.", serde_json::json!({ "type": "object", "required": ["latitude", "longitude"], @@ -2657,7 +2943,7 @@ Return ONLY the summary, nothing else."#, // Knowledge memory tools tools.push(Tool::function( "recall_entities", - "Search the knowledge memory for people, places, events, or things previously learned from other photos. Use this to retrieve context about subjects appearing in this photo.", + "Search the knowledge memory for people, places, events, or things previously learned from other photos. Provide at least one of `name` or `entity_type` — calling with neither dumps up to 50 entities ordered by id, which is rarely what you want.", serde_json::json!({ "type": "object", "properties": { @@ -2668,7 +2954,7 @@ Return ONLY the summary, nothing else."#, "entity_type": { "type": "string", "enum": ["person", "place", "event", "thing"], - "description": "Filter by entity type (optional)" + "description": "Filter by entity type. Pass alone to enumerate everything of one kind (e.g. 'all known places')." 
}, "limit": { "type": "integer", @@ -2694,63 +2980,140 @@ Return ONLY the summary, nothing else."#, )); tools.push(Tool::function( - "store_entity", - "Store or update a person, place, event, or thing in the knowledge memory. Call this when you identify a subject in this photo that should be remembered for future insights.", + "recall_facts_for_entity", + "Retrieve all stored facts about one entity (person, place, event, thing) without needing a photo path. Use in chat when the user asks about a known subject (e.g. 'tell me more about Sarah') and you have the entity_id from recall_entities — or pass `name` to look it up by name.", serde_json::json!({ "type": "object", - "required": ["name", "entity_type"], "properties": { + "entity_id": { + "type": "integer", + "description": "The entity's ID (preferred — exact match)." + }, "name": { "type": "string", - "description": "The canonical name of the entity (e.g. 'John Smith', 'Banff National Park')" + "description": "Entity name (case-insensitive). Resolves to the highest-confidence active entity with that name. Provide this OR entity_id." }, "entity_type": { "type": "string", "enum": ["person", "place", "event", "thing"], - "description": "The type of entity" - }, - "description": { - "type": "string", - "description": "A brief description of the entity" + "description": "Optional filter when looking up by name (e.g. disambiguate a person named 'Tahoe' from the place)." } } }), )); tools.push(Tool::function( - "store_fact", - "Record a fact about an entity in the knowledge memory. Provide EITHER object_entity_id (when the object is a known entity whose ID you have) OR object_value (for free-text attributes). The fact will be linked to the current photo automatically.", + "find_photos_with_entity", + "List photos that have been linked to an entity in the knowledge memory. Use to answer 'when did I last see Sarah' / 'show me photos from the Tahoe trip'. 
Returns file paths and the role each entity played in the photo (subject / background / location).", serde_json::json!({ "type": "object", - "required": ["subject_entity_id", "predicate"], "properties": { - "subject_entity_id": { + "entity_id": { "type": "integer", - "description": "The ID of the entity this fact is about (returned by store_entity or recall_entities)" + "description": "The entity's ID (preferred — exact match)." }, - "predicate": { + "name": { "type": "string", - "description": "The relationship or attribute (e.g. 'is_friend_of', 'located_in', 'attended_event', 'is_sibling_of')" + "description": "Entity name (case-insensitive). Provide this OR entity_id." }, - "object_entity_id": { + "entity_type": { + "type": "string", + "enum": ["person", "place", "event", "thing"], + "description": "Optional filter when looking up by name." + }, + "limit": { "type": "integer", - "description": "Use when the object is a known entity (e.g. another person's entity ID for 'is_friend_of '). Takes precedence over object_value." - }, - "object_value": { - "type": "string", - "description": "Use for free-text attributes where the object is not a stored entity (e.g. 'Portland, Oregon', 'software engineer')" - }, - "photo_role": { - "type": "string", - "description": "How this entity appears in the photo (e.g. 'subject', 'background', 'location'). Defaults to 'subject'." + "description": "Maximum number of photo paths to return (default: 20, max: 50)" } } }), )); + tools.push(Tool::function( + "get_exif", + "Read the stored EXIF row for a photo file path. Returns camera make/model, lens, focal length, aperture, shutter speed, ISO, dimensions, and the date_taken source. Use to answer photography questions ('what camera was this on?') or to inspect technical metadata. 
The current photo's GPS, date, and date source are already on the user message — only call this when you need the additional fields.", + serde_json::json!({ + "type": "object", + "required": ["file_path"], + "properties": { + "file_path": { + "type": "string", + "description": "The file path of the photo to read EXIF for. Required — for the current photo, pass the path shown in the user message." + } + } + }), + )); + + // Knowledge-memory writes are gated by `disable_writes` — when set, + // exploration / chat continuation can run without polluting the + // persistent KB with one-off variants (e.g. caption-style prompts). + if !disable_writes { + tools.push(Tool::function( + "store_entity", + "Store or update a person, place, event, or thing in the knowledge memory. \ + Returns the entity's ID. If similarly-named entities already exist, the \ + response lists them — prefer using an existing ID over creating a duplicate.", + serde_json::json!({ + "type": "object", + "required": ["name", "entity_type"], + "properties": { + "name": { + "type": "string", + "description": "The canonical name of the entity (e.g. 'John Smith', 'Banff National Park')" + }, + "entity_type": { + "type": "string", + "enum": ["person", "place", "event", "thing"], + "description": "The type of entity" + }, + "description": { + "type": "string", + "description": "A brief description of the entity" + } + } + }), + )); + + tools.push(Tool::function( + "store_fact", + "Record a fact about an entity in the knowledge memory. Provide EITHER object_entity_id \ + (when the object is a known entity whose ID you have) OR object_value (for free-text \ + attributes). The fact is linked to the current photo automatically. Predicates use \ + snake_case present-tense verbs/relations: is_friend_of, is_sibling_of, is_parent_of, \ + located_in, works_at, attended_event, visited_in, owns. 
Symmetric relations (friendship, \ + sibling) need only be stored once.", + serde_json::json!({ + "type": "object", + "required": ["subject_entity_id", "predicate"], + "properties": { + "subject_entity_id": { + "type": "integer", + "description": "The ID of the entity this fact is about (returned by store_entity or recall_entities)" + }, + "predicate": { + "type": "string", + "description": "snake_case verb/relation in present tense (e.g. 'is_friend_of', 'located_in', 'attended_event', 'is_sibling_of', 'works_at')" + }, + "object_entity_id": { + "type": "integer", + "description": "Use when the object is a known entity (e.g. another person's entity ID for 'is_friend_of '). Takes precedence over object_value." + }, + "object_value": { + "type": "string", + "description": "Use for free-text attributes where the object is not a stored entity (e.g. 'Portland, Oregon', 'software engineer')" + }, + "photo_role": { + "type": "string", + "description": "How this entity appears in the photo (e.g. 'subject', 'background', 'location'). Defaults to 'subject'." + } + } + }), + )); + } + tools.push(Tool::function( "get_current_datetime", - "Get the current date and time. Useful for understanding how long ago the photo was taken.", + "Returns the current date and time. 
Use ONLY when you need to compute time-since-photo for phrases like 'two years ago' — the photo's date is already in the user message and re-deriving it is wasted budget.", serde_json::json!({ "type": "object", "properties": {} @@ -2957,6 +3320,7 @@ Return ONLY the summary, nothing else."#, backend: Option, fewshot_examples: Vec>, fewshot_source_ids: Vec, + disable_writes: bool, ) -> Result<(Option, Option)> { let tracer = global_tracer(); let current_cx = opentelemetry::Context::current(); @@ -3191,9 +3555,40 @@ Return ONLY the summary, nothing else."#, .map(|dt| dt.date_naive()) .unwrap_or_else(|| Utc::now().date_naive()); + // Date confidence comes from the canonical-date waterfall — one of + // exif/exiftool/filename/fs_time/manual. Surface in the user message + // so the model can hedge appropriately on `fs_time`-sourced dates. + let date_taken_source = exif + .as_ref() + .and_then(|e| e.date_taken_source.clone()) + .unwrap_or_else(|| "unknown".to_string()); + let contact = Self::extract_contact_from_path(&file_path); log::info!("Agentic: date_taken={}, contact={:?}", date_taken, contact); + // Pre-resolve a human-readable location from GPS using the same + // Apollo + Nominatim composer the non-agentic flow uses. Saves the + // agent two iterations re-deriving a string we already have. + let resolved_location: Option = match exif.as_ref() { + Some(e) => match (e.gps_latitude, e.gps_longitude) { + (Some(lat), Some(lon)) => { + let lat = lat as f64; + let lon = lon as f64; + let nominatim = self.reverse_geocode(lat, lon).await; + let apollo_primary = self + .apollo_client + .places_containing(lat, lon) + .await + .into_iter() + .next(); + let combined = compose_location_string(apollo_primary, nominatim, lat, lon); + Some(combined) + } + _ => None, + }, + None => None, + }; + // 5. 
Fetch tags let tag_names: Vec = { let mut dao = self.tag_dao.lock().expect("Unable to lock TagDao"); @@ -3302,40 +3697,63 @@ Return ONLY the summary, nothing else."#, None => String::new(), }; let fewshot_block = Self::render_fewshot_examples(&fewshot_examples); + // The knowledge-write line gets rewritten when disable_writes is on + // so the model isn't told to call tools that aren't on the menu. + let knowledge_write_line = if disable_writes { + "Knowledge-memory writes are disabled for this run — do not attempt to call store_entity or store_fact (they are not available)." + } else { + "When you identify people, places, events, or notable things in this photo: use store_entity to record them and store_fact to record key facts (relationships, roles, attributes). This builds a persistent memory for future insights." + }; let base_system = format!( "You are a personal photo memory assistant helping to reconstruct a memory from a photo.{owner_id_note}\n\n\ {fewshot_block}\ - IMPORTANT INSTRUCTIONS:\n\ - 1. You MUST call multiple tools to gather context BEFORE writing any final insight. Do not produce a final answer after only one or two tool calls.\n\ - 2. When calling get_sms_messages and search_rag, always make at least one call WITHOUT a contact filter to capture what else was happening in {owner_name}'s life around this date — other conversations, events, and activities provide important wider context even when a specific contact is known.\n\ - 3. Use recall_facts_for_photo to load any previously stored knowledge about subjects in this photo.\n\ - 4. Use recall_entities to look up known people, places, or things that appear in this photo.\n\ - 5. When you identify people, places, events, or notable things in this photo: use store_entity to record them and store_fact to record key facts (relationships, roles, attributes). This builds a persistent memory for future insights.\n\ - 6. 
Only produce your final insight AFTER you have gathered context from at least 5 tool calls.\n\ - 7. If a tool returns no results, that is useful information — continue calling the remaining tools anyway.\n\ - 8. You have a hard budget of {max_iterations} tool-calling iterations before the loop ends. Plan your context gathering so you can write a complete final insight within that budget.", + ## Tool-use guidance\n\ + - Spend most of your iteration budget on context-gathering tools before writing the final insight. A typical strong run uses 4–8 tool calls; trivial photos may need fewer, rich photos more.\n\ + - When you call get_sms_messages or search_rag and you know the contact, also make at least one call WITHOUT a contact filter so you can see what else was happening in {owner_name}'s life around this date.\n\ + - Use recall_facts_for_photo and recall_entities early to load any prior knowledge about subjects in this photo.\n\ + - {knowledge_write_line}\n\ + - If a tool returns no results, that's useful information — pivot to a different tool, don't retry the same call.\n\ + - You have a hard budget of {max_iterations} tool-calling iterations. When the budget is nearly exhausted, stop calling tools and write the final insight.", owner_id_note = owner_id_note, fewshot_block = fewshot_block, owner_name = owner_name, + knowledge_write_line = knowledge_write_line, max_iterations = max_iterations ); + // Custom prompts are *appended* under an explicit override heading so + // they actually beat the base instructions. Prepending was the wrong + // default — later instructions tend to win attention. let system_content = if let Some(ref custom) = custom_system_prompt { - format!("{}\n\n{}", custom, base_system) + format!( + "{}\n\n## User overrides (these take precedence over the instructions above)\n\n{}", + base_system, custom + ) } else { - base_system.to_string() + base_system }; // 9. 
Build user message - let gps_info = exif - .as_ref() - .and_then(|e| { - if let (Some(lat), Some(lon)) = (e.gps_latitude, e.gps_longitude) { - Some(format!("GPS: {:.4}, {:.4}", lat, lon)) - } else { - None - } - }) - .unwrap_or_else(|| "GPS: unknown".to_string()); + // Compose a single Location: line with both the resolved name and the + // raw coordinates. Falls back to bare GPS when the geocoders failed, + // and to "Location: unknown" when there are no coordinates at all. + let location_info = match (resolved_location.as_deref(), exif.as_ref()) { + (Some(name), Some(e)) if e.gps_latitude.is_some() && e.gps_longitude.is_some() => { + format!( + "Location: {} (GPS {:.4}, {:.4})", + name, + e.gps_latitude.unwrap(), + e.gps_longitude.unwrap() + ) + } + (None, Some(e)) if e.gps_latitude.is_some() && e.gps_longitude.is_some() => { + format!( + "Location: GPS {:.4}, {:.4} (geocoder unavailable)", + e.gps_latitude.unwrap(), + e.gps_longitude.unwrap() + ) + } + _ => "Location: unknown".to_string(), + }; let tags_info = if tag_names.is_empty() { "Tags: none".to_string() @@ -3348,24 +3766,40 @@ Return ONLY the summary, nothing else."#, .map(|c| format!("Contact/Person: {}", c)) .unwrap_or_else(|| "Contact/Person: unknown".to_string()); + // Hybrid mode: the chat model never receives the image bytes, so we + // inline the visual description as text and explicitly tell the model + // not to call describe_photo (the tool is gated off in hybrid anyway). let visual_block = hybrid_visual_description .as_deref() - .map(|d| format!("Visual description (from local vision model):\n{}\n\n", d)) + .map(|d| { + format!( + "Visual description (already generated for you — do not call describe_photo):\n{}\n\n", + d + ) + }) .unwrap_or_default(); + // Format date with weekday + the canonical-date source so the model + // can temper claims when the date is filename- or fs_time-derived. 
+ let date_line = format!( + "Date taken: {} (source: {})", + date_taken.format("%A, %B %d, %Y"), + date_taken_source + ); + let user_content = format!( "{visual_block}Please analyze this photo and gather any relevant context from the surrounding weeks.\n\n\ Photo file path: {}\n\ - Date taken: {}\n\ + {}\n\ {}\n\ {}\n\ {}\n\n\ Use the available tools to gather more context about this moment (messages, calendar events, location history, etc.), \ then write a detailed insight with a title and summary.", file_path, - date_taken.format("%B %d, %Y"), + date_line, contact_info, - gps_info, + location_info, tags_info, visual_block = visual_block, ); @@ -3373,8 +3807,11 @@ Return ONLY the summary, nothing else."#, // 10. Define tools. Hybrid mode omits `describe_photo` since the // chat model receives the visual description inline. let offer_describe_tool = has_vision && !is_hybrid; - let tools = - Self::build_tool_definitions(offer_describe_tool, self.apollo_client.is_enabled()); + let tools = Self::build_tool_definitions( + offer_describe_tool, + self.apollo_client.is_enabled(), + disable_writes, + ); // 11. Build initial messages. In hybrid mode images are never // attached to the wire message — the description is part of @@ -3397,6 +3834,11 @@ Return ONLY the summary, nothing else."#, let loop_span = tracer.start_with_context("ai.agentic.loop", &insight_cx); let loop_cx = insight_cx.with_span(loop_span); + // Memoize describe_photo for the lifetime of this loop. Vision + // describes are non-deterministic; without a cache, retries leave + // the chat history with conflicting descriptions of the same image. 
+ let describe_cache: tokio::sync::Mutex> = tokio::sync::Mutex::new(None); + let mut final_content = String::new(); let mut iterations_used = 0usize; let mut last_prompt_eval_count: Option = None; @@ -3450,6 +3892,7 @@ Return ONLY the summary, nothing else."#, &image_base64, &file_path, &loop_cx, + Some(&describe_cache), ) .await; messages.push(ChatMessage::tool_result(result)); diff --git a/src/ai/sms_client.rs b/src/ai/sms_client.rs index ad6d28e..1fd8b2d 100644 --- a/src/ai/sms_client.rs +++ b/src/ai/sms_client.rs @@ -20,22 +20,24 @@ impl SmsApiClient { } } - /// Fetch messages for a specific contact within ±4 days of the given timestamp - /// Falls back to all contacts if no messages found for the specific contact - /// Messages are sorted by proximity to the center timestamp + /// Fetch messages for a specific contact within ±`days_radius` days of + /// the given timestamp (defaults to ±4 days when `None`). Falls back to + /// all contacts if no messages are found for the specified contact. + /// Messages are sorted by proximity to the center timestamp. 
pub async fn fetch_messages_for_contact( &self, contact: Option<&str>, center_timestamp: i64, + days_radius: Option, ) -> Result> { use chrono::Duration; - // Calculate ±4 days range around the center timestamp + let radius = days_radius.unwrap_or(4).clamp(1, 30); let center_dt = chrono::DateTime::from_timestamp(center_timestamp, 0) .ok_or_else(|| anyhow::anyhow!("Invalid timestamp"))?; - let start_dt = center_dt - Duration::days(4); - let end_dt = center_dt + Duration::days(4); + let start_dt = center_dt - Duration::days(radius); + let end_dt = center_dt + Duration::days(radius); let start_ts = start_dt.timestamp(); let end_ts = end_dt.timestamp(); @@ -43,8 +45,9 @@ impl SmsApiClient { // If contact specified, try fetching for that contact first if let Some(contact_name) = contact { log::info!( - "Fetching SMS for contact: {} (±4 days from {})", + "Fetching SMS for contact: {} (±{} days from {})", contact_name, + radius, center_dt.format("%Y-%m-%d %H:%M:%S") ); let messages = self @@ -68,7 +71,8 @@ impl SmsApiClient { // Fallback to all contacts log::info!( - "Fetching all SMS messages (±4 days from {})", + "Fetching all SMS messages (±{} days from {})", + radius, center_dt.format("%Y-%m-%d %H:%M:%S") ); self.fetch_messages(start_ts, end_ts, None, Some(center_timestamp)) diff --git a/src/bin/populate_knowledge.rs b/src/bin/populate_knowledge.rs index 9c55e60..a33a9f1 100644 --- a/src/bin/populate_knowledge.rs +++ b/src/bin/populate_knowledge.rs @@ -331,6 +331,7 @@ async fn main() -> anyhow::Result<()> { None, Vec::new(), Vec::new(), + false, // disable_writes — keep KB writes on for the population job ) .await { diff --git a/src/files.rs b/src/files.rs index 7db246f..90d6eca 100644 --- a/src/files.rs +++ b/src/files.rs @@ -1718,7 +1718,12 @@ mod tests { // Mock — files.rs tests don't exercise the date-override endpoints. // Returning a synthetic row keeps the trait satisfied without // depending on private DbError constructors. 
- Ok(mock_exif_row(library_id, rel_path, Some(date_taken), Some("manual".to_string()))) + Ok(mock_exif_row( + library_id, + rel_path, + Some(date_taken), + Some("manual".to_string()), + )) } fn clear_manual_date_taken( diff --git a/src/main.rs b/src/main.rs index 68f8f61..3b18fbe 100644 --- a/src/main.rs +++ b/src/main.rs @@ -995,10 +995,8 @@ async fn upload_image( } }; let perceptual = perceptual_hash::compute(&uploaded_path); - let resolved_date = date_resolver::resolve_date_taken( - &uploaded_path, - exif_data.date_taken, - ); + let resolved_date = + date_resolver::resolve_date_taken(&uploaded_path, exif_data.date_taken); let insert_exif = InsertImageExif { library_id: target_library.id, file_path: relative_path.clone(), @@ -1022,8 +1020,7 @@ async fn upload_image( size_bytes, phash_64: perceptual.map(|h| h.phash_64), dhash_64: perceptual.map(|h| h.dhash_64), - date_taken_source: resolved_date - .map(|r| r.source.as_str().to_string()), + date_taken_source: resolved_date.map(|r| r.source.as_str().to_string()), }; if let Ok(mut dao) = exif_dao.lock() {