diff --git a/.idea/sqldialects.xml b/.idea/sqldialects.xml
deleted file mode 100644
index 4e3aa16..0000000
--- a/.idea/sqldialects.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/src/ai/insight_generator.rs b/src/ai/insight_generator.rs
index b032b21..77ac02a 100644
--- a/src/ai/insight_generator.rs
+++ b/src/ai/insight_generator.rs
@@ -1011,18 +1011,7 @@ impl InsightGenerator {
None
};
- // 10. Generate title and summary with Ollama (using multi-source context + image if supported)
- let title = ollama_client
- .generate_photo_title(
- date_taken,
- location.as_deref(),
- contact.as_deref(),
- Some(&combined_context),
- custom_system_prompt.as_deref(),
- image_base64.clone(),
- )
- .await?;
-
+ // 10. Generate summary first, then derive title from the summary
let summary = ollama_client
.generate_photo_summary(
date_taken,
@@ -1034,6 +1023,10 @@ impl InsightGenerator {
)
.await?;
+ let title = ollama_client
+ .generate_photo_title(&summary, custom_system_prompt.as_deref())
+ .await?;
+
log::info!("Generated title: {}", title);
log::info!("Generated summary: {}", summary);
diff --git a/src/ai/ollama.rs b/src/ai/ollama.rs
index c7da48f..0c7f36b 100644
--- a/src/ai/ollama.rs
+++ b/src/ai/ollama.rs
@@ -373,94 +373,25 @@ impl OllamaClient {
Ok(cleaned)
}
- /// Generate a title for a single photo based on its context
+ /// Generate a title for a single photo based on its generated summary
pub async fn generate_photo_title(
&self,
- date: NaiveDate,
- location: Option<&str>,
- contact: Option<&str>,
- sms_summary: Option<&str>,
+ summary: &str,
custom_system: Option<&str>,
- image_base64: Option<String>,
) -> Result<String> {
- let location_str = location.unwrap_or("Unknown location");
- let sms_str = sms_summary.unwrap_or("No messages");
+ let prompt = format!(
+ r#"Create a short title (maximum 8 words) for the following journal entry:
- let prompt = if image_base64.is_some() {
- if let Some(contact_name) = contact {
- format!(
- r#"Create a short title (maximum 8 words) about this moment by analyzing the image and context:
+{}
-Date: {}
-Location: {}
-Person/Contact: {}
-Messages: {}
-
-Analyze the image and use specific details from both the visual content and the context above. The photo is from a folder for {}, so they are likely in or related to this photo. If limited information is available, use a simple descriptive title based on what you see.
-
-Return ONLY the title, nothing else."#,
- date.format("%B %d, %Y"),
- location_str,
- contact_name,
- sms_str,
- contact_name
- )
- } else {
- format!(
- r#"Create a short title (maximum 8 words) about this moment by analyzing the image and context:
-
-Date: {}
-Location: {}
-Messages: {}
-
-Analyze the image and use specific details from both the visual content and the context above. If limited information is available, use a simple descriptive title based on what you see.
-
-Return ONLY the title, nothing else."#,
- date.format("%B %d, %Y"),
- location_str,
- sms_str
- )
- }
- } else if let Some(contact_name) = contact {
- format!(
- r#"Create a short title (maximum 8 words) about this moment:
-
- Date: {}
- Location: {}
- Person/Contact: {}
- Messages: {}
-
- Use specific details from the context above. The photo is from a folder for {}, so they are likely related to this moment. If no specific details are available, use a simple descriptive title.
-
- Return ONLY the title, nothing else."#,
- date.format("%B %d, %Y"),
- location_str,
- contact_name,
- sms_str,
- contact_name
- )
- } else {
- format!(
- r#"Create a short title (maximum 8 words) about this moment:
-
- Date: {}
- Location: {}
- Messages: {}
-
- Use specific details from the context above. If no specific details are available, use a simple descriptive title.
-
- Return ONLY the title, nothing else."#,
- date.format("%B %d, %Y"),
- location_str,
- sms_str
- )
- };
+Capture the key moment or theme. Return ONLY the title, nothing else."#,
+ summary
+ );
let system = custom_system.unwrap_or("You are my long term memory assistant. Use only the information provided. Do not invent details.");
- let images = image_base64.map(|img| vec![img]);
let title = self
- .generate_with_images(&prompt, Some(system), images)
+ .generate_with_images(&prompt, Some(system), None)
.await?;
Ok(title.trim().trim_matches('"').to_string())
}
diff --git a/src/files.rs b/src/files.rs
index 264f796..107b16a 100644
--- a/src/files.rs
+++ b/src/files.rs
@@ -1471,9 +1471,7 @@ mod tests {
Data::new(AppState::test_state()),
Data::new(RealFileSystem::new(temp_dir.to_str().unwrap().to_string())),
Data::new(Mutex::new(SqliteTagDao::default())),
- Data::new(Mutex::new(
- Box::new(MockExifDao) as Box<dyn ExifDao>
- )),
+ Data::new(Mutex::new(Box::new(MockExifDao) as Box<dyn ExifDao>)),
)
.await;
@@ -1518,9 +1516,7 @@ mod tests {
Data::new(AppState::test_state()),
Data::new(FakeFileSystem::new(HashMap::new())),
Data::new(Mutex::new(tag_dao)),
- Data::new(Mutex::new(
- Box::new(MockExifDao) as Box<dyn ExifDao>
- )),
+ Data::new(Mutex::new(Box::new(MockExifDao) as Box<dyn ExifDao>)),
)
.await;
@@ -1581,9 +1577,7 @@ mod tests {
Data::new(AppState::test_state()),
Data::new(FakeFileSystem::new(HashMap::new())),
Data::new(Mutex::new(tag_dao)),
- Data::new(Mutex::new(
- Box::new(MockExifDao) as Box<dyn ExifDao>
- )),
+ Data::new(Mutex::new(Box::new(MockExifDao) as Box<dyn ExifDao>)),
)
.await;
diff --git a/src/main.rs b/src/main.rs
index 529b97d..c610362 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1044,10 +1044,12 @@ fn cleanup_orphaned_playlists() {
.filter(|e| e.file_type().is_file())
{
if let Some(entry_stem) = entry.path().file_stem()
- && entry_stem == filename && is_video_file(entry.path()) {
- video_exists = true;
- break;
- }
+ && entry_stem == filename
+ && is_video_file(entry.path())
+ {
+ video_exists = true;
+ break;
+ }
}
if !video_exists {
@@ -1078,27 +1080,27 @@ fn cleanup_orphaned_playlists() {
{
let entry_path = entry.path();
if let Some(ext) = entry_path.extension()
- && ext.eq_ignore_ascii_case("ts") {
- // Check if this .ts file belongs to our playlist
- if let Some(ts_stem) = entry_path.file_stem() {
- let ts_name = ts_stem.to_string_lossy();
- if ts_name.starts_with(&*video_filename) {
- if let Err(e) = std::fs::remove_file(entry_path)
- {
- debug!(
- "Failed to delete segment {}: {}",
- entry_path.display(),
- e
- );
- } else {
- debug!(
- "Deleted segment: {}",
- entry_path.display()
- );
- }
+ && ext.eq_ignore_ascii_case("ts")
+ {
+ // Check if this .ts file belongs to our playlist
+ if let Some(ts_stem) = entry_path.file_stem() {
+ let ts_name = ts_stem.to_string_lossy();
+ if ts_name.starts_with(&*video_filename) {
+ if let Err(e) = std::fs::remove_file(entry_path) {
+ debug!(
+ "Failed to delete segment {}: {}",
+ entry_path.display(),
+ e
+ );
+ } else {
+ debug!(
+ "Deleted segment: {}",
+ entry_path.display()
+ );
}
}
}
+ }
}
}
}
@@ -1206,12 +1208,11 @@ fn playlist_needs_generation(video_path: &Path, playlist_path: &Path) -> bool {
if let (Ok(video_meta), Ok(playlist_meta)) = (
std::fs::metadata(video_path),
std::fs::metadata(playlist_path),
- )
- && let (Ok(video_modified), Ok(playlist_modified)) =
- (video_meta.modified(), playlist_meta.modified())
- {
- return video_modified > playlist_modified;
- }
+ ) && let (Ok(video_modified), Ok(playlist_modified)) =
+ (video_meta.modified(), playlist_meta.modified())
+ {
+ return video_modified > playlist_modified;
+ }
// If we can't determine, assume it needs generation
true
diff --git a/src/video/actors.rs b/src/video/actors.rs
index 5aae520..32bb35f 100644
--- a/src/video/actors.rs
+++ b/src/video/actors.rs
@@ -156,20 +156,20 @@ async fn get_video_rotation(video_path: &str) -> i32 {
.output()
.await;
- if let Ok(output) = output {
- if output.status.success() {
+ if let Ok(output) = output
+ && output.status.success() {
let rotation_str = String::from_utf8_lossy(&output.stdout);
let rotation_str = rotation_str.trim();
- if !rotation_str.is_empty() {
- if let Ok(rotation) = rotation_str.parse::<i32>() {
- if rotation != 0 {
- debug!("Detected rotation {}° from stream tag for {}", rotation, video_path);
+ if !rotation_str.is_empty()
+ && let Ok(rotation) = rotation_str.parse::<i32>()
+ && rotation != 0 {
+ debug!(
+ "Detected rotation {}° from stream tag for {}",
+ rotation, video_path
+ );
return rotation;
}
- }
- }
}
- }
// Check display matrix side data (modern videos, e.g. iPhone)
let output = tokio::process::Command::new("ffprobe")
@@ -185,21 +185,22 @@ async fn get_video_rotation(video_path: &str) -> i32 {
.output()
.await;
- if let Ok(output) = output {
- if output.status.success() {
+ if let Ok(output) = output
+ && output.status.success() {
let rotation_str = String::from_utf8_lossy(&output.stdout);
let rotation_str = rotation_str.trim();
- if !rotation_str.is_empty() {
- if let Ok(rotation) = rotation_str.parse::<f64>() {
+ if !rotation_str.is_empty()
+ && let Ok(rotation) = rotation_str.parse::<f64>() {
let rotation = rotation.abs() as i32;
if rotation != 0 {
- debug!("Detected rotation {}° from display matrix for {}", rotation, video_path);
+ debug!(
+ "Detected rotation {}° from display matrix for {}",
+ rotation, video_path
+ );
return rotation;
}
}
- }
}
- }
0
}