chore: apply cargo fmt + clippy cleanup across crate

Silence forward-looking dead_code warnings on as-yet-unused DAO modules, annotate
individual placeholder items with #[allow(dead_code)], rewrite tautological
assert!(true)/assert!(false) match arms in token tests as no-op/panic! arms, and
pick up accumulated rustfmt drift.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Cameron
2026-04-18 16:50:15 -04:00
parent bf4a8a1b43
commit 187c5685af
22 changed files with 106 additions and 67 deletions

View File

@@ -503,7 +503,10 @@ pub async fn export_training_data_handler(
HttpResponse::Ok() HttpResponse::Ok()
.content_type("application/jsonl") .content_type("application/jsonl")
.insert_header(("Content-Disposition", "attachment; filename=\"training_data.jsonl\"")) .insert_header((
"Content-Disposition",
"attachment; filename=\"training_data.jsonl\"",
))
.body(jsonl) .body(jsonl)
} }
Err(e) => { Err(e) => {

View File

@@ -1827,8 +1827,9 @@ Return ONLY the summary, nothing else."#,
// For each linked entity, fetch its facts // For each linked entity, fetch its facts
for entity_id in entity_ids { for entity_id in entity_ids {
if let Ok(entity) = kdao.get_entity_by_id(cx, entity_id) { if let Ok(entity) = kdao.get_entity_by_id(cx, entity_id)
if let Some(e) = entity { && let Some(e) = entity
{
let role = links let role = links
.iter() .iter()
.find(|l| l.entity_id == entity_id) .find(|l| l.entity_id == entity_id)
@@ -1856,7 +1857,6 @@ Return ONLY the summary, nothing else."#,
} }
} }
} }
}
if output_lines.is_empty() { if output_lines.is_empty() {
"No active knowledge facts found for this photo.".to_string() "No active knowledge facts found for this photo.".to_string()
@@ -1902,8 +1902,8 @@ Return ONLY the summary, nothing else."#,
// those already). Results are appended to the tool response so the // those already). Results are appended to the tool response so the
// model can choose to use an existing entity's ID instead. // model can choose to use an existing entity's ID instead.
let similar_entities: Vec<String> = { let similar_entities: Vec<String> = {
use crate::database::EntityFilter;
use crate::database::knowledge_dao::normalize_entity_type; use crate::database::knowledge_dao::normalize_entity_type;
use crate::database::{EntityFilter, KnowledgeDao};
let normalised_type = normalize_entity_type(&entity_type); let normalised_type = normalize_entity_type(&entity_type);
let first_token = name.split_whitespace().next().unwrap_or(&name).to_string(); let first_token = name.split_whitespace().next().unwrap_or(&name).to_string();
let filter = EntityFilter { let filter = EntityFilter {

View File

@@ -120,6 +120,7 @@ impl OllamaClient {
/// Replace the HTTP client with one using a custom request timeout. /// Replace the HTTP client with one using a custom request timeout.
/// Useful for slow models where the default 120s may be insufficient. /// Useful for slow models where the default 120s may be insufficient.
#[allow(dead_code)]
pub fn with_request_timeout(mut self, secs: u64) -> Self { pub fn with_request_timeout(mut self, secs: u64) -> Self {
self.client = Client::builder() self.client = Client::builder()
.connect_timeout(Duration::from_secs(5)) .connect_timeout(Duration::from_secs(5))
@@ -174,6 +175,7 @@ impl OllamaClient {
} }
/// Clear the model list cache for a specific URL or all URLs /// Clear the model list cache for a specific URL or all URLs
#[allow(dead_code)]
pub fn clear_model_cache(url: Option<&str>) { pub fn clear_model_cache(url: Option<&str>) {
let mut cache = MODEL_LIST_CACHE.lock().unwrap(); let mut cache = MODEL_LIST_CACHE.lock().unwrap();
if let Some(url) = url { if let Some(url) = url {
@@ -186,6 +188,7 @@ impl OllamaClient {
} }
/// Clear the model capabilities cache for a specific URL or all URLs /// Clear the model capabilities cache for a specific URL or all URLs
#[allow(dead_code)]
pub fn clear_capabilities_cache(url: Option<&str>) { pub fn clear_capabilities_cache(url: Option<&str>) {
let mut cache = MODEL_CAPABILITIES_CACHE.lock().unwrap(); let mut cache = MODEL_CAPABILITIES_CACHE.lock().unwrap();
if let Some(url) = url { if let Some(url) = url {
@@ -992,7 +995,6 @@ struct OllamaEmbedResponse {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
#[test] #[test]
fn generate_photo_description_prompt_is_concise() { fn generate_photo_description_prompt_is_concise() {

View File

@@ -68,8 +68,7 @@ fn main() -> anyhow::Result<()> {
.join(", ") .join(", ")
); );
let dao: Arc<Mutex<Box<dyn ExifDao>>> = let dao: Arc<Mutex<Box<dyn ExifDao>>> = Arc::new(Mutex::new(Box::new(SqliteExifDao::new())));
Arc::new(Mutex::new(Box::new(SqliteExifDao::new())));
let ctx = opentelemetry::Context::new(); let ctx = opentelemetry::Context::new();
let mut total_hashed = 0u64; let mut total_hashed = 0u64;
@@ -98,15 +97,13 @@ fn main() -> anyhow::Result<()> {
.get(&library_id) .get(&library_id)
.map(|lib| Path::new(&lib.root_path).join(&rel_path)); .map(|lib| Path::new(&lib.root_path).join(&rel_path));
match abs { match abs {
Some(abs_path) if abs_path.exists() => { Some(abs_path) if abs_path.exists() => match content_hash::compute(&abs_path) {
match content_hash::compute(&abs_path) {
Ok(id) => (library_id, rel_path, Some(id)), Ok(id) => (library_id, rel_path, Some(id)),
Err(e) => { Err(e) => {
eprintln!("hash error for {}: {:?}", abs_path.display(), e); eprintln!("hash error for {}: {:?}", abs_path.display(), e);
(library_id, rel_path, None) (library_id, rel_path, None)
} }
} },
}
Some(_) => (library_id, rel_path, None), // file missing on disk Some(_) => (library_id, rel_path, None), // file missing on disk
None => { None => {
eprintln!("Row refers to unknown library_id {}", library_id); eprintln!("Row refers to unknown library_id {}", library_id);

View File

@@ -53,13 +53,18 @@ pub fn thumbnail_path(thumbs_dir: &Path, hash: &str) -> PathBuf {
/// Hash-keyed HLS output directory: `<video_dir>/<hash[..2]>/<hash>/`. /// Hash-keyed HLS output directory: `<video_dir>/<hash[..2]>/<hash>/`.
/// The playlist lives at `playlist.m3u8` inside this directory and its /// The playlist lives at `playlist.m3u8` inside this directory and its
/// segments are co-located so HLS relative references Just Work. /// segments are co-located so HLS relative references Just Work.
#[allow(dead_code)]
pub fn hls_dir(video_dir: &Path, hash: &str) -> PathBuf { pub fn hls_dir(video_dir: &Path, hash: &str) -> PathBuf {
let shard = shard_prefix(hash); let shard = shard_prefix(hash);
video_dir.join(shard).join(hash) video_dir.join(shard).join(hash)
} }
fn shard_prefix(hash: &str) -> &str { fn shard_prefix(hash: &str) -> &str {
let end = hash.char_indices().nth(2).map(|(i, _)| i).unwrap_or(hash.len()); let end = hash
.char_indices()
.nth(2)
.map(|(i, _)| i)
.unwrap_or(hash.len());
&hash[..end] &hash[..end]
} }

View File

@@ -191,6 +191,7 @@ pub struct ThumbnailRequest {
#[allow(dead_code)] // Part of API contract, may be used in future #[allow(dead_code)] // Part of API contract, may be used in future
pub(crate) format: Option<ThumbnailFormat>, pub(crate) format: Option<ThumbnailFormat>,
#[serde(default)] #[serde(default)]
#[allow(dead_code)] // Part of API contract, may be used in future
pub(crate) shape: Option<ThumbnailShape>, pub(crate) shape: Option<ThumbnailShape>,
/// Optional library filter. Accepts a library id (e.g. "1") or name /// Optional library filter. Accepts a library id (e.g. "1") or name
/// (e.g. "main"). When omitted, defaults to the primary library. /// (e.g. "main"). When omitted, defaults to the primary library.
@@ -434,11 +435,8 @@ mod tests {
); );
match err.unwrap_err().into_kind() { match err.unwrap_err().into_kind() {
ErrorKind::ExpiredSignature => assert!(true), ErrorKind::ExpiredSignature => {}
kind => { kind => panic!("Unexpected error: {:?}", kind),
println!("Unexpected error: {:?}", kind);
assert!(false)
}
} }
} }
@@ -447,11 +445,8 @@ mod tests {
let err = Claims::from_str("uni-֍ՓՓՓՓՓՓՓՓՓՓՓՓՓՓՓ"); let err = Claims::from_str("uni-֍ՓՓՓՓՓՓՓՓՓՓՓՓՓՓՓ");
match err.unwrap_err().into_kind() { match err.unwrap_err().into_kind() {
ErrorKind::InvalidToken => assert!(true), ErrorKind::InvalidToken => {}
kind => { kind => panic!("Unexpected error: {:?}", kind),
println!("Unexpected error: {:?}", kind);
assert!(false)
}
} }
} }

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use diesel::prelude::*; use diesel::prelude::*;
use diesel::sqlite::SqliteConnection; use diesel::sqlite::SqliteConnection;
use serde::Serialize; use serde::Serialize;

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use chrono::NaiveDate; use chrono::NaiveDate;
use diesel::prelude::*; use diesel::prelude::*;
use diesel::sqlite::SqliteConnection; use diesel::sqlite::SqliteConnection;

View File

@@ -31,6 +31,7 @@ pub trait InsightDao: Sync + Send {
paths: &[String], paths: &[String],
) -> Result<Option<PhotoInsight>, DbError>; ) -> Result<Option<PhotoInsight>, DbError>;
#[allow(dead_code)]
fn get_insight_history( fn get_insight_history(
&mut self, &mut self,
context: &opentelemetry::Context, context: &opentelemetry::Context,
@@ -79,6 +80,7 @@ impl SqliteInsightDao {
} }
#[cfg(test)] #[cfg(test)]
#[allow(dead_code)]
pub fn from_connection(conn: Arc<Mutex<SqliteConnection>>) -> Self { pub fn from_connection(conn: Arc<Mutex<SqliteConnection>>) -> Self {
SqliteInsightDao { connection: conn } SqliteInsightDao { connection: conn }
} }

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use diesel::prelude::*; use diesel::prelude::*;
use diesel::sqlite::SqliteConnection; use diesel::sqlite::SqliteConnection;
use std::ops::DerefMut; use std::ops::DerefMut;
@@ -230,7 +232,7 @@ impl SqliteKnowledgeDao {
} }
fn deserialize_embedding(bytes: &[u8]) -> Result<Vec<f32>, DbError> { fn deserialize_embedding(bytes: &[u8]) -> Result<Vec<f32>, DbError> {
if bytes.len() % 4 != 0 { if !bytes.len().is_multiple_of(4) {
return Err(DbError::new(DbErrorKind::QueryError)); return Err(DbError::new(DbErrorKind::QueryError));
} }
Ok(bytes Ok(bytes
@@ -535,7 +537,6 @@ impl KnowledgeDao for SqliteKnowledgeDao {
conn.transaction::<(i64, i64), diesel::result::Error, _>(|conn| { conn.transaction::<(i64, i64), diesel::result::Error, _>(|conn| {
use schema::entity_facts::dsl as ef; use schema::entity_facts::dsl as ef;
use schema::entity_photo_links::dsl as epl;
// 1. Re-point facts where source is subject // 1. Re-point facts where source is subject
let facts_updated = let facts_updated =

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use diesel::prelude::*; use diesel::prelude::*;
use diesel::sqlite::SqliteConnection; use diesel::sqlite::SqliteConnection;
use serde::Serialize; use serde::Serialize;

View File

@@ -155,7 +155,9 @@ pub trait FavoriteDao: Sync + Send {
fn add_favorite(&mut self, user_id: i32, favorite_path: &str) -> Result<usize, DbError>; fn add_favorite(&mut self, user_id: i32, favorite_path: &str) -> Result<usize, DbError>;
fn remove_favorite(&mut self, user_id: i32, favorite_path: String); fn remove_favorite(&mut self, user_id: i32, favorite_path: String);
fn get_favorites(&mut self, user_id: i32) -> Result<Vec<Favorite>, DbError>; fn get_favorites(&mut self, user_id: i32) -> Result<Vec<Favorite>, DbError>;
#[allow(dead_code)]
fn update_path(&mut self, old_path: &str, new_path: &str) -> Result<(), DbError>; fn update_path(&mut self, old_path: &str, new_path: &str) -> Result<(), DbError>;
#[allow(dead_code)]
fn get_all_paths(&mut self) -> Result<Vec<String>, DbError>; fn get_all_paths(&mut self) -> Result<Vec<String>, DbError>;
} }
@@ -239,6 +241,7 @@ impl FavoriteDao for SqliteFavoriteDao {
} }
} }
#[allow(dead_code)]
pub trait ExifDao: Sync + Send { pub trait ExifDao: Sync + Send {
fn store_exif( fn store_exif(
&mut self, &mut self,
@@ -306,6 +309,7 @@ pub trait ExifDao: Sync + Send {
/// Get all photos with GPS coordinates /// Get all photos with GPS coordinates
/// Returns Vec<(file_path, latitude, longitude, date_taken)> /// Returns Vec<(file_path, latitude, longitude, date_taken)>
#[allow(clippy::type_complexity)]
fn get_all_with_gps( fn get_all_with_gps(
&mut self, &mut self,
context: &opentelemetry::Context, context: &opentelemetry::Context,
@@ -680,7 +684,6 @@ impl ExifDao for SqliteExifDao {
.map_err(|_| DbError::new(DbErrorKind::QueryError)) .map_err(|_| DbError::new(DbErrorKind::QueryError))
} }
fn get_all_with_gps( fn get_all_with_gps(
&mut self, &mut self,
context: &opentelemetry::Context, context: &opentelemetry::Context,

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use diesel::prelude::*; use diesel::prelude::*;
use diesel::sqlite::SqliteConnection; use diesel::sqlite::SqliteConnection;
use std::ops::DerefMut; use std::ops::DerefMut;

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use diesel::prelude::*; use diesel::prelude::*;
use diesel::sqlite::SqliteConnection; use diesel::sqlite::SqliteConnection;
use serde::Serialize; use serde::Serialize;

View File

@@ -16,7 +16,6 @@ use crate::file_types;
use crate::geo::{gps_bounding_box, haversine_distance}; use crate::geo::{gps_bounding_box, haversine_distance};
use crate::memories::extract_date_from_filename; use crate::memories::extract_date_from_filename;
use crate::{AppState, create_thumbnails}; use crate::{AppState, create_thumbnails};
use actix_web::dev::ResourcePath;
use actix_web::web::Data; use actix_web::web::Data;
use actix_web::{ use actix_web::{
HttpRequest, HttpResponse, HttpRequest, HttpResponse,
@@ -1242,6 +1241,7 @@ mod tests {
} }
impl FakeFileSystem { impl FakeFileSystem {
#[allow(dead_code)]
fn with_error() -> FakeFileSystem { fn with_error() -> FakeFileSystem {
FakeFileSystem { FakeFileSystem {
files: HashMap::new(), files: HashMap::new(),

View File

@@ -1,3 +1,5 @@
#![allow(clippy::too_many_arguments)]
#[macro_use] #[macro_use]
extern crate diesel; extern crate diesel;

View File

@@ -32,6 +32,7 @@ impl Library {
/// Resolve a library-relative path into an absolute `PathBuf` under the /// Resolve a library-relative path into an absolute `PathBuf` under the
/// library root. Does not validate traversal — use `is_valid_full_path` /// library root. Does not validate traversal — use `is_valid_full_path`
/// for untrusted input. /// for untrusted input.
#[allow(dead_code)]
pub fn resolve(&self, rel_path: &str) -> PathBuf { pub fn resolve(&self, rel_path: &str) -> PathBuf {
Path::new(&self.root_path).join(rel_path) Path::new(&self.root_path).join(rel_path)
} }
@@ -39,6 +40,7 @@ impl Library {
/// Inverse of `resolve`: given an absolute path under this library's /// Inverse of `resolve`: given an absolute path under this library's
/// root, return the root-relative portion. Returns `None` if the path /// root, return the root-relative portion. Returns `None` if the path
/// is not under the library. /// is not under the library.
#[allow(dead_code)]
pub fn strip_root(&self, abs_path: &Path) -> Option<String> { pub fn strip_root(&self, abs_path: &Path) -> Option<String> {
abs_path abs_path
.strip_prefix(&self.root_path) .strip_prefix(&self.root_path)
@@ -99,10 +101,7 @@ pub fn seed_or_patch_from_env(conn: &mut SqliteConnection, base_path: &str) {
// If no rows exist at all (e.g. table created outside the seeded migration), // If no rows exist at all (e.g. table created outside the seeded migration),
// insert a primary library pointing at BASE_PATH. // insert a primary library pointing at BASE_PATH.
let total: i64 = libraries::table let total: i64 = libraries::table.count().get_result(conn).unwrap_or(0);
.count()
.get_result(conn)
.unwrap_or(0);
if total == 0 { if total == 0 {
let now = Utc::now().timestamp(); let now = Utc::now().timestamp();
let result = diesel::insert_into(libraries::table) let result = diesel::insert_into(libraries::table)
@@ -113,7 +112,10 @@ pub fn seed_or_patch_from_env(conn: &mut SqliteConnection, base_path: &str) {
}) })
.execute(conn); .execute(conn);
match result { match result {
Ok(_) => info!("Seeded primary library 'main' with BASE_PATH='{}'", base_path), Ok(_) => info!(
"Seeded primary library 'main' with BASE_PATH='{}'",
base_path
),
Err(e) => warn!("Failed to seed primary library: {:?}", e), Err(e) => warn!("Failed to seed primary library: {:?}", e),
} }
} }

View File

@@ -1,3 +1,5 @@
#![allow(clippy::too_many_arguments)]
#[macro_use] #[macro_use]
extern crate diesel; extern crate diesel;
extern crate rayon; extern crate rayon;

View File

@@ -539,10 +539,7 @@ pub async fn list_memories(
// Resolve the optional library filter. Unknown values are a 400; None // Resolve the optional library filter. Unknown values are a 400; None
// means "all libraries" — currently equivalent to the primary library // means "all libraries" — currently equivalent to the primary library
// while only one is configured. // while only one is configured.
let library = match crate::libraries::resolve_library_param( let library = match crate::libraries::resolve_library_param(&app_state, q.library.as_deref()) {
&app_state,
q.library.as_deref(),
) {
Ok(lib) => lib, Ok(lib) => lib,
Err(msg) => { Err(msg) => {
warn!("Rejecting /memories request: {}", msg); warn!("Rejecting /memories request: {}", msg);
@@ -823,7 +820,7 @@ mod tests {
// Verify timestamp is within expected range (should be around 1422489671) // Verify timestamp is within expected range (should be around 1422489671)
let timestamp = date_time.timestamp(); let timestamp = date_time.timestamp();
assert!(timestamp >= 1422480000 && timestamp <= 1422576000); // Jan 28-29, 2015 assert!((1422480000..=1422576000).contains(&timestamp)); // Jan 28-29, 2015
} }
#[test] #[test]
@@ -841,7 +838,7 @@ mod tests {
// Verify timestamp is within expected range (should be around 1422489664) // Verify timestamp is within expected range (should be around 1422489664)
let timestamp = date_time.timestamp(); let timestamp = date_time.timestamp();
assert!(timestamp >= 1422480000 && timestamp <= 1422576000); // Jan 28-29, 2015 assert!((1422480000..=1422576000).contains(&timestamp)); // Jan 28-29, 2015
} }
#[test] #[test]
@@ -1120,7 +1117,7 @@ mod tests {
.and_utc() .and_utc()
.timestamp(); .timestamp();
let mut memories_with_dates = vec![ let mut memories_with_dates = [
( (
MemoryItem { MemoryItem {
path: "photo1.jpg".to_string(), path: "photo1.jpg".to_string(),

View File

@@ -342,12 +342,14 @@ pub trait TagDao: Send + Sync {
exclude_tag_ids: Vec<i32>, exclude_tag_ids: Vec<i32>,
context: &opentelemetry::Context, context: &opentelemetry::Context,
) -> anyhow::Result<Vec<FileWithTagCount>>; ) -> anyhow::Result<Vec<FileWithTagCount>>;
#[allow(dead_code)]
fn update_photo_name( fn update_photo_name(
&mut self, &mut self,
old_name: &str, old_name: &str,
new_name: &str, new_name: &str,
context: &opentelemetry::Context, context: &opentelemetry::Context,
) -> anyhow::Result<()>; ) -> anyhow::Result<()>;
#[allow(dead_code)]
fn get_all_photo_names( fn get_all_photo_names(
&mut self, &mut self,
context: &opentelemetry::Context, context: &opentelemetry::Context,
@@ -364,6 +366,7 @@ pub struct SqliteTagDao {
} }
impl SqliteTagDao { impl SqliteTagDao {
#[allow(dead_code)]
pub(crate) fn new(connection: Arc<Mutex<SqliteConnection>>) -> Self { pub(crate) fn new(connection: Arc<Mutex<SqliteConnection>>) -> Self {
SqliteTagDao { connection } SqliteTagDao { connection }
} }

View File

@@ -14,6 +14,12 @@ pub struct TestUserDao {
pub user_map: RefCell<Vec<User>>, pub user_map: RefCell<Vec<User>>,
} }
impl Default for TestUserDao {
fn default() -> Self {
Self::new()
}
}
impl TestUserDao { impl TestUserDao {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
@@ -71,6 +77,12 @@ pub struct TestPreviewDao {
next_id: StdMutex<i32>, next_id: StdMutex<i32>,
} }
impl Default for TestPreviewDao {
fn default() -> Self {
Self::new()
}
}
impl TestPreviewDao { impl TestPreviewDao {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {

View File

@@ -40,7 +40,10 @@ pub struct Ffmpeg;
pub enum GifType { pub enum GifType {
Overview, Overview,
OverviewVideo { duration: u32 }, #[allow(dead_code)]
OverviewVideo {
duration: u32,
},
} }
impl Ffmpeg { impl Ffmpeg {