chore: apply cargo fmt + clippy cleanup across crate

Silence forward-looking dead_code on unused DAO modules, annotate
individual placeholder items, rewrite tautological assert!(true/false)
in token tests as panic! arms, and pick up fmt drift.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Cameron
2026-04-18 16:50:15 -04:00
committed by cameron
parent a0f3bfab5f
commit c2ee3996be
22 changed files with 106 additions and 67 deletions

View File

@@ -503,7 +503,10 @@ pub async fn export_training_data_handler(
HttpResponse::Ok()
.content_type("application/jsonl")
.insert_header(("Content-Disposition", "attachment; filename=\"training_data.jsonl\""))
.insert_header((
"Content-Disposition",
"attachment; filename=\"training_data.jsonl\"",
))
.body(jsonl)
}
Err(e) => {

View File

@@ -1827,32 +1827,32 @@ Return ONLY the summary, nothing else."#,
// For each linked entity, fetch its facts
for entity_id in entity_ids {
if let Ok(entity) = kdao.get_entity_by_id(cx, entity_id) {
if let Some(e) = entity {
let role = links
.iter()
.find(|l| l.entity_id == entity_id)
.map(|l| l.role.as_str())
.unwrap_or("subject");
output_lines.push(format!(
"Entity: {} ({}, role: {})",
e.name, e.entity_type, role
));
if let Ok(facts) = kdao.get_facts_for_entity(cx, entity_id) {
for f in facts.iter().filter(|f| f.status == "active") {
let obj = if let Some(ref v) = f.object_value {
v.clone()
} else if let Some(oid) = f.object_entity_id {
kdao.get_entity_by_id(cx, oid)
.ok()
.flatten()
.map(|e| format!("{} (entity ID: {})", e.name, e.id))
.unwrap_or_else(|| format!("entity:{}", oid))
} else {
"(unknown)".to_string()
};
output_lines.push(format!(" - {} {}", f.predicate, obj));
}
if let Ok(entity) = kdao.get_entity_by_id(cx, entity_id)
&& let Some(e) = entity
{
let role = links
.iter()
.find(|l| l.entity_id == entity_id)
.map(|l| l.role.as_str())
.unwrap_or("subject");
output_lines.push(format!(
"Entity: {} ({}, role: {})",
e.name, e.entity_type, role
));
if let Ok(facts) = kdao.get_facts_for_entity(cx, entity_id) {
for f in facts.iter().filter(|f| f.status == "active") {
let obj = if let Some(ref v) = f.object_value {
v.clone()
} else if let Some(oid) = f.object_entity_id {
kdao.get_entity_by_id(cx, oid)
.ok()
.flatten()
.map(|e| format!("{} (entity ID: {})", e.name, e.id))
.unwrap_or_else(|| format!("entity:{}", oid))
} else {
"(unknown)".to_string()
};
output_lines.push(format!(" - {} {}", f.predicate, obj));
}
}
}
@@ -1902,8 +1902,8 @@ Return ONLY the summary, nothing else."#,
// those already). Results are appended to the tool response so the
// model can choose to use an existing entity's ID instead.
let similar_entities: Vec<String> = {
use crate::database::EntityFilter;
use crate::database::knowledge_dao::normalize_entity_type;
use crate::database::{EntityFilter, KnowledgeDao};
let normalised_type = normalize_entity_type(&entity_type);
let first_token = name.split_whitespace().next().unwrap_or(&name).to_string();
let filter = EntityFilter {

View File

@@ -120,6 +120,7 @@ impl OllamaClient {
/// Replace the HTTP client with one using a custom request timeout.
/// Useful for slow models where the default 120s may be insufficient.
#[allow(dead_code)]
pub fn with_request_timeout(mut self, secs: u64) -> Self {
self.client = Client::builder()
.connect_timeout(Duration::from_secs(5))
@@ -174,6 +175,7 @@ impl OllamaClient {
}
/// Clear the model list cache for a specific URL or all URLs
#[allow(dead_code)]
pub fn clear_model_cache(url: Option<&str>) {
let mut cache = MODEL_LIST_CACHE.lock().unwrap();
if let Some(url) = url {
@@ -186,6 +188,7 @@ impl OllamaClient {
}
/// Clear the model capabilities cache for a specific URL or all URLs
#[allow(dead_code)]
pub fn clear_capabilities_cache(url: Option<&str>) {
let mut cache = MODEL_CAPABILITIES_CACHE.lock().unwrap();
if let Some(url) = url {
@@ -992,7 +995,6 @@ struct OllamaEmbedResponse {
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn generate_photo_description_prompt_is_concise() {

View File

@@ -68,8 +68,7 @@ fn main() -> anyhow::Result<()> {
.join(", ")
);
let dao: Arc<Mutex<Box<dyn ExifDao>>> =
Arc::new(Mutex::new(Box::new(SqliteExifDao::new())));
let dao: Arc<Mutex<Box<dyn ExifDao>>> = Arc::new(Mutex::new(Box::new(SqliteExifDao::new())));
let ctx = opentelemetry::Context::new();
let mut total_hashed = 0u64;
@@ -98,15 +97,13 @@ fn main() -> anyhow::Result<()> {
.get(&library_id)
.map(|lib| Path::new(&lib.root_path).join(&rel_path));
match abs {
Some(abs_path) if abs_path.exists() => {
match content_hash::compute(&abs_path) {
Ok(id) => (library_id, rel_path, Some(id)),
Err(e) => {
eprintln!("hash error for {}: {:?}", abs_path.display(), e);
(library_id, rel_path, None)
}
Some(abs_path) if abs_path.exists() => match content_hash::compute(&abs_path) {
Ok(id) => (library_id, rel_path, Some(id)),
Err(e) => {
eprintln!("hash error for {}: {:?}", abs_path.display(), e);
(library_id, rel_path, None)
}
}
},
Some(_) => (library_id, rel_path, None), // file missing on disk
None => {
eprintln!("Row refers to unknown library_id {}", library_id);

View File

@@ -53,13 +53,18 @@ pub fn thumbnail_path(thumbs_dir: &Path, hash: &str) -> PathBuf {
/// Hash-keyed HLS output directory: `<video_dir>/<hash[..2]>/<hash>/`.
/// The playlist lives at `playlist.m3u8` inside this directory and its
/// segments are co-located so HLS relative references Just Work.
#[allow(dead_code)]
pub fn hls_dir(video_dir: &Path, hash: &str) -> PathBuf {
    // Layout: <video_dir>/<shard>/<hash>/ — the two-char shard keeps any
    // single directory from accumulating every HLS output.
    video_dir.join(shard_prefix(hash)).join(hash)
}
/// First (up to) two characters of `hash`, used as a fan-out shard
/// directory name.
///
/// Char-boundary safe: the end index comes from `char_indices`, so the
/// slice never splits a multi-byte character. Hashes shorter than two
/// characters return the whole string.
fn shard_prefix(hash: &str) -> &str {
    // Byte offset of the third char, or the string's end if there are
    // fewer than three chars.
    let end = hash
        .char_indices()
        .nth(2)
        .map(|(i, _)| i)
        .unwrap_or(hash.len());
    &hash[..end]
}

View File

@@ -191,6 +191,7 @@ pub struct ThumbnailRequest {
#[allow(dead_code)] // Part of API contract, may be used in future
pub(crate) format: Option<ThumbnailFormat>,
#[serde(default)]
#[allow(dead_code)] // Part of API contract, may be used in future
pub(crate) shape: Option<ThumbnailShape>,
/// Optional library filter. Accepts a library id (e.g. "1") or name
/// (e.g. "main"). When omitted, defaults to the primary library.
@@ -434,11 +435,8 @@ mod tests {
);
match err.unwrap_err().into_kind() {
ErrorKind::ExpiredSignature => assert!(true),
kind => {
println!("Unexpected error: {:?}", kind);
assert!(false)
}
ErrorKind::ExpiredSignature => {}
kind => panic!("Unexpected error: {:?}", kind),
}
}
@@ -447,11 +445,8 @@ mod tests {
let err = Claims::from_str("uni-֍ՓՓՓՓՓՓՓՓՓՓՓՓՓՓՓ");
match err.unwrap_err().into_kind() {
ErrorKind::InvalidToken => assert!(true),
kind => {
println!("Unexpected error: {:?}", kind);
assert!(false)
}
ErrorKind::InvalidToken => {}
kind => panic!("Unexpected error: {:?}", kind),
}
}

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use serde::Serialize;

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use chrono::NaiveDate;
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

View File

@@ -31,6 +31,7 @@ pub trait InsightDao: Sync + Send {
paths: &[String],
) -> Result<Option<PhotoInsight>, DbError>;
#[allow(dead_code)]
fn get_insight_history(
&mut self,
context: &opentelemetry::Context,
@@ -79,6 +80,7 @@ impl SqliteInsightDao {
}
#[cfg(test)]
#[allow(dead_code)]
pub fn from_connection(conn: Arc<Mutex<SqliteConnection>>) -> Self {
SqliteInsightDao { connection: conn }
}

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::ops::DerefMut;
@@ -230,7 +232,7 @@ impl SqliteKnowledgeDao {
}
fn deserialize_embedding(bytes: &[u8]) -> Result<Vec<f32>, DbError> {
if bytes.len() % 4 != 0 {
if !bytes.len().is_multiple_of(4) {
return Err(DbError::new(DbErrorKind::QueryError));
}
Ok(bytes
@@ -535,7 +537,6 @@ impl KnowledgeDao for SqliteKnowledgeDao {
conn.transaction::<(i64, i64), diesel::result::Error, _>(|conn| {
use schema::entity_facts::dsl as ef;
use schema::entity_photo_links::dsl as epl;
// 1. Re-point facts where source is subject
let facts_updated =

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use serde::Serialize;

View File

@@ -155,7 +155,9 @@ pub trait FavoriteDao: Sync + Send {
fn add_favorite(&mut self, user_id: i32, favorite_path: &str) -> Result<usize, DbError>;
fn remove_favorite(&mut self, user_id: i32, favorite_path: String);
fn get_favorites(&mut self, user_id: i32) -> Result<Vec<Favorite>, DbError>;
#[allow(dead_code)]
fn update_path(&mut self, old_path: &str, new_path: &str) -> Result<(), DbError>;
#[allow(dead_code)]
fn get_all_paths(&mut self) -> Result<Vec<String>, DbError>;
}
@@ -239,6 +241,7 @@ impl FavoriteDao for SqliteFavoriteDao {
}
}
#[allow(dead_code)]
pub trait ExifDao: Sync + Send {
fn store_exif(
&mut self,
@@ -306,6 +309,7 @@ pub trait ExifDao: Sync + Send {
/// Get all photos with GPS coordinates
/// Returns Vec<(file_path, latitude, longitude, date_taken)>
#[allow(clippy::type_complexity)]
fn get_all_with_gps(
&mut self,
context: &opentelemetry::Context,
@@ -680,7 +684,6 @@ impl ExifDao for SqliteExifDao {
.map_err(|_| DbError::new(DbErrorKind::QueryError))
}
fn get_all_with_gps(
&mut self,
context: &opentelemetry::Context,

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::ops::DerefMut;

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use serde::Serialize;

View File

@@ -16,7 +16,6 @@ use crate::file_types;
use crate::geo::{gps_bounding_box, haversine_distance};
use crate::memories::extract_date_from_filename;
use crate::{AppState, create_thumbnails};
use actix_web::dev::ResourcePath;
use actix_web::web::Data;
use actix_web::{
HttpRequest, HttpResponse,
@@ -1242,6 +1241,7 @@ mod tests {
}
impl FakeFileSystem {
#[allow(dead_code)]
fn with_error() -> FakeFileSystem {
FakeFileSystem {
files: HashMap::new(),

View File

@@ -1,3 +1,5 @@
#![allow(clippy::too_many_arguments)]
#[macro_use]
extern crate diesel;

View File

@@ -32,6 +32,7 @@ impl Library {
/// Resolve a library-relative path into an absolute `PathBuf` under the
/// library root. Does not validate traversal — use `is_valid_full_path`
/// for untrusted input.
#[allow(dead_code)]
pub fn resolve(&self, rel_path: &str) -> PathBuf {
    // Plain join onto the configured root; traversal validation is the
    // caller's job (see `is_valid_full_path`).
    let root = Path::new(&self.root_path);
    root.join(rel_path)
}
@@ -39,6 +40,7 @@ impl Library {
/// Inverse of `resolve`: given an absolute path under this library's
/// root, return the root-relative portion. Returns `None` if the path
/// is not under the library.
#[allow(dead_code)]
pub fn strip_root(&self, abs_path: &Path) -> Option<String> {
abs_path
.strip_prefix(&self.root_path)
@@ -99,10 +101,7 @@ pub fn seed_or_patch_from_env(conn: &mut SqliteConnection, base_path: &str) {
// If no rows exist at all (e.g. table created outside the seeded migration),
// insert a primary library pointing at BASE_PATH.
let total: i64 = libraries::table
.count()
.get_result(conn)
.unwrap_or(0);
let total: i64 = libraries::table.count().get_result(conn).unwrap_or(0);
if total == 0 {
let now = Utc::now().timestamp();
let result = diesel::insert_into(libraries::table)
@@ -113,7 +112,10 @@ pub fn seed_or_patch_from_env(conn: &mut SqliteConnection, base_path: &str) {
})
.execute(conn);
match result {
Ok(_) => info!("Seeded primary library 'main' with BASE_PATH='{}'", base_path),
Ok(_) => info!(
"Seeded primary library 'main' with BASE_PATH='{}'",
base_path
),
Err(e) => warn!("Failed to seed primary library: {:?}", e),
}
}

View File

@@ -1,3 +1,5 @@
#![allow(clippy::too_many_arguments)]
#[macro_use]
extern crate diesel;
extern crate rayon;

View File

@@ -539,10 +539,7 @@ pub async fn list_memories(
// Resolve the optional library filter. Unknown values are a 400; None
// means "all libraries" — currently equivalent to the primary library
// while only one is configured.
let library = match crate::libraries::resolve_library_param(
&app_state,
q.library.as_deref(),
) {
let library = match crate::libraries::resolve_library_param(&app_state, q.library.as_deref()) {
Ok(lib) => lib,
Err(msg) => {
warn!("Rejecting /memories request: {}", msg);
@@ -823,7 +820,7 @@ mod tests {
// Verify timestamp is within expected range (should be around 1422489671)
let timestamp = date_time.timestamp();
assert!(timestamp >= 1422480000 && timestamp <= 1422576000); // Jan 28-29, 2015
assert!((1422480000..=1422576000).contains(&timestamp)); // Jan 28-29, 2015
}
#[test]
@@ -841,7 +838,7 @@ mod tests {
// Verify timestamp is within expected range (should be around 1422489664)
let timestamp = date_time.timestamp();
assert!(timestamp >= 1422480000 && timestamp <= 1422576000); // Jan 28-29, 2015
assert!((1422480000..=1422576000).contains(&timestamp)); // Jan 28-29, 2015
}
#[test]
@@ -1120,7 +1117,7 @@ mod tests {
.and_utc()
.timestamp();
let mut memories_with_dates = vec![
let mut memories_with_dates = [
(
MemoryItem {
path: "photo1.jpg".to_string(),

View File

@@ -342,12 +342,14 @@ pub trait TagDao: Send + Sync {
exclude_tag_ids: Vec<i32>,
context: &opentelemetry::Context,
) -> anyhow::Result<Vec<FileWithTagCount>>;
#[allow(dead_code)]
fn update_photo_name(
&mut self,
old_name: &str,
new_name: &str,
context: &opentelemetry::Context,
) -> anyhow::Result<()>;
#[allow(dead_code)]
fn get_all_photo_names(
&mut self,
context: &opentelemetry::Context,
@@ -364,6 +366,7 @@ pub struct SqliteTagDao {
}
impl SqliteTagDao {
#[allow(dead_code)]
pub(crate) fn new(connection: Arc<Mutex<SqliteConnection>>) -> Self {
SqliteTagDao { connection }
}

View File

@@ -14,6 +14,12 @@ pub struct TestUserDao {
pub user_map: RefCell<Vec<User>>,
}
impl Default for TestUserDao {
    /// Delegates to [`TestUserDao::new`] so the two constructors cannot
    /// drift apart (satisfies clippy's `new_without_default`).
    fn default() -> Self {
        Self::new()
    }
}
impl TestUserDao {
pub fn new() -> Self {
Self {
@@ -71,6 +77,12 @@ pub struct TestPreviewDao {
next_id: StdMutex<i32>,
}
impl Default for TestPreviewDao {
    /// Delegates to [`TestPreviewDao::new`] so the two constructors cannot
    /// drift apart (satisfies clippy's `new_without_default`).
    fn default() -> Self {
        Self::new()
    }
}
impl TestPreviewDao {
pub fn new() -> Self {
Self {

View File

@@ -40,7 +40,10 @@ pub struct Ffmpeg;
/// Which kind of animated GIF preview to generate.
pub enum GifType {
    /// Overview preview of a still/photo source.
    Overview,
    /// Overview preview of a video source; `duration` bounds the span
    /// sampled — presumably seconds; confirm at the call site.
    #[allow(dead_code)]
    OverviewVideo { duration: u32 },
}
impl Ffmpeg {