Files
ImageApi/src/main.rs
Cameron Cordes 9f8a69fc6d Split main.rs: extract watcher loop into src/watcher.rs
main.rs drops from 1200 to 346 lines — a 90% reduction from the 3542
lines it held before this branch. What's left is the startup wiring it
was always meant to be:
.env, migrations, AppState construction, route registration, server
bind. The four background-loop functions move into src/watcher.rs:

- watch_files (310 lines) — quick/full scan tick, per-library probe,
  backfill drain dispatch, missing-file scan, back-ref refresh,
  orphan GC.
- process_new_files (351 lines) — file walk → EXIF write →
  face-candidate build → HLS / preview-clip queueing →
  reconciliation. The "biggest untested chunk" from the earlier
  audit.
- cleanup_orphaned_playlists (167 lines) — separate slower-tick
  thread.
- playlist_needs_generation — small mtime-comparison helper.

Plus 4 unit tests for playlist_needs_generation (covers missing
playlist, newer playlist, newer video, video-missing-metadata
fallback).

main.rs's imports correspondingly shrink — Addr, HashSet, WalkDir,
Utc, InsertImageExif, and the bulk of video::actors all leave with
the watcher. CLAUDE.md updated to reflect the new module layout
(layered architecture box + module map for the face-detection
section).

cargo test --bin image-api: 329 passing (no regression).

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-12 12:54:37 -04:00

346 lines
14 KiB
Rust

#![allow(clippy::too_many_arguments)]
#[macro_use]
extern crate diesel;
extern crate rayon;
use actix_web::web::Data;
use actix_web_prom::PrometheusMetricsBuilder;
use diesel_migrations::{EmbeddedMigrations, MigrationHarness, embed_migrations};
use std::collections::HashMap;
use std::env;
use std::sync::{Arc, Mutex};
use actix_cors::Cors;
use actix_governor::{Governor, GovernorConfigBuilder};
use actix_multipart as mp;
use actix_web::{App, HttpResponse, HttpServer, middleware, web};
use diesel::sqlite::Sqlite;
use std::error::Error;
use crate::ai::InsightGenerator;
use crate::auth::login;
use crate::data::*;
use crate::database::*;
use crate::files::{RealFileSystem, move_file};
use crate::service::ServiceBuilder;
use crate::state::AppState;
use crate::tags::*;
use crate::video::actors::ScanDirectoryMessage;
use log::{error, info};
mod ai;
mod auth;
mod backfill;
mod content_hash;
mod data;
mod database;
mod date_resolver;
mod duplicates;
mod error;
mod exif;
mod face_watch;
mod faces;
mod file_scan;
mod file_types;
mod files;
mod geo;
mod handlers;
mod libraries;
mod library_maintenance;
mod perceptual_hash;
mod state;
mod tags;
mod thumbnails;
mod utils;
mod video;
mod watcher;
mod knowledge;
mod memories;
mod otel;
mod personas;
mod service;
#[cfg(test)]
mod testhelpers;
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!();
/// Application entry point — startup wiring only.
///
/// Loads `.env`, applies pending database migrations, constructs the shared
/// [`AppState`], spawns the background jobs (thumbnail sweep, file watcher,
/// orphaned-playlist cleanup, daily conversation summaries), then registers
/// the HTTP routes and runs the server until shutdown.
fn main() -> std::io::Result<()> {
    // .env is optional (e.g. containers configured via real env vars), so a
    // load failure is reported but not fatal. println! rather than log::…
    // because no logger is initialized yet at this point.
    if let Err(err) = dotenv::dotenv() {
        println!("Error parsing .env {:?}", err);
    }
    run_migrations(&mut connect()).expect("Failed to run migrations");
    let system = actix::System::new();
    system.block_on(async {
        // Just use basic logger when running a non-release build
        #[cfg(debug_assertions)]
        {
            env_logger::init();
        }
        #[cfg(not(debug_assertions))]
        {
            otel::init_logs();
            otel::init_tracing();
        }
        // AppState construction loads (and seeds if needed) the libraries
        // table; we use that list to drive the initial thumbnail sweep.
        let app_data = Data::new(AppState::default());
        // Kick thumbnail generation onto a background thread so the HTTP
        // server can accept traffic while large libraries are backfilling.
        // Existing thumbs are re-used (exists() check inside the walk),
        // so missed files are filled in over successive scans.
        {
            let libs = app_data.libraries.clone();
            let excluded = app_data.excluded_dirs.clone();
            std::thread::spawn(move || {
                thumbnails::create_thumbnails(&libs, &excluded);
            });
        }
        let labels = HashMap::new();
        let prometheus = PrometheusMetricsBuilder::new("api")
            .const_labels(labels)
            .build()
            .expect("Unable to build prometheus metrics middleware");
        prometheus
            .registry
            .register(Box::new(thumbnails::IMAGE_GAUGE.clone()))
            .expect("Unable to register image thumbnail gauge");
        prometheus
            .registry
            .register(Box::new(thumbnails::VIDEO_GAUGE.clone()))
            .expect("Unable to register video thumbnail gauge");
        let app_state = app_data.clone();
        // Ask the playlist manager actor to (re)scan every configured
        // library root at startup.
        for lib in &app_state.libraries {
            app_state.playlist_manager.do_send(ScanDirectoryMessage {
                directory: lib.root_path.clone(),
            });
        }
        // Start file watcher with playlist manager and preview generator
        let playlist_mgr_for_watcher = app_state.playlist_manager.as_ref().clone();
        let preview_gen_for_watcher = app_state.preview_clip_generator.as_ref().clone();
        watcher::watch_files(
            app_state.libraries.clone(),
            playlist_mgr_for_watcher,
            preview_gen_for_watcher,
            app_state.face_client.clone(),
            app_state.excluded_dirs.clone(),
            app_state.library_health.clone(),
        );
        // Start orphaned playlist cleanup job. Multi-library aware: walks
        // every configured library when looking for the source video, and
        // skips the whole cycle while any library is stale (a missing
        // source is indistinguishable from a transiently-unmounted share).
        watcher::cleanup_orphaned_playlists(
            app_state.libraries.clone(),
            app_state.excluded_dirs.clone(),
            app_state.library_health.clone(),
        );
        // Spawn background job to generate daily conversation summaries
        {
            use crate::ai::generate_daily_summaries;
            use crate::database::{DailySummaryDao, SqliteDailySummaryDao};
            use chrono::NaiveDate;
            // Summaries are generated for conversations dated between
            // start_date and end_date (currently 2015-10-01 through
            // 2020-01-01). To change the window, edit the two dates below.
            let start_date = Some(NaiveDate::from_ymd_opt(2015, 10, 1).unwrap());
            let end_date = Some(NaiveDate::from_ymd_opt(2020, 1, 1).unwrap());
            // Summary generation is opt-in: list contact names here to
            // enable it, e.g. vec!["Domenique", "Zach", "Paul"].
            let contacts_to_summarize = vec![];
            let ollama = app_state.ollama.clone();
            let sms_client = app_state.sms_client.clone();
            for contact in contacts_to_summarize {
                let ollama_clone = ollama.clone();
                let sms_client_clone = sms_client.clone();
                // Each task gets its own DAO so the spawned futures don't
                // contend on a single connection.
                let summary_dao: Arc<Mutex<Box<dyn DailySummaryDao>>> =
                    Arc::new(Mutex::new(Box::new(SqliteDailySummaryDao::new())));
                let start = start_date;
                let end = end_date;
                tokio::spawn(async move {
                    info!("Starting daily summary generation for {}", contact);
                    if let Err(e) = generate_daily_summaries(
                        contact,
                        start,
                        end,
                        &ollama_clone,
                        &sms_client_clone,
                        summary_dao,
                    )
                    .await
                    {
                        error!("Daily summary generation failed for {}: {:?}", contact, e);
                    } else {
                        info!("Daily summary generation completed for {}", contact);
                    }
                });
            }
        }
        // Per-worker factory: each HTTP worker thread gets its own DAO set.
        HttpServer::new(move || {
            let user_dao = SqliteUserDao::new();
            let favorites_dao = SqliteFavoriteDao::new();
            let tag_dao = SqliteTagDao::default();
            let exif_dao = SqliteExifDao::new();
            let insight_dao = SqliteInsightDao::new();
            let preview_dao = SqlitePreviewDao::new();
            let face_dao = faces::SqliteFaceDao::new();
            let cors = Cors::default()
                .allowed_origin_fn(|origin, _req_head| {
                    // Allow all origins in development, or check against CORS_ALLOWED_ORIGINS env var
                    if let Ok(allowed_origins) = env::var("CORS_ALLOWED_ORIGINS") {
                        allowed_origins
                            .split(',')
                            .any(|allowed| origin.as_bytes() == allowed.trim().as_bytes())
                    } else {
                        // Default: allow all origins if not configured
                        true
                    }
                })
                .allowed_methods(vec!["GET", "POST", "PUT", "DELETE", "OPTIONS"])
                .allowed_headers(vec![
                    actix_web::http::header::AUTHORIZATION,
                    actix_web::http::header::ACCEPT,
                    actix_web::http::header::CONTENT_TYPE,
                ])
                .supports_credentials()
                .max_age(3600);
            // Configure rate limiting for login endpoint (2 requests/sec, burst of 5)
            let governor_conf = GovernorConfigBuilder::default()
                .per_second(2)
                .burst_size(5)
                .finish()
                .expect("Invalid rate-limiter configuration for /login");
            App::new()
                .wrap(middleware::Logger::default())
                .wrap(cors)
                .service(
                    web::resource("/login")
                        .wrap(Governor::new(&governor_conf))
                        .route(web::post().to(login::<SqliteUserDao>)),
                )
                .service(
                    web::resource("/photos")
                        .route(web::get().to(files::list_photos::<SqliteTagDao, RealFileSystem>)),
                )
                .service(
                    web::resource("/photos/gps-summary")
                        .route(web::get().to(files::get_gps_summary)),
                )
                .service(
                    web::resource("/photos/exif").route(web::get().to(files::list_exif_summary)),
                )
                .service(web::resource("/file/move").post(move_file::<RealFileSystem>))
                .service(handlers::image::get_image)
                .service(handlers::image::upload_image)
                .service(handlers::video::generate_video)
                .service(handlers::video::stream_video)
                .service(handlers::video::get_video_preview)
                .service(handlers::video::get_preview_status)
                .service(handlers::video::get_video_part)
                .service(handlers::favorites::favorites)
                .service(handlers::favorites::put_add_favorite)
                .service(handlers::favorites::delete_favorite)
                .service(handlers::image::get_file_metadata)
                .service(handlers::image::set_image_gps)
                .service(handlers::image::set_image_date)
                .service(handlers::image::clear_image_date)
                .service(handlers::image::get_full_exif)
                .service(memories::list_memories)
                .service(ai::generate_insight_handler)
                .service(ai::generate_agentic_insight_handler)
                .service(ai::get_insight_handler)
                .service(ai::delete_insight_handler)
                .service(ai::get_all_insights_handler)
                .service(ai::get_available_models_handler)
                .service(ai::get_openrouter_models_handler)
                .service(ai::chat_turn_handler)
                .service(ai::chat_stream_handler)
                .service(ai::chat_history_handler)
                .service(ai::chat_rewind_handler)
                .service(ai::rate_insight_handler)
                .service(ai::export_training_data_handler)
                .service(libraries::list_libraries)
                .add_feature(add_tag_services::<_, SqliteTagDao>)
                .add_feature(knowledge::add_knowledge_services::<_, SqliteKnowledgeDao>)
                .add_feature(personas::add_persona_services)
                .add_feature(faces::add_face_services::<_, faces::SqliteFaceDao>)
                .add_feature(duplicates::add_duplicate_services)
                .app_data(app_data.clone())
                .app_data::<Data<RealFileSystem>>(Data::new(RealFileSystem::new(
                    app_data.base_path.clone(),
                )))
                .app_data::<Data<Mutex<SqliteUserDao>>>(Data::new(Mutex::new(user_dao)))
                .app_data::<Data<Mutex<Box<dyn FavoriteDao>>>>(Data::new(Mutex::new(Box::new(
                    favorites_dao,
                ))))
                .app_data::<Data<Mutex<SqliteTagDao>>>(Data::new(Mutex::new(tag_dao)))
                .app_data::<Data<Mutex<Box<dyn ExifDao>>>>(Data::new(Mutex::new(Box::new(
                    exif_dao,
                ))))
                .app_data::<Data<Mutex<Box<dyn InsightDao>>>>(Data::new(Mutex::new(Box::new(
                    insight_dao,
                ))))
                .app_data::<Data<Mutex<Box<dyn PreviewDao>>>>(Data::new(Mutex::new(Box::new(
                    preview_dao,
                ))))
                .app_data::<Data<Mutex<SqliteKnowledgeDao>>>(Data::new(Mutex::new(
                    SqliteKnowledgeDao::new(),
                )))
                .app_data::<Data<Mutex<Box<dyn database::PersonaDao>>>>(Data::new(Mutex::new(
                    Box::new(database::SqlitePersonaDao::new()),
                )))
                .app_data::<Data<Mutex<faces::SqliteFaceDao>>>(Data::new(Mutex::new(face_dao)))
                .app_data::<Data<crate::ai::face_client::FaceClient>>(Data::new(
                    app_data.face_client.clone(),
                ))
                .app_data(mp::form::MultipartFormConfig::default().total_limit(1024 * 1024 * 1024)) // 1GB upload limit
                .app_data(web::JsonConfig::default().error_handler(|err, req| {
                    // Surface JSON deserialization problems to the client as
                    // a 400 with the parser's message, and log them server-side.
                    let detail = err.to_string();
                    log::warn!(
                        "JSON parse error on {} {}: {}",
                        req.method(),
                        req.uri(),
                        detail
                    );
                    let response =
                        HttpResponse::BadRequest().json(serde_json::json!({"error": detail}));
                    actix_web::error::InternalError::from_response(err, response).into()
                }))
                .app_data::<Data<InsightGenerator>>(Data::new(app_data.insight_generator.clone()))
                .wrap(prometheus.clone())
        })
        // BIND_URL is mandatory; fail fast with a clear message instead of
        // a bare unwrap panic when it is absent.
        .bind(
            dotenv::var("BIND_URL")
                .expect("BIND_URL must be set in the environment or .env"),
        )?
        // Always also listen on localhost for local tooling/health checks.
        .bind("localhost:8088")?
        .run()
        .await
    })
}
/// Applies any embedded migrations that have not yet been run on the given
/// SQLite connection.
///
/// The harness's list of applied migration versions is discarded; only
/// success or the boxed harness error is propagated to the caller.
fn run_migrations(
    connection: &mut impl MigrationHarness<Sqlite>,
) -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
    connection
        .run_pending_migrations(MIGRATIONS)
        .map(|_applied| ())
}