Add comprehensive testing for preview clip and status handling

- Implement unit tests for PreviewClipRequest/PreviewStatusRequest serialization and deserialization.
- Add tests for PreviewDao (insert, update, batch retrieval, and status-based queries).
- Extend Actix-web integration tests for `/video/preview/status` endpoint scenarios.
- Introduce in-memory TestPreviewDao for mock database interactions.
- Update README with new config parameters for preview clips.
This commit is contained in:
Cameron
2026-02-26 10:06:21 -05:00
parent 842ed4ed66
commit 0d05033b38
6 changed files with 505 additions and 2 deletions

View File

@@ -1,4 +1,4 @@
# CLAUDE.md
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.

View File

@@ -9,6 +9,7 @@ Upon first run it will generate thumbnails for all images and videos at `BASE_PA
- Video streaming with HLS
- Tag-based organization
- Memories API for browsing photos by date
- **Video Wall** - Auto-generated short preview clips for videos, served via a grid view
- **AI-Powered Photo Insights** - Generate contextual insights from photos using LLMs
- **RAG-based Context Retrieval** - Semantic search over daily conversation summaries
- **Automatic Daily Summaries** - LLM-generated summaries of daily conversations with embeddings
@@ -22,10 +23,12 @@ You must have `ffmpeg` installed for streaming video and generating video thumbn
- `BASE_PATH` is the root from which you want to serve images and videos
- `THUMBNAILS` is a path where generated thumbnails should be stored
- `VIDEO_PATH` is a path where HLS playlists and video parts should be stored
- `GIFS_DIRECTORY` is a path where generated video GIF thumbnails should be stored
- `BIND_URL` is the url and port to bind to (typically your own IP address)
- `SECRET_KEY` is the *hopefully* random string to sign Tokens with
- `RUST_LOG` is one of `off, error, warn, info, debug, trace`, from least to most noisy [error is default]
- `EXCLUDED_DIRS` is a comma separated list of directories to exclude from the Memories API
- `PREVIEW_CLIPS_DIRECTORY` (optional) is a path where generated video preview clips should be stored [default: `preview_clips`]
- `WATCH_QUICK_INTERVAL_SECONDS` (optional) is the interval in seconds for quick file scans [default: 60]
- `WATCH_FULL_INTERVAL_SECONDS` (optional) is the interval in seconds for full file scans [default: 3600]

View File

@@ -442,4 +442,54 @@ mod tests {
}
}
}
#[test]
fn test_preview_clip_request_deserialize() {
    use super::PreviewClipRequest;
    // A single-field JSON body should map straight onto the struct's `path`.
    let payload = r#"{"path":"photos/2024/video.mp4"}"#;
    let request: PreviewClipRequest = serde_json::from_str(payload).unwrap();
    assert_eq!(request.path, "photos/2024/video.mp4");
}
#[test]
fn test_preview_status_request_deserialize() {
    use super::PreviewStatusRequest;
    // A batch status request is just a JSON object with an array of paths.
    let payload = r#"{"paths":["a/one.mp4","b/two.mp4","c/three.mp4"]}"#;
    let request: PreviewStatusRequest = serde_json::from_str(payload).unwrap();
    // All three entries survive deserialization, in order.
    assert_eq!(request.paths.len(), 3);
    assert_eq!(request.paths[0], "a/one.mp4");
    assert_eq!(request.paths[2], "c/three.mp4");
}
#[test]
fn test_preview_status_response_serialize() {
    use super::{PreviewStatusItem, PreviewStatusResponse};
    // One finished clip (carries a URL) and one still pending (no URL).
    let complete_item = PreviewStatusItem {
        path: "a/one.mp4".to_string(),
        status: "complete".to_string(),
        preview_url: Some("/video/preview?path=a%2Fone.mp4".to_string()),
    };
    let pending_item = PreviewStatusItem {
        path: "b/two.mp4".to_string(),
        status: "pending".to_string(),
        preview_url: None,
    };
    let response = PreviewStatusResponse {
        previews: vec![complete_item, pending_item],
    };
    let serialized = serde_json::to_value(&response).unwrap();
    let items = serialized["previews"].as_array().unwrap();
    assert_eq!(items.len(), 2);
    // The completed entry exposes its preview URL...
    assert_eq!(items[0]["status"], "complete");
    assert!(items[0]["preview_url"].is_string());
    // ...while the pending entry omits the field entirely
    // (via `skip_serializing_if` on the struct).
    assert_eq!(items[1]["status"], "pending");
    assert!(items[1].get("preview_url").is_none());
}
}

View File

@@ -60,6 +60,13 @@ impl SqlitePreviewDao {
connection: Arc::new(Mutex::new(connect())),
}
}
#[cfg(test)]
/// Test-only constructor: wraps an already-open connection (typically an
/// in-memory database) instead of opening the configured one.
pub fn from_connection(conn: SqliteConnection) -> Self {
    Self {
        connection: Arc::new(Mutex::new(conn)),
    }
}
}
impl PreviewDao for SqlitePreviewDao {
@@ -181,3 +188,167 @@ impl PreviewDao for SqlitePreviewDao {
.map_err(|_| DbError::new(DbErrorKind::QueryError))
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::test::in_memory_db_connection;

    /// DAO backed by a fresh in-memory SQLite database for each test.
    fn setup_dao() -> SqlitePreviewDao {
        SqlitePreviewDao::from_connection(in_memory_db_connection())
    }

    /// Blank tracing context for DAO calls.
    fn ctx() -> opentelemetry::Context {
        opentelemetry::Context::new()
    }

    #[test]
    fn test_insert_and_get_preview() {
        let context = ctx();
        let mut dao = setup_dao();
        dao.insert_preview(&context, "photos/video.mp4", "pending")
            .unwrap();
        // A freshly inserted row comes back with only path + status populated.
        let clip = dao
            .get_preview(&context, "photos/video.mp4")
            .unwrap()
            .unwrap();
        assert_eq!(clip.file_path, "photos/video.mp4");
        assert_eq!(clip.status, "pending");
        assert!(clip.duration_seconds.is_none());
        assert!(clip.file_size_bytes.is_none());
        assert!(clip.error_message.is_none());
    }

    #[test]
    fn test_insert_duplicate_ignored() {
        let context = ctx();
        let mut dao = setup_dao();
        dao.insert_preview(&context, "photos/video.mp4", "pending")
            .unwrap();
        // INSERT OR IGNORE semantics: re-inserting the same path must neither
        // error nor overwrite the existing row.
        dao.insert_preview(&context, "photos/video.mp4", "processing")
            .unwrap();
        let stored = dao
            .get_preview(&context, "photos/video.mp4")
            .unwrap()
            .unwrap();
        assert_eq!(stored.status, "pending");
    }

    #[test]
    fn test_update_status_to_complete() {
        let context = ctx();
        let mut dao = setup_dao();
        dao.insert_preview(&context, "photos/video.mp4", "pending")
            .unwrap();
        // Successful completion records duration and size, clears any error.
        dao.update_status(
            &context,
            "photos/video.mp4",
            "complete",
            Some(9.5),
            Some(1024000),
            None,
        )
        .unwrap();
        let stored = dao
            .get_preview(&context, "photos/video.mp4")
            .unwrap()
            .unwrap();
        assert_eq!(stored.status, "complete");
        assert_eq!(stored.duration_seconds, Some(9.5));
        assert_eq!(stored.file_size_bytes, Some(1024000));
        assert!(stored.error_message.is_none());
    }

    #[test]
    fn test_update_status_to_failed() {
        let context = ctx();
        let mut dao = setup_dao();
        dao.insert_preview(&context, "photos/video.mp4", "pending")
            .unwrap();
        // A failure stores the error text while leaving metadata unset.
        dao.update_status(
            &context,
            "photos/video.mp4",
            "failed",
            None,
            None,
            Some("ffmpeg exited with code 1"),
        )
        .unwrap();
        let stored = dao
            .get_preview(&context, "photos/video.mp4")
            .unwrap()
            .unwrap();
        assert_eq!(stored.status, "failed");
        assert_eq!(
            stored.error_message.as_deref(),
            Some("ffmpeg exited with code 1")
        );
    }

    #[test]
    fn test_get_preview_not_found() {
        let context = ctx();
        let mut dao = setup_dao();
        // Missing paths are Ok(None), not an error.
        let lookup = dao.get_preview(&context, "nonexistent/path.mp4").unwrap();
        assert!(lookup.is_none());
    }

    #[test]
    fn test_get_previews_batch() {
        let context = ctx();
        let mut dao = setup_dao();
        for &(path, status) in [
            ("a/one.mp4", "complete"),
            ("b/two.mp4", "pending"),
            ("c/three.mp4", "failed"),
        ]
        .iter()
        {
            dao.insert_preview(&context, path, status).unwrap();
        }
        // Ask for only two of the three rows.
        let wanted = vec!["a/one.mp4".to_string(), "c/three.mp4".to_string()];
        let found = dao.get_previews_batch(&context, &wanted).unwrap();
        assert_eq!(found.len(), 2);
        assert!(found.iter().any(|c| c.status == "complete"));
        assert!(found.iter().any(|c| c.status == "failed"));
    }

    #[test]
    fn test_get_previews_batch_empty_input() {
        let context = ctx();
        let mut dao = setup_dao();
        // An empty path list yields an empty result set.
        let found = dao.get_previews_batch(&context, &[]).unwrap();
        assert!(found.is_empty());
    }

    #[test]
    fn test_get_by_status() {
        let context = ctx();
        let mut dao = setup_dao();
        for &(path, status) in [
            ("a.mp4", "pending"),
            ("b.mp4", "complete"),
            ("c.mp4", "pending"),
            ("d.mp4", "failed"),
        ]
        .iter()
        {
            dao.insert_preview(&context, path, status).unwrap();
        }
        let pending = dao.get_by_status(&context, "pending").unwrap();
        assert_eq!(pending.len(), 2);
        let complete = dao.get_by_status(&context, "complete").unwrap();
        assert_eq!(complete.len(), 1);
        assert_eq!(complete[0].file_path, "b.mp4");
        // No rows were ever marked "processing".
        let processing = dao.get_by_status(&context, "processing").unwrap();
        assert!(processing.is_empty());
    }
}

View File

@@ -1679,3 +1679,176 @@ fn process_new_files(
create_thumbnails();
}
}
#[cfg(test)]
mod tests {
// Actix-web integration tests for the `/video/preview/status` endpoint.
// All database access goes through the in-memory `TestPreviewDao` mock,
// so no SQLite setup is needed.
use super::*;
use crate::data::Claims;
use crate::database::PreviewDao;
use crate::testhelpers::TestPreviewDao;
use actix_web::web::Data;
// Produce a signed JWT for user "1" so requests clear the auth check.
// NOTE(review): signs with the literal secret `test_key` — presumably
// `AppState::test_state()` configures the same verification key; confirm.
fn make_token() -> String {
let claims = Claims::valid_user("1".to_string());
jsonwebtoken::encode(
&jsonwebtoken::Header::default(),
&claims,
&jsonwebtoken::EncodingKey::from_secret(b"test_key"),
)
.unwrap()
}
// Wrap the mock in the `Data<Mutex<Box<dyn PreviewDao>>>` shape the
// handler extracts via `app_data`.
fn make_preview_dao(dao: TestPreviewDao) -> Data<Mutex<Box<dyn PreviewDao>>> {
Data::new(Mutex::new(Box::new(dao) as Box<dyn PreviewDao>))
}
// An unknown path is reported as "pending" and — as a side effect — the
// handler is expected to insert a pending row for later processing.
#[actix_rt::test]
async fn test_get_preview_status_returns_pending_for_unknown() {
let dao = TestPreviewDao::new();
let preview_dao = make_preview_dao(dao);
let app_state = Data::new(AppState::test_state());
let token = make_token();
let app = actix_web::test::init_service(
App::new()
.service(get_preview_status)
.app_data(app_state)
// Keep a clone so the test can inspect the DAO afterwards.
.app_data(preview_dao.clone()),
)
.await;
let req = actix_web::test::TestRequest::post()
.uri("/video/preview/status")
.insert_header(("Authorization", format!("Bearer {}", token)))
.set_json(serde_json::json!({"paths": ["photos/new_video.mp4"]}))
.to_request();
let resp = actix_web::test::call_service(&app, req).await;
assert_eq!(resp.status(), 200);
let body: serde_json::Value = actix_web::test::read_body_json(resp).await;
let previews = body["previews"].as_array().unwrap();
assert_eq!(previews.len(), 1);
assert_eq!(previews[0]["status"], "pending");
// Verify the DAO now has a pending record
let mut dao_lock = preview_dao.lock().unwrap();
let ctx = opentelemetry::Context::new();
let clip = dao_lock
.get_preview(&ctx, "photos/new_video.mp4")
.unwrap();
assert!(clip.is_some());
assert_eq!(clip.unwrap().status, "pending");
}
// A clip already marked complete comes back with a percent-encoded
// `preview_url` for the clip-serving endpoint.
#[actix_rt::test]
async fn test_get_preview_status_returns_complete_with_url() {
let mut dao = TestPreviewDao::new();
let ctx = opentelemetry::Context::new();
// Seed the mock: insert then mark complete with duration/size metadata.
dao.insert_preview(&ctx, "photos/done.mp4", "pending")
.unwrap();
dao.update_status(&ctx, "photos/done.mp4", "complete", Some(9.5), Some(500000), None)
.unwrap();
let preview_dao = make_preview_dao(dao);
let app_state = Data::new(AppState::test_state());
let token = make_token();
let app = actix_web::test::init_service(
App::new()
.service(get_preview_status)
.app_data(app_state)
.app_data(preview_dao),
)
.await;
let req = actix_web::test::TestRequest::post()
.uri("/video/preview/status")
.insert_header(("Authorization", format!("Bearer {}", token)))
.set_json(serde_json::json!({"paths": ["photos/done.mp4"]}))
.to_request();
let resp = actix_web::test::call_service(&app, req).await;
assert_eq!(resp.status(), 200);
let body: serde_json::Value = actix_web::test::read_body_json(resp).await;
let previews = body["previews"].as_array().unwrap();
assert_eq!(previews.len(), 1);
assert_eq!(previews[0]["status"], "complete");
// The path must appear URL-encoded ("/" -> "%2F") inside the preview URL.
assert!(previews[0]["preview_url"].as_str().unwrap().contains("photos%2Fdone.mp4"));
}
// Oversized batches are rejected: 201 paths should produce 400 Bad Request
// (presumably a 200-path cap enforced in the handler — confirm the limit).
#[actix_rt::test]
async fn test_get_preview_status_rejects_over_200_paths() {
let dao = TestPreviewDao::new();
let preview_dao = make_preview_dao(dao);
let app_state = Data::new(AppState::test_state());
let token = make_token();
let app = actix_web::test::init_service(
App::new()
.service(get_preview_status)
.app_data(app_state)
.app_data(preview_dao),
)
.await;
let paths: Vec<String> = (0..201).map(|i| format!("video_{}.mp4", i)).collect();
let req = actix_web::test::TestRequest::post()
.uri("/video/preview/status")
.insert_header(("Authorization", format!("Bearer {}", token)))
.set_json(serde_json::json!({"paths": paths}))
.to_request();
let resp = actix_web::test::call_service(&app, req).await;
assert_eq!(resp.status(), 400);
}
// Mixed batch: the response is expected to preserve request order, report
// each path's current status, and fall back to "pending" for unknown paths.
#[actix_rt::test]
async fn test_get_preview_status_mixed_statuses() {
let mut dao = TestPreviewDao::new();
let ctx = opentelemetry::Context::new();
dao.insert_preview(&ctx, "a.mp4", "pending").unwrap();
dao.insert_preview(&ctx, "b.mp4", "pending").unwrap();
dao.update_status(&ctx, "b.mp4", "complete", Some(10.0), Some(100000), None)
.unwrap();
let preview_dao = make_preview_dao(dao);
let app_state = Data::new(AppState::test_state());
let token = make_token();
let app = actix_web::test::init_service(
App::new()
.service(get_preview_status)
.app_data(app_state)
.app_data(preview_dao),
)
.await;
let req = actix_web::test::TestRequest::post()
.uri("/video/preview/status")
.insert_header(("Authorization", format!("Bearer {}", token)))
.set_json(serde_json::json!({"paths": ["a.mp4", "b.mp4", "c.mp4"]}))
.to_request();
let resp = actix_web::test::call_service(&app, req).await;
assert_eq!(resp.status(), 200);
let body: serde_json::Value = actix_web::test::read_body_json(resp).await;
let previews = body["previews"].as_array().unwrap();
assert_eq!(previews.len(), 3);
// a.mp4 is pending
assert_eq!(previews[0]["path"], "a.mp4");
assert_eq!(previews[0]["status"], "pending");
// b.mp4 is complete with URL
assert_eq!(previews[1]["path"], "b.mp4");
assert_eq!(previews[1]["status"], "complete");
assert!(previews[1]["preview_url"].is_string());
// c.mp4 was not found — handler inserts pending
assert_eq!(previews[2]["path"], "c.mp4");
assert_eq!(previews[2]["status"], "pending");
}
}

View File

@@ -3,9 +3,12 @@ use actix_web::{
body::{BoxBody, MessageBody},
};
use crate::database::{UserDao, models::User};
use crate::database::models::{User, VideoPreviewClip};
use crate::database::{DbError, DbErrorKind, PreviewDao, UserDao};
use std::cell::RefCell;
use std::collections::HashMap;
use std::option::Option;
use std::sync::Mutex as StdMutex;
pub struct TestUserDao {
pub user_map: RefCell<Vec<User>>,
@@ -62,3 +65,106 @@ impl BodyReader for HttpResponse<BoxBody> {
std::str::from_utf8(&body).unwrap().to_string()
}
}
/// In-memory stand-in for the SQLite-backed preview DAO, used by the
/// handler integration tests. Rows live in a `HashMap` keyed by file path.
pub struct TestPreviewDao {
/// Clip rows keyed by file path; the mutex provides interior mutability
/// so the mock can be shared behind `Data<Mutex<Box<dyn PreviewDao>>>`.
pub clips: StdMutex<HashMap<String, VideoPreviewClip>>,
/// Monotonically increasing id handed to each newly inserted clip.
next_id: StdMutex<i32>,
}
impl TestPreviewDao {
    /// Create an empty mock DAO; ids start at 1 to match typical
    /// autoincrement behavior.
    pub fn new() -> Self {
        Self {
            clips: StdMutex::new(HashMap::new()),
            next_id: StdMutex::new(1),
        }
    }
}

/// `Default` delegates to `new()` so the mock composes with
/// `..Default::default()` and satisfies clippy's `new_without_default`.
impl Default for TestPreviewDao {
    fn default() -> Self {
        Self::new()
    }
}
impl PreviewDao for TestPreviewDao {
fn insert_preview(
&mut self,
_context: &opentelemetry::Context,
file_path_val: &str,
status_val: &str,
) -> Result<(), DbError> {
let mut clips = self.clips.lock().unwrap();
// insert_or_ignore semantics: skip if key already exists
if clips.contains_key(file_path_val) {
return Ok(());
}
let mut id = self.next_id.lock().unwrap();
let now = chrono::Utc::now().to_rfc3339();
clips.insert(
file_path_val.to_string(),
VideoPreviewClip {
id: *id,
file_path: file_path_val.to_string(),
status: status_val.to_string(),
duration_seconds: None,
file_size_bytes: None,
error_message: None,
created_at: now.clone(),
updated_at: now,
},
);
*id += 1;
Ok(())
}
fn update_status(
&mut self,
_context: &opentelemetry::Context,
file_path_val: &str,
status_val: &str,
duration: Option<f32>,
size: Option<i32>,
error: Option<&str>,
) -> Result<(), DbError> {
let mut clips = self.clips.lock().unwrap();
if let Some(clip) = clips.get_mut(file_path_val) {
clip.status = status_val.to_string();
clip.duration_seconds = duration;
clip.file_size_bytes = size;
clip.error_message = error.map(|s| s.to_string());
clip.updated_at = chrono::Utc::now().to_rfc3339();
Ok(())
} else {
Err(DbError {
kind: DbErrorKind::UpdateError,
})
}
}
fn get_preview(
&mut self,
_context: &opentelemetry::Context,
file_path_val: &str,
) -> Result<Option<VideoPreviewClip>, DbError> {
Ok(self.clips.lock().unwrap().get(file_path_val).cloned())
}
fn get_previews_batch(
&mut self,
_context: &opentelemetry::Context,
file_paths: &[String],
) -> Result<Vec<VideoPreviewClip>, DbError> {
let clips = self.clips.lock().unwrap();
Ok(file_paths
.iter()
.filter_map(|p| clips.get(p).cloned())
.collect())
}
fn get_by_status(
&mut self,
_context: &opentelemetry::Context,
status_val: &str,
) -> Result<Vec<VideoPreviewClip>, DbError> {
let clips = self.clips.lock().unwrap();
Ok(clips
.values()
.filter(|c| c.status == status_val)
.cloned()
.collect())
}
}