Add VideoWall feature: server-side preview clip generation and mobile grid view

Backend (Rust/Actix-web):
- Add video_preview_clips table and PreviewDao for tracking preview generation
- Add ffmpeg preview clip generator: 10 equally-spaced 1s segments at 480p with CUDA NVENC auto-detection
- Add PreviewClipGenerator actor with semaphore-limited concurrent processing
- Add GET /video/preview and POST /video/preview/status endpoints
- Extend file watcher to detect and queue previews for new videos
- Use relative paths consistently for DB storage (matching EXIF convention)

Frontend (React Native/Expo):
- Add VideoWall grid view with 2-3 column layout of looping preview clips
- Add VideoWallItem component with ActiveVideoPlayer sub-component for lifecycle management
- Add useVideoWall hook for batch status polling with 5s refresh
- Add navigation button in grid header (visible when videos exist)
- Use TextureView surface type to fix Android z-ordering issues
- Optimize memory: players only mount while visible via FlatList windowSize
- Configure ExoPlayer buffer options and caching for short clips
- Tap to toggle audio focus, long press to open in full viewer

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Cameron
2026-02-25 19:40:17 -05:00
parent 7a0da1ab4a
commit 19c099360e
19 changed files with 1691 additions and 12 deletions

View File

@@ -1,5 +1,7 @@
use crate::database::PreviewDao;
use crate::is_video;
use crate::otel::global_tracer;
use crate::video::ffmpeg::generate_preview_clip;
use actix::prelude::*;
use futures::TryFutureExt;
use log::{debug, error, info, trace, warn};
@@ -8,7 +10,7 @@ use opentelemetry::trace::{Span, Status, Tracer};
use std::io::Result;
use std::path::{Path, PathBuf};
use std::process::{Child, Command, ExitStatus, Stdio};
use std::sync::Arc;
use std::sync::{Arc, Mutex};
use tokio::sync::Semaphore;
use walkdir::{DirEntry, WalkDir};
// ffmpeg -i test.mp4 -c:v h264 -flags +cgop -g 30 -hls_time 3 out.m3u8
@@ -484,3 +486,118 @@ impl Handler<GeneratePlaylistMessage> for PlaylistGenerator {
})
}
}
/// Actix message asking the [`PreviewClipGenerator`] to produce a preview
/// clip for one video. Fire-and-forget: the handler returns `()`.
#[derive(Message)]
#[rtype(result = "()")]
pub struct GeneratePreviewClipMessage {
/// Path to the source video; the generator strips its configured base path
/// from this to form the relative DB key (so it is presumably the absolute
/// on-disk path — TODO confirm at the file-watcher call site).
pub video_path: String,
}
/// Actor that generates short MP4 preview clips for videos, limiting the
/// number of concurrent ffmpeg jobs with a semaphore.
pub struct PreviewClipGenerator {
// Caps concurrent preview generations (initialized to 2 in `new`).
semaphore: Arc<Semaphore>,
// Root directory under which generated preview clips are written.
preview_clips_dir: String,
// Library base path; stripped from incoming video paths to form the
// relative paths stored in the DB (matching the EXIF convention).
base_path: String,
// DAO used to record preview status transitions. std `Mutex` is fine here
// because the guard is never held across an await in the handler.
preview_dao: Arc<Mutex<Box<dyn PreviewDao>>>,
}
impl PreviewClipGenerator {
    /// Build a generator that writes clips under `preview_clips_dir`,
    /// treats `base_path` as the library root when deriving relative DB
    /// keys, and records progress through `preview_dao`.
    ///
    /// Concurrency is capped at two simultaneous ffmpeg jobs.
    pub fn new(
        preview_clips_dir: String,
        base_path: String,
        preview_dao: Arc<Mutex<Box<dyn PreviewDao>>>,
    ) -> Self {
        let semaphore = Arc::new(Semaphore::new(2));
        Self {
            semaphore,
            preview_clips_dir,
            base_path,
            preview_dao,
        }
    }
}
// Plain actix actor with the default single-threaded context; no lifecycle
// hooks are needed since all work happens in message handlers.
impl Actor for PreviewClipGenerator {
type Context = Context<Self>;
}
/// Handles a preview-clip request: acquires a concurrency permit, marks the
/// video as "processing" in the DB, then spawns a detached task that runs
/// ffmpeg and records the final "complete"/"failed" status.
impl Handler<GeneratePreviewClipMessage> for PreviewClipGenerator {
type Result = ResponseFuture<()>;
fn handle(
&mut self,
msg: GeneratePreviewClipMessage,
_ctx: &mut Self::Context,
) -> Self::Result {
// Clone everything the future needs so it is 'static and does not
// borrow the actor.
let semaphore = self.semaphore.clone();
let preview_clips_dir = self.preview_clips_dir.clone();
let base_path = self.base_path.clone();
let preview_dao = self.preview_dao.clone();
let video_path = msg.video_path;
Box::pin(async move {
// Owned permit is later moved into the spawned task (dropped at its
// end), so the semaphore bounds the full ffmpeg run — at most two
// clips generate concurrently. `expect` only fires if the semaphore
// is closed, which never happens here (it lives in the actor).
let permit = semaphore
.acquire_owned()
.await
.expect("Unable to acquire preview semaphore");
// Compute relative path (from BASE_PATH) for DB operations, consistent with EXIF convention
let relative_path = video_path
.strip_prefix(&base_path)
.unwrap_or(&video_path)
.trim_start_matches(['/', '\\'])
.to_string();
// Update status to processing
{
let otel_ctx = opentelemetry::Context::current();
// Scoped block so the std Mutex guard is dropped before any await.
let mut dao = preview_dao.lock().expect("Unable to lock PreviewDao");
// DB errors are deliberately ignored (best-effort status tracking).
let _ = dao.update_status(&otel_ctx, &relative_path, "processing", None, None, None);
}
// Compute output path: join preview_clips_dir with relative path, change ext to .mp4
let output_path = PathBuf::from(&preview_clips_dir)
.join(&relative_path)
.with_extension("mp4");
let output_str = output_path.to_string_lossy().to_string();
let video_path_owned = video_path.clone();
let relative_path_owned = relative_path.clone();
// NOTE(review): the JoinHandle is dropped, so this task is detached —
// the handler's future completes as soon as the job is spawned, and a
// panic inside the task would go unobserved. Also,
// `opentelemetry::Context::current()` inside the spawned task
// presumably yields the root (empty) context rather than the
// request's — confirm if span linkage matters here.
tokio::spawn(async move {
match generate_preview_clip(&video_path_owned, &output_str).await {
Ok((duration, size)) => {
info!(
"Preview clip complete for '{}' ({:.1}s, {} bytes)",
relative_path_owned, duration, size
);
let otel_ctx = opentelemetry::Context::current();
let mut dao = preview_dao.lock().expect("Unable to lock PreviewDao");
// NOTE(review): `size as i32` wraps negative for files >= 2 GiB;
// harmless for ~10s 480p clips but worth a try_into if clips grow.
let _ = dao.update_status(
&otel_ctx,
&relative_path_owned,
"complete",
Some(duration as f32),
Some(size as i32),
None,
);
}
Err(e) => {
error!(
"Failed to generate preview clip for '{}': {}",
relative_path_owned, e
);
let otel_ctx = opentelemetry::Context::current();
let mut dao = preview_dao.lock().expect("Unable to lock PreviewDao");
let _ = dao.update_status(
&otel_ctx,
&relative_path_owned,
"failed",
None,
None,
Some(&e.to_string()),
);
}
}
// Explicitly release the concurrency slot once the clip is done.
drop(permit);
});
})
}
}

View File

@@ -2,9 +2,40 @@ use futures::TryFutureExt;
use log::{debug, error, info, warn};
use std::io::Result;
use std::process::{Output, Stdio};
use std::sync::OnceLock;
use std::time::Instant;
use tokio::process::Command;
// Cached answer to "does this ffmpeg build expose h264_nvenc?".
static NVENC_AVAILABLE: OnceLock<bool> = OnceLock::new();
/// Probe ffmpeg's encoder list for the NVIDIA `h264_nvenc` hardware encoder.
///
/// Any failure to run ffmpeg (missing binary, spawn error) is treated as
/// "not available" rather than surfaced as an error.
async fn check_nvenc_available() -> bool {
    let probe = Command::new("ffmpeg")
        .args(["-hide_banner", "-encoders"])
        .output()
        .await;
    match probe {
        Ok(out) => String::from_utf8_lossy(&out.stdout).contains("h264_nvenc"),
        Err(_) => false,
    }
}
/// Returns whether NVENC is available, caching the result after first check.
///
/// Concurrent first calls may each probe ffmpeg; the first `set` wins and
/// every later call reads the cached value.
async fn is_nvenc_available() -> bool {
    match NVENC_AVAILABLE.get() {
        Some(&cached) => cached,
        None => {
            let available = check_nvenc_available().await;
            let _ = NVENC_AVAILABLE.set(available);
            if available {
                info!("CUDA NVENC hardware acceleration detected and enabled for preview clips");
            } else {
                info!("NVENC not available, using CPU encoding for preview clips");
            }
            available
        }
    }
}
pub struct Ffmpeg;
pub enum GifType {
@@ -152,7 +183,7 @@ impl Ffmpeg {
Ok(output_file.to_string())
}
async fn create_gif_from_frames(&self, frame_base_dir: &str, output_file: &str) -> Result<i32> {
pub async fn create_gif_from_frames(&self, frame_base_dir: &str, output_file: &str) -> Result<i32> {
let output = Command::new("ffmpeg")
.arg("-y")
.args(["-framerate", "4"])
@@ -183,3 +214,114 @@ impl Ffmpeg {
Ok(output.status.code().unwrap_or(-1))
}
}
/// Get video duration in seconds as f64 for precise interval calculation.
///
/// Runs `ffprobe -show_entries format=duration` and parses its CSV output.
///
/// # Errors
/// Returns an error if ffprobe cannot be spawned, exits unsuccessfully, or
/// prints a value that does not parse as a float (e.g. "N/A" for inputs
/// with no container duration).
async fn get_duration_seconds(input_file: &str) -> Result<f64> {
    let out = Command::new("ffprobe")
        .args(["-i", input_file])
        .args(["-show_entries", "format=duration"])
        .args(["-v", "quiet"])
        .args(["-of", "csv=p=0"])
        .output()
        .await?;
    // Fail fast on a non-zero exit; otherwise stdout is empty and the caller
    // would see a confusing "cannot parse float from empty string" error.
    if !out.status.success() {
        return Err(std::io::Error::other(format!(
            "ffprobe failed for '{}' (exit status {:?})",
            input_file,
            out.status.code()
        )));
    }
    let duration_str = String::from_utf8_lossy(&out.stdout).trim().to_string();
    // Include the raw ffprobe output in the error so bad values like "N/A"
    // are diagnosable from logs.
    duration_str.parse::<f64>().map_err(|e| {
        std::io::Error::other(format!(
            "unable to parse ffprobe duration '{}': {}",
            duration_str, e
        ))
    })
}
/// Generate a preview clip from a video file.
///
/// Creates a ~10 second MP4 by extracting up to 10 equally-spaced 1-second segments
/// at 480p with H.264 video and AAC audio. For short videos (<10s), uses fewer segments.
/// For very short videos (<1s), transcodes the entire video.
///
/// Returns (duration_seconds, file_size_bytes) on success.
///
/// # Errors
/// Fails if the duration cannot be determined (or is non-finite/non-positive),
/// if the output directory cannot be created, or if ffmpeg exits non-zero.
pub async fn generate_preview_clip(input_file: &str, output_file: &str) -> Result<(f64, u64)> {
    info!("Generating preview clip for: '{}'", input_file);
    let start = Instant::now();
    let duration = get_duration_seconds(input_file).await?;
    // Guard against NaN/inf/<=0 durations (ffprobe can emit these for corrupt
    // files). NaN compares false against both `< 1.0` and `< 10.0` and would
    // otherwise poison the segment-interval math below.
    if !duration.is_finite() || duration <= 0.0 {
        return Err(std::io::Error::other(format!(
            "invalid duration {} reported for '{}'",
            duration, input_file
        )));
    }
    let use_nvenc = is_nvenc_available().await;
    // Create parent directories for output
    if let Some(parent) = std::path::Path::new(output_file).parent() {
        std::fs::create_dir_all(parent)?;
    }
    // Expected output duration, computed once and reused both for the segment
    // count and the final report, so the branch logic cannot drift apart.
    let clip_duration = if duration < 1.0 {
        duration
    } else if duration < 10.0 {
        duration.floor()
    } else {
        10.0
    };
    let mut cmd = Command::new("ffmpeg");
    cmd.arg("-y");
    // Use CUDA hardware-accelerated decoding when available
    if use_nvenc {
        cmd.args(["-hwaccel", "cuda"]);
    }
    cmd.arg("-i").arg(input_file);
    if duration < 1.0 {
        // Very short video (<1s): transcode the whole thing to 480p MP4
        cmd.args(["-vf", "scale=-2:480"]);
    } else {
        // One 1-second segment per whole second of source, capped at 10.
        let segment_count = clip_duration as u32;
        let interval = duration / segment_count as f64;
        // `select` keeps frames whose time-within-interval is < 1s;
        // setpts/asetpts re-stamp survivors so the output plays contiguously.
        let vf = format!(
            "select='lt(mod(t,{:.4}),1)',setpts=N/FRAME_RATE/TB,scale=-2:480",
            interval
        );
        // NOTE(review): `-af` assumes the input carries an audio stream —
        // ffmpeg fails on audio-less videos; confirm inputs always have audio.
        let af = format!(
            "aselect='lt(mod(t,{:.4}),1)',asetpts=N/SR/TB",
            interval
        );
        cmd.args(["-vf", &vf]);
        cmd.args(["-af", &af]);
    }
    // Use NVENC for encoding when available, otherwise fall back to libx264
    if use_nvenc {
        cmd.args(["-c:v", "h264_nvenc", "-preset", "p4", "-cq:v", "28"]);
    } else {
        cmd.args(["-c:v", "libx264", "-crf", "28", "-preset", "veryfast"]);
    }
    cmd.args(["-c:a", "aac"]);
    cmd.arg(output_file);
    // Capture stderr so failures can be reported; stdout is uninteresting.
    cmd.stdout(Stdio::null());
    cmd.stderr(Stdio::piped());
    let output = cmd.output().await?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(std::io::Error::other(format!(
            "ffmpeg preview generation failed: {}",
            stderr
        )));
    }
    let metadata = std::fs::metadata(output_file)?;
    let file_size = metadata.len();
    info!(
        "Generated preview clip '{}' ({:.1}s, {} bytes) in {:?}",
        output_file, clip_duration, file_size, start.elapsed()
    );
    Ok((clip_duration, file_size))
}