feature/rust-2024-edition #41

Merged
cameron merged 2 commits from feature/rust-2024-edition into master on 2025-09-01 17:47:52 +00:00
13 changed files with 1003 additions and 713 deletions

Cargo.lock (generated): 1376 changed lines. File diff suppressed because it is too large.


@@ -2,7 +2,7 @@
name = "image-api"
version = "0.3.0"
authors = ["Cameron Cordes <cameronc.dev@gmail.com>"]
edition = "2021"
edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
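For context, the edition bump above is typically the last mechanical step of the migration: run cargo fix --edition while still on edition 2021, switch the edition field to "2024", then run cargo fmt so the code is reformatted under the 2024 style edition. The import reordering and the if let to match rewrites in the hunks below are the characteristic output of those two tools rather than hand edits.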


@@ -1,15 +1,15 @@
use actix_web::Responder;
use actix_web::{
web::{self, Json},
HttpResponse,
web::{self, Json},
};
use chrono::{Duration, Utc};
use jsonwebtoken::{encode, EncodingKey, Header};
use jsonwebtoken::{EncodingKey, Header, encode};
use log::{error, info};
use std::sync::Mutex;
use crate::{
data::{secret_key, Claims, CreateAccountRequest, LoginRequest, Token},
data::{Claims, CreateAccountRequest, LoginRequest, Token, secret_key},
database::UserDao,
};
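The reshuffled use statements in this hunk, and in most of the files below, come from rustfmt's 2024 style edition: import lists are now version-sorted, which compares names byte-wise, so uppercase identifiers such as Context or HttpResponse sort ahead of lowercase ones such as anyhow, encode, or web. Taking the anyhow import from this file as the pattern:

// style edition 2021: roughly case-insensitive ordering
use anyhow::{anyhow, Context};

// style edition 2024: version sort, uppercase (lower ASCII) first
use anyhow::{Context, anyhow};

When run through cargo fmt, rustfmt picks the style edition up from the manifest's edition key unless style_edition is configured explicitly, so the Cargo.toml change alone is enough to trigger this reformatting.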


@@ -1,14 +1,14 @@
use std::{fs, str::FromStr};
use anyhow::{anyhow, Context};
use anyhow::{Context, anyhow};
use chrono::{DateTime, Utc};
use log::error;
use actix_web::error::ErrorUnauthorized;
use actix_web::{dev, http::header, Error, FromRequest, HttpRequest};
use futures::future::{err, ok, Ready};
use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
use actix_web::{Error, FromRequest, HttpRequest, dev, http::header};
use futures::future::{Ready, err, ok};
use jsonwebtoken::{Algorithm, DecodingKey, Validation, decode};
use serde::{Deserialize, Serialize};
#[derive(Serialize)]
@@ -226,7 +226,8 @@ mod tests {
#[test]
fn test_expired_token() {
let err = Claims::from_str(
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiI5IiwiZXhwIjoxNn0.eZnfaNfiD54VMbphIqeBICeG9SzAtwNXntLwtTBihjY");
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiI5IiwiZXhwIjoxNn0.eZnfaNfiD54VMbphIqeBICeG9SzAtwNXntLwtTBihjY",
);
match err.unwrap_err().into_kind() {
ErrorKind::ExpiredSignature => assert!(true),
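The only change in this test is where the string argument's closing parenthesis and trailing comma land under the new formatter, but as a gloss for readers skimming: the Claims parser it exercises sits on top of the jsonwebtoken imports shown above, and an exp claim in the past surfaces as ErrorKind::ExpiredSignature. A self-contained sketch of that behaviour, using a made-up secret rather than the repo's key handling:

use jsonwebtoken::{
    Algorithm, DecodingKey, EncodingKey, Header, Validation, decode, encode, errors::ErrorKind,
};
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Claims {
    sub: String,
    exp: usize,
}

fn main() {
    // Hypothetical secret for illustration only; the service loads its own key.
    let secret = b"example-secret";
    // exp = 16 is a moment in January 1970, so validation must report expiry.
    let claims = Claims { sub: "9".into(), exp: 16 };
    let token = encode(&Header::default(), &claims, &EncodingKey::from_secret(secret)).unwrap();
    let err = decode::<Claims>(
        &token,
        &DecodingKey::from_secret(secret),
        &Validation::new(Algorithm::HS256),
    )
    .unwrap_err();
    assert!(matches!(err.into_kind(), ErrorKind::ExpiredSignature));
}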


@@ -1,4 +1,4 @@
use bcrypt::{hash, verify, DEFAULT_COST};
use bcrypt::{DEFAULT_COST, hash, verify};
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use std::ops::DerefMut;
@@ -30,7 +30,7 @@ impl SqliteUserDao {
#[cfg(test)]
pub mod test {
use diesel::{Connection, SqliteConnection};
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
use diesel_migrations::{EmbeddedMigrations, MigrationHarness, embed_migrations};
const DB_MIGRATIONS: EmbeddedMigrations = embed_migrations!();


@@ -7,18 +7,18 @@ use std::sync::Mutex;
use ::anyhow;
use actix::{Handler, Message};
use anyhow::{anyhow, Context};
use anyhow::{Context, anyhow};
use crate::data::{Claims, FilesRequest, FilterMode, PhotosResponse, SortType};
use crate::{create_thumbnails, AppState};
use crate::{AppState, create_thumbnails};
use actix_web::web::Data;
use actix_web::{
web::{self, Query},
HttpRequest, HttpResponse,
web::{self, Query},
};
use log::{debug, error, info, trace};
use opentelemetry::trace::{Span, Status, TraceContextExt, Tracer};
use opentelemetry::KeyValue;
use opentelemetry::trace::{Span, Status, TraceContextExt, Tracer};
use crate::data::SortType::NameAsc;
use crate::error::IntoHttpError;
@@ -144,103 +144,108 @@ pub async fn list_photos<TagD: TagDao, FS: FileSystemAccess>(
}
}
if let Ok(files) = file_system.get_files_for_path(search_path) {
info!("Found {:?} files in path: {:?}", files.len(), search_path);
match file_system.get_files_for_path(search_path) {
Ok(files) => {
info!("Found {:?} files in path: {:?}", files.len(), search_path);
let photos = files
.iter()
.filter(|&f| {
f.metadata().map_or_else(
|e| {
error!("Failed getting file metadata: {:?}", e);
f.extension().is_some()
},
|md| md.is_file(),
)
let photos = files
.iter()
.filter(|&f| {
f.metadata().map_or_else(
|e| {
error!("Failed getting file metadata: {:?}", e);
f.extension().is_some()
},
|md| md.is_file(),
)
})
.map(|path: &PathBuf| {
let relative = path.strip_prefix(&app_state.base_path).unwrap();
relative.to_path_buf()
})
.map(|f| f.to_str().unwrap().to_string())
.map(|file_name| {
let mut tag_dao = tag_dao.lock().expect("Unable to get TagDao");
let file_tags = tag_dao
.get_tags_for_path(&span_context, &file_name)
.unwrap_or_default();
(file_name, file_tags)
})
.filter(|(_, file_tags)| {
if let Some(tag_ids) = &req.tag_ids {
let tag_ids = tag_ids
.split(',')
.filter_map(|t| t.parse().ok())
.collect::<Vec<i32>>();
let excluded_tag_ids = &req
.exclude_tag_ids
.clone()
.unwrap_or_default()
.split(',')
.filter_map(|t| t.parse().ok())
.collect::<Vec<i32>>();
let filter_mode = &req.tag_filter_mode.unwrap_or(FilterMode::Any);
let excluded = file_tags.iter().any(|t| excluded_tag_ids.contains(&t.id));
return !excluded
&& match filter_mode {
FilterMode::Any => {
file_tags.iter().any(|t| tag_ids.contains(&t.id))
}
FilterMode::All => tag_ids
.iter()
.all(|id| file_tags.iter().any(|tag| &tag.id == id)),
};
}
true
})
.map(|(file_name, tags)| FileWithTagCount {
file_name,
tag_count: tags.len() as i64,
})
.collect::<Vec<FileWithTagCount>>();
let mut response_files = photos
.clone()
.into_iter()
.map(|f| f.file_name)
.collect::<Vec<String>>();
if let Some(sort_type) = req.sort {
debug!("Sorting files: {:?}", sort_type);
response_files = sort(photos, sort_type)
}
let dirs = files
.iter()
.filter(|&f| f.metadata().map_or(false, |md| md.is_dir()))
.map(|path: &PathBuf| {
let relative = path.strip_prefix(&app_state.base_path).unwrap();
relative.to_path_buf()
})
.map(|f| f.to_str().unwrap().to_string())
.collect::<Vec<String>>();
span_context
.span()
.set_attribute(KeyValue::new("file_count", files.len().to_string()));
span_context.span().set_status(Status::Ok);
HttpResponse::Ok().json(PhotosResponse {
photos: response_files,
dirs,
})
.map(|path: &PathBuf| {
let relative = path.strip_prefix(&app_state.base_path).unwrap();
relative.to_path_buf()
})
.map(|f| f.to_str().unwrap().to_string())
.map(|file_name| {
let mut tag_dao = tag_dao.lock().expect("Unable to get TagDao");
let file_tags = tag_dao
.get_tags_for_path(&span_context, &file_name)
.unwrap_or_default();
(file_name, file_tags)
})
.filter(|(_, file_tags)| {
if let Some(tag_ids) = &req.tag_ids {
let tag_ids = tag_ids
.split(',')
.filter_map(|t| t.parse().ok())
.collect::<Vec<i32>>();
let excluded_tag_ids = &req
.exclude_tag_ids
.clone()
.unwrap_or_default()
.split(',')
.filter_map(|t| t.parse().ok())
.collect::<Vec<i32>>();
let filter_mode = &req.tag_filter_mode.unwrap_or(FilterMode::Any);
let excluded = file_tags.iter().any(|t| excluded_tag_ids.contains(&t.id));
return !excluded
&& match filter_mode {
FilterMode::Any => file_tags.iter().any(|t| tag_ids.contains(&t.id)),
FilterMode::All => tag_ids
.iter()
.all(|id| file_tags.iter().any(|tag| &tag.id == id)),
};
}
true
})
.map(|(file_name, tags)| FileWithTagCount {
file_name,
tag_count: tags.len() as i64,
})
.collect::<Vec<FileWithTagCount>>();
let mut response_files = photos
.clone()
.into_iter()
.map(|f| f.file_name)
.collect::<Vec<String>>();
if let Some(sort_type) = req.sort {
debug!("Sorting files: {:?}", sort_type);
response_files = sort(photos, sort_type)
}
let dirs = files
.iter()
.filter(|&f| f.metadata().map_or(false, |md| md.is_dir()))
.map(|path: &PathBuf| {
let relative = path.strip_prefix(&app_state.base_path).unwrap();
relative.to_path_buf()
})
.map(|f| f.to_str().unwrap().to_string())
.collect::<Vec<String>>();
span_context
.span()
.set_attribute(KeyValue::new("file_count", files.len().to_string()));
span_context.span().set_status(Status::Ok);
HttpResponse::Ok().json(PhotosResponse {
photos: response_files,
dirs,
})
} else {
error!("Bad photos request: {}", req.path);
span_context
.span()
.set_status(Status::error("Invalid path"));
HttpResponse::BadRequest().finish()
_ => {
error!("Bad photos request: {}", req.path);
span_context
.span()
.set_status(Status::error("Invalid path"));
HttpResponse::BadRequest().finish()
}
}
}
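The if let Ok(files) ... else block becoming a match here, and the same rewrite in the video handlers further down, appears to be the Rust 2024 if let temporary-scope migration (the if_let_rescope lint) rather than a stylistic choice: in the 2024 edition, temporaries created by the if let scrutinee are dropped before the else branch runs, and cargo fix --edition rewrites if let/else into match to preserve the old drop order. A minimal sketch of the difference, using a Mutex guard as the temporary whose drop timing changes:

use std::sync::Mutex;

// Edition 2021: the MutexGuard temporary lives to the end of the whole
// if/else, so the else branch still runs with the lock held.
// Edition 2024: the guard is dropped before the else branch is evaluated.
fn if_let_form(m: &Mutex<Vec<u8>>) {
    if let Some(&first) = m.lock().unwrap().first() {
        println!("first byte: {first}");
    } else {
        println!("empty");
    }
}

// The match form keeps the guard alive across both arms in every edition,
// which is why the migration rewrites if let/else this way.
fn match_form(m: &Mutex<Vec<u8>>) {
    match m.lock().unwrap().first() {
        Some(&first) => println!("first byte: {first}"),
        None => println!("empty"),
    }
}

fn main() {
    let m = Mutex::new(vec![1, 2, 3]);
    if_let_form(&m);
    match_form(&m);
}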
@@ -505,12 +510,12 @@ mod tests {
mod api {
use super::*;
use actix_web::{web::Query, HttpResponse};
use actix_web::{HttpResponse, web::Query};
use crate::{
AppState,
data::{Claims, PhotosResponse},
testhelpers::BodyReader,
AppState,
};
use crate::database::test::in_memory_db_connection;
@@ -561,14 +566,15 @@ mod tests {
assert!(body.photos.contains(&String::from("photo.jpg")));
assert!(body.dirs.contains(&String::from("test-dir")));
assert!(body
.photos
.iter()
.filter(|filename| !filename.ends_with(".png")
&& !filename.ends_with(".jpg")
&& !filename.ends_with(".jpeg"))
.collect::<Vec<&String>>()
.is_empty());
assert!(
body.photos
.iter()
.filter(|filename| !filename.ends_with(".png")
&& !filename.ends_with(".jpg")
&& !filename.ends_with(".jpeg"))
.collect::<Vec<&String>>()
.is_empty()
);
}
#[actix_rt::test]


@@ -4,13 +4,13 @@ extern crate rayon;
use actix_web::web::Data;
use actix_web_prom::PrometheusMetricsBuilder;
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
use diesel_migrations::{EmbeddedMigrations, MigrationHarness, embed_migrations};
use futures::stream::StreamExt;
use lazy_static::lazy_static;
use prometheus::{self, IntGauge};
use std::error::Error;
use std::sync::mpsc::channel;
use std::sync::Mutex;
use std::sync::mpsc::channel;
use std::{collections::HashMap, io::prelude::*};
use std::{env, fs::File};
use std::{
@@ -22,9 +22,8 @@ use walkdir::{DirEntry, WalkDir};
use actix_files::NamedFile;
use actix_multipart as mp;
use actix_web::{
delete, get, middleware, post, put,
App, HttpRequest, HttpResponse, HttpServer, Responder, delete, get, middleware, post, put,
web::{self, BufMut, BytesMut},
App, HttpRequest, HttpResponse, HttpServer, Responder,
};
use anyhow::Context;
use chrono::Utc;
@@ -36,19 +35,19 @@ use crate::auth::login;
use crate::data::*;
use crate::database::*;
use crate::files::{
is_image_or_video, is_valid_full_path, move_file, RealFileSystem, RefreshThumbnailsMessage,
RealFileSystem, RefreshThumbnailsMessage, is_image_or_video, is_valid_full_path, move_file,
};
use crate::otel::{extract_context_from_request, global_tracer};
use crate::service::ServiceBuilder;
use crate::state::AppState;
use crate::tags::*;
use crate::video::actors::{
create_playlist, generate_video_thumbnail, ProcessMessage, ScanDirectoryMessage,
ProcessMessage, ScanDirectoryMessage, create_playlist, generate_video_thumbnail,
};
use crate::video::generate_video_gifs;
use log::{debug, error, info, trace, warn};
use opentelemetry::trace::{Span, Status, TraceContextExt, Tracer};
use opentelemetry::{global, KeyValue};
use opentelemetry::{KeyValue, global};
mod auth;
mod data;
@@ -332,12 +331,17 @@ async fn stream_video(
span.set_status(Status::error(format!("playlist not valid {}", playlist)));
HttpResponse::BadRequest().finish()
} else if let Ok(file) = NamedFile::open(playlist) {
span.set_status(Status::Ok);
file.into_response(&request)
} else {
span.set_status(Status::error(format!("playlist not found {}", playlist)));
HttpResponse::NotFound().finish()
match NamedFile::open(playlist) {
Ok(file) => {
span.set_status(Status::Ok);
file.into_response(&request)
}
_ => {
span.set_status(Status::error(format!("playlist not found {}", playlist)));
HttpResponse::NotFound().finish()
}
}
}
}
@@ -359,16 +363,19 @@ async fn get_video_part(
file_part.push(app_state.video_path.clone());
file_part.push(part);
// TODO: Do we need to guard against directory attacks here?
if let Ok(file) = NamedFile::open(&file_part) {
span.set_status(Status::Ok);
file.into_response(&request)
} else {
error!("Video part not found: {:?}", file_part);
span.set_status(Status::error(format!(
"Video part not found '{}'",
file_part.to_str().unwrap()
)));
HttpResponse::NotFound().finish()
match NamedFile::open(&file_part) {
Ok(file) => {
span.set_status(Status::Ok);
file.into_response(&request)
}
_ => {
error!("Video part not found: {:?}", file_part);
span.set_status(Status::error(format!(
"Video part not found '{}'",
file_part.to_str().unwrap()
)));
HttpResponse::NotFound().finish()
}
}
}
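Both video handlers now share the same open-or-404 shape. Stripped of the tracing and logging, the pattern is roughly (a simplified sketch, not the handlers' real signatures):

use actix_files::NamedFile;
use actix_web::{HttpRequest, HttpResponse};
use std::path::Path;

// Serve the file if it can be opened, otherwise answer 404; mirrors the match
// blocks above, minus the OpenTelemetry span bookkeeping.
fn serve_or_404(path: &Path, request: &HttpRequest) -> HttpResponse {
    match NamedFile::open(path) {
        Ok(file) => file.into_response(request),
        Err(_) => HttpResponse::NotFound().finish(),
    }
}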


@@ -1,10 +1,10 @@
use actix_web::web::Data;
use actix_web::{get, web, HttpRequest, HttpResponse, Responder};
use actix_web::{HttpRequest, HttpResponse, Responder, get, web};
use chrono::LocalResult::{Ambiguous, Single};
use chrono::{DateTime, Datelike, FixedOffset, Local, LocalResult, NaiveDate, TimeZone, Utc};
use log::{debug, trace, warn};
use opentelemetry::trace::{Span, Status, Tracer};
use opentelemetry::KeyValue;
use opentelemetry::trace::{Span, Status, Tracer};
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use std::path::Path;
@@ -135,21 +135,19 @@ fn extract_date_from_filename(filename: &str) -> Option<DateTime<FixedOffset>> {
};
// 1. Screenshot format: Screenshot_2014-06-01-20-44-50.png
if let Some(captures) =
regex::Regex::new(r"(\d{4})-(\d{2})-(\d{2})-(\d{2})-(\d{2})-(\d{2})")
.ok()?
.captures(filename)
.and_then(|c| build_date_from_ymd_capture(&c))
if let Some(captures) = regex::Regex::new(r"(\d{4})-(\d{2})-(\d{2})-(\d{2})-(\d{2})-(\d{2})")
.ok()?
.captures(filename)
.and_then(|c| build_date_from_ymd_capture(&c))
{
return Some(captures);
}
// Screenshot format: Screenshot_20140601[_-]204450.png
if let Some(captures) =
regex::Regex::new(r"(\d{4})(\d{2})(\d{2})[_-](\d{2})(\d{2})(\d{2})")
.ok()?
.captures(filename)
.and_then(|c| build_date_from_ymd_capture(&c))
if let Some(captures) = regex::Regex::new(r"(\d{4})(\d{2})(\d{2})[_-](\d{2})(\d{2})(\d{2})")
.ok()?
.captures(filename)
.and_then(|c| build_date_from_ymd_capture(&c))
{
return Some(captures);
}
@@ -468,7 +466,7 @@ mod tests {
#[test]
fn test_extract_date_from_filename_timestamp_format() {
let filename = "xyz_1401638400.jpeg"; // Unix timestamp for 2014-06-01 16:00:00 UTC
// Timestamps are already in UTC, so timezone doesn't matter for this test
// Timestamps are already in UTC, so timezone doesn't matter for this test
let date_time = extract_date_from_filename(filename).unwrap();
assert_eq!(date_time.year(), 2014);
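The edits in this file are again rustfmt re-wrapping (the regex chains now fit on fewer lines) plus a re-indented comment, but the function being touched deserves a gloss: it recovers a capture date from filenames such as Screenshot_2014-06-01-20-44-50.png, Screenshot_20140601_204450.png, or a bare Unix timestamp. A stripped-down version of the first case, with the repo's build_date_from_ymd_capture helper inlined (the function name here is illustrative, not the actual code):

use chrono::{DateTime, FixedOffset, TimeZone};
use regex::Regex;

// Pull YYYY-MM-DD-HH-MM-SS out of a screenshot-style filename and build a
// fixed-offset datetime from the six captured groups (offset 0 for simplicity).
fn date_from_screenshot_name(filename: &str) -> Option<DateTime<FixedOffset>> {
    let re = Regex::new(r"(\d{4})-(\d{2})-(\d{2})-(\d{2})-(\d{2})-(\d{2})").ok()?;
    let caps = re.captures(filename)?;
    let num = |i: usize| caps.get(i).and_then(|m| m.as_str().parse::<u32>().ok());
    FixedOffset::east_opt(0)?
        .with_ymd_and_hms(num(1)? as i32, num(2)?, num(3)?, num(4)?, num(5)?, num(6)?)
        .single()
}

fn main() {
    let date = date_from_screenshot_name("Screenshot_2014-06-01-20-44-50.png");
    println!("{date:?}"); // Some(2014-06-01T20:44:50+00:00)
}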


@@ -1,14 +1,14 @@
use actix_web::http::header::HeaderMap;
use actix_web::HttpRequest;
use actix_web::http::header::HeaderMap;
use opentelemetry::global::{BoxedSpan, BoxedTracer};
use opentelemetry::propagation::TextMapPropagator;
use opentelemetry::trace::{Span, Status, Tracer};
use opentelemetry::{global, Context, KeyValue};
use opentelemetry::{Context, KeyValue, global};
use opentelemetry_appender_log::OpenTelemetryLogBridge;
use opentelemetry_otlp::WithExportConfig;
use opentelemetry_sdk::Resource;
use opentelemetry_sdk::logs::{BatchLogProcessor, SdkLoggerProvider};
use opentelemetry_sdk::propagation::TraceContextPropagator;
use opentelemetry_sdk::Resource;
pub fn global_tracer() -> BoxedTracer {
global::tracer("image-server")


@@ -1,16 +1,16 @@
use crate::data::GetTagsRequest;
use crate::otel::{extract_context_from_request, global_tracer, trace_db_call};
use crate::{connect, data::AddTagRequest, error::IntoHttpError, schema, Claims, ThumbnailRequest};
use crate::{Claims, ThumbnailRequest, connect, data::AddTagRequest, error::IntoHttpError, schema};
use actix_web::dev::{ServiceFactory, ServiceRequest};
use actix_web::{web, App, HttpRequest, HttpResponse, Responder};
use actix_web::{App, HttpRequest, HttpResponse, Responder, web};
use anyhow::Context;
use chrono::Utc;
use diesel::dsl::count_star;
use diesel::prelude::*;
use diesel::sql_types::*;
use log::{debug, info};
use opentelemetry::trace::{Span, Status, TraceContextExt, Tracer};
use opentelemetry::KeyValue;
use opentelemetry::trace::{Span, Status, TraceContextExt, Tracer};
use schema::{tagged_photo, tags};
use serde::{Deserialize, Serialize};
use std::borrow::BorrowMut;


@@ -1,9 +1,9 @@
use actix_web::{
body::{BoxBody, MessageBody},
HttpResponse,
body::{BoxBody, MessageBody},
};
use crate::database::{models::User, UserDao};
use crate::database::{UserDao, models::User};
use std::cell::RefCell;
use std::option::Option;


@@ -3,8 +3,8 @@ use crate::otel::global_tracer;
use actix::prelude::*;
use futures::TryFutureExt;
use log::{debug, error, info, trace, warn};
use opentelemetry::trace::{Span, Status, Tracer};
use opentelemetry::KeyValue;
use opentelemetry::trace::{Span, Status, Tracer};
use std::io::Result;
use std::path::{Path, PathBuf};
use std::process::{Child, Command, ExitStatus, Stdio};