feat: multi-library foundation (schema + libraries module)
Adds a `libraries` registry table and threads library_id through per-instance metadata tables (image_exif, photo_insights, entity_photo_links, video_preview_clips). File-path columns renamed to rel_path to make the relative-to-root semantics explicit. Adds content_hash + size_bytes on image_exif to support future hash-keyed thumbnail/HLS dedup. Tags and favorites stay library-agnostic so they share across libraries by rel_path. Behavior is unchanged: a single primary library (id=1) is seeded from BASE_PATH on first boot; all handlers and DAOs route through it as a transitional shim until the API gains a library query param. Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
This commit is contained in:
155
migrations/2026-04-17-000000_multi_library/down.sql
Normal file
155
migrations/2026-04-17-000000_multi_library/down.sql
Normal file
@@ -0,0 +1,155 @@
-- Revert multi-library support.
--
-- Drops library_id/content_hash/size_bytes, renames rel_path back to the
-- original column names, and drops the `libraries` registry. Rows that came
-- from non-primary libraries (library_id > 1) have no home in the
-- single-library schema, so this rollback keeps only rows with
-- library_id = 1 — it is intentionally lossy for any other library.
--
-- NOTE(review): PRAGMA foreign_keys is a no-op inside an open transaction;
-- if the migration runner wraps this file in one, the pragma has no effect —
-- confirm against the runner's behavior.

PRAGMA foreign_keys=OFF;
-- tagged_photo: rel_path → photo_name (simple column rename, no rebuild).
-- The forward migration deleted duplicate (rel_path, tag_id) rows before
-- adding its unique index; those deletions are not undone here.
DROP INDEX IF EXISTS idx_tagged_photo_relpath_tag;
DROP INDEX IF EXISTS idx_tagged_photo_rel_path;

ALTER TABLE tagged_photo RENAME COLUMN rel_path TO photo_name;

CREATE INDEX IF NOT EXISTS idx_tagged_photo_photo_name ON tagged_photo(photo_name);
CREATE INDEX IF NOT EXISTS idx_tagged_photo_count ON tagged_photo(photo_name, tag_id);
-- favorites: rel_path → path (simple column rename, no rebuild).
DROP INDEX IF EXISTS idx_favorites_unique;
DROP INDEX IF EXISTS idx_favorites_rel_path;

ALTER TABLE favorites RENAME COLUMN rel_path TO path;

CREATE INDEX IF NOT EXISTS idx_favorites_path ON favorites(path);
CREATE UNIQUE INDEX IF NOT EXISTS idx_favorites_unique ON favorites(userid, path);
-- video_preview_clips: drop library_id, rel_path → file_path.
-- Rebuild-and-copy (same idiom as the forward migration), keeping only the
-- primary library's rows; the DROP TABLE also discards the old indexes.
CREATE TABLE video_preview_clips_old (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    file_path TEXT NOT NULL UNIQUE,
    status TEXT NOT NULL DEFAULT 'pending',
    duration_seconds REAL,
    file_size_bytes INTEGER,
    error_message TEXT,
    created_at TEXT NOT NULL,
    updated_at TEXT NOT NULL
);

INSERT INTO video_preview_clips_old (
    id, file_path, status, duration_seconds, file_size_bytes,
    error_message, created_at, updated_at
)
SELECT
    id, rel_path, status, duration_seconds, file_size_bytes,
    error_message, created_at, updated_at
FROM video_preview_clips
WHERE library_id = 1;

DROP TABLE video_preview_clips;
ALTER TABLE video_preview_clips_old RENAME TO video_preview_clips;

CREATE INDEX idx_preview_clips_file_path ON video_preview_clips(file_path);
CREATE INDEX idx_preview_clips_status ON video_preview_clips(status);
-- entity_photo_links: drop library_id, rel_path → file_path.
-- Rebuild-and-copy; `id` values are preserved so nothing referencing link
-- ids is invalidated. Only primary-library rows survive.
CREATE TABLE entity_photo_links_old (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    entity_id INTEGER NOT NULL,
    file_path TEXT NOT NULL,
    role TEXT NOT NULL,
    CONSTRAINT fk_epl_entity FOREIGN KEY (entity_id) REFERENCES entities(id) ON DELETE CASCADE,
    UNIQUE(entity_id, file_path, role)
);

INSERT INTO entity_photo_links_old (id, entity_id, file_path, role)
SELECT id, entity_id, rel_path, role
FROM entity_photo_links
WHERE library_id = 1;

DROP TABLE entity_photo_links;
ALTER TABLE entity_photo_links_old RENAME TO entity_photo_links;

CREATE INDEX idx_entity_photo_links_entity ON entity_photo_links(entity_id);
CREATE INDEX idx_entity_photo_links_photo ON entity_photo_links(file_path);
-- photo_insights: drop library_id, rel_path → file_path.
-- Rebuild-and-copy; `id` values are preserved so foreign keys such as
-- entity_facts.source_insight_id stay valid. Only primary-library rows survive.
CREATE TABLE photo_insights_old (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    file_path TEXT NOT NULL,
    title TEXT NOT NULL,
    summary TEXT NOT NULL,
    generated_at BIGINT NOT NULL,
    model_version TEXT NOT NULL,
    is_current BOOLEAN NOT NULL DEFAULT 0,
    training_messages TEXT,
    approved BOOLEAN
);

INSERT INTO photo_insights_old (
    id, file_path, title, summary, generated_at, model_version, is_current,
    training_messages, approved
)
SELECT
    id, rel_path, title, summary, generated_at, model_version, is_current,
    training_messages, approved
FROM photo_insights
WHERE library_id = 1;

DROP TABLE photo_insights;
ALTER TABLE photo_insights_old RENAME TO photo_insights;

CREATE INDEX idx_photo_insights_file_path ON photo_insights(file_path);
CREATE INDEX idx_photo_insights_current ON photo_insights(file_path, is_current);
-- image_exif: drop library_id/content_hash/size_bytes, rel_path → file_path.
-- Rebuild-and-copy; only primary-library rows survive, and the hash/size
-- columns added by the forward migration are discarded.
-- NOTE(review): this declaration omits AUTOINCREMENT while the other
-- rebuilds in this file use it — presumably it matches the pre-migration
-- image_exif schema; confirm against the original CREATE TABLE.
CREATE TABLE image_exif_old (
    id INTEGER PRIMARY KEY NOT NULL,
    file_path TEXT NOT NULL UNIQUE,
    camera_make TEXT,
    camera_model TEXT,
    lens_model TEXT,
    width INTEGER,
    height INTEGER,
    orientation INTEGER,
    gps_latitude REAL,
    gps_longitude REAL,
    gps_altitude REAL,
    focal_length REAL,
    aperture REAL,
    shutter_speed TEXT,
    iso INTEGER,
    date_taken BIGINT,
    created_time BIGINT NOT NULL,
    last_modified BIGINT NOT NULL
);

INSERT INTO image_exif_old (
    id, file_path,
    camera_make, camera_model, lens_model,
    width, height, orientation,
    gps_latitude, gps_longitude, gps_altitude,
    focal_length, aperture, shutter_speed, iso, date_taken,
    created_time, last_modified
)
SELECT
    id, rel_path,
    camera_make, camera_model, lens_model,
    width, height, orientation,
    gps_latitude, gps_longitude, gps_altitude,
    focal_length, aperture, shutter_speed, iso, date_taken,
    created_time, last_modified
FROM image_exif
WHERE library_id = 1;

DROP TABLE image_exif;
ALTER TABLE image_exif_old RENAME TO image_exif;

CREATE INDEX idx_image_exif_file_path ON image_exif(file_path);
CREATE INDEX idx_image_exif_camera ON image_exif(camera_make, camera_model);
CREATE INDEX idx_image_exif_gps ON image_exif(gps_latitude, gps_longitude);
CREATE INDEX idx_image_exif_date_taken ON image_exif(date_taken);
CREATE INDEX idx_image_exif_date_path ON image_exif(date_taken DESC, file_path);
-- With no remaining references, drop the libraries registry itself.
DROP TABLE libraries;

PRAGMA foreign_keys=ON;

-- Refresh planner statistics after the rebuilds.
ANALYZE;
216
migrations/2026-04-17-000000_multi_library/up.sql
Normal file
216
migrations/2026-04-17-000000_multi_library/up.sql
Normal file
@@ -0,0 +1,216 @@
-- Multi-library support.
--
-- Adds the `libraries` registry table plus a `library_id` column on every
-- per-instance metadata table. Renames `file_path` / `photo_name` / `path`
-- to `rel_path` for semantic clarity (values are already stored relative to
-- BASE_PATH). Adds `content_hash` + `size_bytes` to `image_exif` to enable
-- content-addressed dedup of thumbnails and HLS output across libraries.
--
-- SQLite cannot alter column constraints in place, so per-instance tables
-- are rebuilt following the idiom established in
-- 2026-04-02-000000_photo_insights_history/up.sql. Existing row ids are
-- preserved so foreign keys (entity_facts.source_insight_id, etc.) remain
-- valid after migration.

PRAGMA foreign_keys=OFF;
-- ---------------------------------------------------------------------------
-- 1. Libraries registry.
--    Seeded with a placeholder for the primary library; AppState patches
--    `root_path` from the BASE_PATH env var on first boot. Subsequent
--    prod-to-dev DB syncs update this row via a single SQL UPDATE.
-- ---------------------------------------------------------------------------
CREATE TABLE libraries (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    name TEXT NOT NULL UNIQUE,
    root_path TEXT NOT NULL,
    -- Unix timestamp (seconds) the library was registered.
    created_at BIGINT NOT NULL
);

-- strftime() returns TEXT; cast explicitly rather than relying on the
-- column's INTEGER affinity to coerce the value on insert.
INSERT INTO libraries (id, name, root_path, created_at)
VALUES (1, 'main', 'BASE_PATH_PLACEHOLDER', CAST(strftime('%s','now') AS INTEGER));
-- ---------------------------------------------------------------------------
-- 2. image_exif: + library_id, file_path → rel_path, + content_hash/size_bytes.
--    Rebuild-and-copy; ids are preserved and all existing rows are assigned
--    to the primary library (id = 1).
-- ---------------------------------------------------------------------------
CREATE TABLE image_exif_new (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    library_id INTEGER NOT NULL REFERENCES libraries(id),
    rel_path TEXT NOT NULL,

    -- Camera information
    camera_make TEXT,
    camera_model TEXT,
    lens_model TEXT,

    -- Image properties
    width INTEGER,
    height INTEGER,
    orientation INTEGER,

    -- GPS
    gps_latitude REAL,
    gps_longitude REAL,
    gps_altitude REAL,

    -- Capture settings
    focal_length REAL,
    aperture REAL,
    shutter_speed TEXT,
    iso INTEGER,
    date_taken BIGINT,

    -- Housekeeping
    created_time BIGINT NOT NULL,
    last_modified BIGINT NOT NULL,

    -- Content identity (backfilled by the `backfill_hashes` binary and by the watcher for new files)
    content_hash TEXT,
    size_bytes BIGINT,

    UNIQUE(library_id, rel_path)
);

INSERT INTO image_exif_new (
    id, library_id, rel_path,
    camera_make, camera_model, lens_model,
    width, height, orientation,
    gps_latitude, gps_longitude, gps_altitude,
    focal_length, aperture, shutter_speed, iso, date_taken,
    created_time, last_modified
)
SELECT
    id, 1, file_path,
    camera_make, camera_model, lens_model,
    width, height, orientation,
    gps_latitude, gps_longitude, gps_altitude,
    focal_length, aperture, shutter_speed, iso, date_taken,
    created_time, last_modified
FROM image_exif;

DROP TABLE image_exif;
ALTER TABLE image_exif_new RENAME TO image_exif;

CREATE INDEX idx_image_exif_rel_path ON image_exif(rel_path);
CREATE INDEX idx_image_exif_camera ON image_exif(camera_make, camera_model);
CREATE INDEX idx_image_exif_gps ON image_exif(gps_latitude, gps_longitude);
CREATE INDEX idx_image_exif_date_taken ON image_exif(date_taken);
CREATE INDEX idx_image_exif_date_path ON image_exif(date_taken DESC, rel_path);
CREATE INDEX idx_image_exif_lib_date ON image_exif(library_id, date_taken);
CREATE INDEX idx_image_exif_content_hash ON image_exif(content_hash);
-- ---------------------------------------------------------------------------
-- 3. photo_insights: + library_id, file_path → rel_path.
--    Preserve `id` so entity_facts.source_insight_id FKs remain valid.
-- ---------------------------------------------------------------------------
CREATE TABLE photo_insights_new (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    library_id INTEGER NOT NULL REFERENCES libraries(id),
    rel_path TEXT NOT NULL,
    title TEXT NOT NULL,
    summary TEXT NOT NULL,
    generated_at BIGINT NOT NULL,
    model_version TEXT NOT NULL,
    is_current BOOLEAN NOT NULL DEFAULT 0,
    training_messages TEXT,
    approved BOOLEAN
);

INSERT INTO photo_insights_new (
    id, library_id, rel_path, title, summary, generated_at, model_version,
    is_current, training_messages, approved
)
SELECT
    id, 1, file_path, title, summary, generated_at, model_version,
    is_current, training_messages, approved
FROM photo_insights;

DROP TABLE photo_insights;
ALTER TABLE photo_insights_new RENAME TO photo_insights;

CREATE INDEX idx_photo_insights_rel_path ON photo_insights(rel_path);
CREATE INDEX idx_photo_insights_current ON photo_insights(library_id, rel_path, is_current);
-- ---------------------------------------------------------------------------
-- 4. entity_photo_links: + library_id, file_path → rel_path.
--    Preserves the entity FK; UNIQUE now includes library_id so the same
--    rel_path can link entities in multiple libraries independently.
-- ---------------------------------------------------------------------------
CREATE TABLE entity_photo_links_new (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    entity_id INTEGER NOT NULL,
    library_id INTEGER NOT NULL REFERENCES libraries(id),
    rel_path TEXT NOT NULL,
    role TEXT NOT NULL,
    CONSTRAINT fk_epl_entity FOREIGN KEY (entity_id) REFERENCES entities(id) ON DELETE CASCADE,
    UNIQUE(entity_id, library_id, rel_path, role)
);

INSERT INTO entity_photo_links_new (id, entity_id, library_id, rel_path, role)
SELECT id, entity_id, 1, file_path, role FROM entity_photo_links;

DROP TABLE entity_photo_links;
ALTER TABLE entity_photo_links_new RENAME TO entity_photo_links;

CREATE INDEX idx_entity_photo_links_entity ON entity_photo_links(entity_id);
CREATE INDEX idx_entity_photo_links_photo ON entity_photo_links(library_id, rel_path);
-- ---------------------------------------------------------------------------
-- 5. video_preview_clips: + library_id, file_path → rel_path.
--    The old file_path UNIQUE becomes UNIQUE(library_id, rel_path).
-- ---------------------------------------------------------------------------
CREATE TABLE video_preview_clips_new (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    library_id INTEGER NOT NULL REFERENCES libraries(id),
    rel_path TEXT NOT NULL,
    status TEXT NOT NULL DEFAULT 'pending',
    duration_seconds REAL,
    file_size_bytes INTEGER,
    error_message TEXT,
    created_at TEXT NOT NULL,
    updated_at TEXT NOT NULL,
    UNIQUE(library_id, rel_path)
);

INSERT INTO video_preview_clips_new (
    id, library_id, rel_path, status, duration_seconds, file_size_bytes,
    error_message, created_at, updated_at
)
SELECT
    id, 1, file_path, status, duration_seconds, file_size_bytes,
    error_message, created_at, updated_at
FROM video_preview_clips;

DROP TABLE video_preview_clips;
ALTER TABLE video_preview_clips_new RENAME TO video_preview_clips;

CREATE INDEX idx_preview_clips_rel_path ON video_preview_clips(rel_path);
CREATE INDEX idx_preview_clips_status ON video_preview_clips(status);
-- ---------------------------------------------------------------------------
-- 6. favorites: path → rel_path. Library-agnostic (cross-library sharing),
--    so no library_id column is added; a simple rename suffices.
-- ---------------------------------------------------------------------------
ALTER TABLE favorites RENAME COLUMN path TO rel_path;

-- Renaming a column does not rename its indexes; drop by old name, recreate.
DROP INDEX IF EXISTS idx_favorites_path;
DROP INDEX IF EXISTS idx_favorites_unique;
CREATE INDEX idx_favorites_rel_path ON favorites(rel_path);
CREATE UNIQUE INDEX idx_favorites_unique ON favorites(userid, rel_path);
-- ---------------------------------------------------------------------------
-- 7. tagged_photo: photo_name → rel_path. Library-agnostic.
--    Dedup first so the (rel_path, tag_id) unique index can be created safely.
-- ---------------------------------------------------------------------------
ALTER TABLE tagged_photo RENAME COLUMN photo_name TO rel_path;

-- Keep the lowest-id row of each (rel_path, tag_id) group; drop the rest.
DELETE FROM tagged_photo
WHERE id NOT IN (
    SELECT MIN(id) FROM tagged_photo GROUP BY rel_path, tag_id
);

DROP INDEX IF EXISTS idx_tagged_photo_photo_name;
DROP INDEX IF EXISTS idx_tagged_photo_count;
CREATE INDEX idx_tagged_photo_rel_path ON tagged_photo(rel_path);
CREATE UNIQUE INDEX idx_tagged_photo_relpath_tag ON tagged_photo(rel_path, tag_id);

PRAGMA foreign_keys=ON;

-- Refresh planner statistics after the rebuilds.
ANALYZE;
@@ -1187,6 +1187,7 @@ impl InsightGenerator {
|
||||
|
||||
// 11. Store in database
|
||||
let insight = InsertPhotoInsight {
|
||||
library_id: crate::libraries::PRIMARY_LIBRARY_ID,
|
||||
file_path: file_path.to_string(),
|
||||
title,
|
||||
summary,
|
||||
@@ -2031,6 +2032,7 @@ Return ONLY the summary, nothing else."#,
|
||||
// Upsert a photo link so this entity is associated with this photo
|
||||
let link = InsertEntityPhotoLink {
|
||||
entity_id: subject_entity_id,
|
||||
library_id: crate::libraries::PRIMARY_LIBRARY_ID,
|
||||
file_path: file_path.to_string(),
|
||||
role: photo_role,
|
||||
};
|
||||
@@ -2742,6 +2744,7 @@ Return ONLY the summary, nothing else."#,
|
||||
|
||||
// 15. Store insight (returns the persisted row including its new id)
|
||||
let insight = InsertPhotoInsight {
|
||||
library_id: crate::libraries::PRIMARY_LIBRARY_ID,
|
||||
file_path: file_path.to_string(),
|
||||
title,
|
||||
summary: final_content,
|
||||
|
||||
@@ -94,6 +94,7 @@ fn main() -> anyhow::Result<()> {
|
||||
Ok(exif_data) => {
|
||||
let timestamp = Utc::now().timestamp();
|
||||
let insert_exif = InsertImageExif {
|
||||
library_id: image_api::libraries::PRIMARY_LIBRARY_ID,
|
||||
file_path: relative_path.clone(),
|
||||
camera_make: exif_data.camera_make,
|
||||
camera_model: exif_data.camera_model,
|
||||
@@ -114,6 +115,8 @@ fn main() -> anyhow::Result<()> {
|
||||
.map(|e| e.created_time)
|
||||
.unwrap_or(timestamp),
|
||||
last_modified: timestamp,
|
||||
content_hash: None,
|
||||
size_bytes: None,
|
||||
};
|
||||
|
||||
// Store or update in database
|
||||
|
||||
@@ -86,7 +86,11 @@ impl InsightDao for SqliteInsightDao {
|
||||
let mut connection = self.connection.lock().expect("Unable to get InsightDao");
|
||||
|
||||
// Mark all existing insights for this file as no longer current
|
||||
diesel::update(photo_insights.filter(file_path.eq(&insight.file_path)))
|
||||
diesel::update(
|
||||
photo_insights
|
||||
.filter(library_id.eq(insight.library_id))
|
||||
.filter(rel_path.eq(&insight.file_path)),
|
||||
)
|
||||
.set(is_current.eq(false))
|
||||
.execute(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("Update is_current error"))?;
|
||||
@@ -99,7 +103,8 @@ impl InsightDao for SqliteInsightDao {
|
||||
|
||||
// Retrieve the inserted record (is_current = true)
|
||||
photo_insights
|
||||
.filter(file_path.eq(&insight.file_path))
|
||||
.filter(library_id.eq(insight.library_id))
|
||||
.filter(rel_path.eq(&insight.file_path))
|
||||
.filter(is_current.eq(true))
|
||||
.first::<PhotoInsight>(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("Query error"))
|
||||
@@ -118,7 +123,7 @@ impl InsightDao for SqliteInsightDao {
|
||||
let mut connection = self.connection.lock().expect("Unable to get InsightDao");
|
||||
|
||||
photo_insights
|
||||
.filter(file_path.eq(path))
|
||||
.filter(rel_path.eq(path))
|
||||
.filter(is_current.eq(true))
|
||||
.first::<PhotoInsight>(connection.deref_mut())
|
||||
.optional()
|
||||
@@ -138,7 +143,7 @@ impl InsightDao for SqliteInsightDao {
|
||||
let mut connection = self.connection.lock().expect("Unable to get InsightDao");
|
||||
|
||||
photo_insights
|
||||
.filter(file_path.eq(path))
|
||||
.filter(rel_path.eq(path))
|
||||
.order(generated_at.desc())
|
||||
.load::<PhotoInsight>(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("Query error"))
|
||||
@@ -156,7 +161,7 @@ impl InsightDao for SqliteInsightDao {
|
||||
|
||||
let mut connection = self.connection.lock().expect("Unable to get InsightDao");
|
||||
|
||||
diesel::delete(photo_insights.filter(file_path.eq(path)))
|
||||
diesel::delete(photo_insights.filter(rel_path.eq(path)))
|
||||
.execute(connection.deref_mut())
|
||||
.map(|_| ())
|
||||
.map_err(|_| anyhow::anyhow!("Delete error"))
|
||||
@@ -195,7 +200,7 @@ impl InsightDao for SqliteInsightDao {
|
||||
|
||||
diesel::update(
|
||||
photo_insights
|
||||
.filter(file_path.eq(path))
|
||||
.filter(rel_path.eq(path))
|
||||
.filter(is_current.eq(true)),
|
||||
)
|
||||
.set(approved.eq(Some(is_approved)))
|
||||
|
||||
@@ -550,8 +550,8 @@ impl KnowledgeDao for SqliteKnowledgeDao {
|
||||
|
||||
// 3. Copy photo links to target (INSERT OR IGNORE to skip duplicates)
|
||||
let links_updated = diesel::sql_query(
|
||||
"INSERT OR IGNORE INTO entity_photo_links (entity_id, file_path, role) \
|
||||
SELECT ?, file_path, role FROM entity_photo_links WHERE entity_id = ?",
|
||||
"INSERT OR IGNORE INTO entity_photo_links (entity_id, library_id, rel_path, role) \
|
||||
SELECT ?, library_id, rel_path, role FROM entity_photo_links WHERE entity_id = ?",
|
||||
)
|
||||
.bind::<diesel::sql_types::Integer, _>(target_id)
|
||||
.bind::<diesel::sql_types::Integer, _>(source_id)
|
||||
@@ -781,11 +781,12 @@ impl KnowledgeDao for SqliteKnowledgeDao {
|
||||
) -> Result<(), DbError> {
|
||||
trace_db_call(cx, "insert", "upsert_photo_link", |_span| {
|
||||
let mut conn = self.connection.lock().expect("KnowledgeDao lock");
|
||||
// INSERT OR IGNORE respects the UNIQUE(entity_id, file_path, role) constraint
|
||||
// INSERT OR IGNORE respects the UNIQUE(entity_id, library_id, rel_path, role) constraint
|
||||
diesel::sql_query(
|
||||
"INSERT OR IGNORE INTO entity_photo_links (entity_id, file_path, role) VALUES (?, ?, ?)"
|
||||
"INSERT OR IGNORE INTO entity_photo_links (entity_id, library_id, rel_path, role) VALUES (?, ?, ?, ?)"
|
||||
)
|
||||
.bind::<diesel::sql_types::Integer, _>(link.entity_id)
|
||||
.bind::<diesel::sql_types::Integer, _>(link.library_id)
|
||||
.bind::<diesel::sql_types::Text, _>(&link.file_path)
|
||||
.bind::<diesel::sql_types::Text, _>(&link.role)
|
||||
.execute(conn.deref_mut())
|
||||
@@ -803,7 +804,7 @@ impl KnowledgeDao for SqliteKnowledgeDao {
|
||||
trace_db_call(cx, "delete", "delete_photo_links_for_file", |_span| {
|
||||
use schema::entity_photo_links::dsl::*;
|
||||
let mut conn = self.connection.lock().expect("KnowledgeDao lock");
|
||||
diesel::delete(entity_photo_links.filter(file_path.eq(file_path_val)))
|
||||
diesel::delete(entity_photo_links.filter(rel_path.eq(file_path_val)))
|
||||
.execute(conn.deref_mut())
|
||||
.map(|_| ())
|
||||
.map_err(|e| anyhow::anyhow!("Delete error: {}", e))
|
||||
@@ -820,7 +821,7 @@ impl KnowledgeDao for SqliteKnowledgeDao {
|
||||
use schema::entity_photo_links::dsl::*;
|
||||
let mut conn = self.connection.lock().expect("KnowledgeDao lock");
|
||||
entity_photo_links
|
||||
.filter(file_path.eq(file_path_val))
|
||||
.filter(rel_path.eq(file_path_val))
|
||||
.load::<EntityPhotoLink>(conn.deref_mut())
|
||||
.map_err(|e| anyhow::anyhow!("Query error: {}", e))
|
||||
})
|
||||
|
||||
@@ -184,7 +184,7 @@ impl FavoriteDao for SqliteFavoriteDao {
|
||||
let mut connection = self.connection.lock().expect("Unable to get FavoriteDao");
|
||||
|
||||
if favorites
|
||||
.filter(userid.eq(user_id).and(path.eq(&favorite_path)))
|
||||
.filter(userid.eq(user_id).and(rel_path.eq(&favorite_path)))
|
||||
.first::<Favorite>(connection.deref_mut())
|
||||
.is_err()
|
||||
{
|
||||
@@ -204,7 +204,7 @@ impl FavoriteDao for SqliteFavoriteDao {
|
||||
use schema::favorites::dsl::*;
|
||||
|
||||
diesel::delete(favorites)
|
||||
.filter(userid.eq(user_id).and(path.eq(favorite_path)))
|
||||
.filter(userid.eq(user_id).and(rel_path.eq(favorite_path)))
|
||||
.execute(self.connection.lock().unwrap().deref_mut())
|
||||
.unwrap();
|
||||
}
|
||||
@@ -221,8 +221,8 @@ impl FavoriteDao for SqliteFavoriteDao {
|
||||
fn update_path(&mut self, old_path: &str, new_path: &str) -> Result<(), DbError> {
|
||||
use schema::favorites::dsl::*;
|
||||
|
||||
diesel::update(favorites.filter(path.eq(old_path)))
|
||||
.set(path.eq(new_path))
|
||||
diesel::update(favorites.filter(rel_path.eq(old_path)))
|
||||
.set(rel_path.eq(new_path))
|
||||
.execute(self.connection.lock().unwrap().deref_mut())
|
||||
.map_err(|_| DbError::new(DbErrorKind::UpdateError))?;
|
||||
Ok(())
|
||||
@@ -232,7 +232,7 @@ impl FavoriteDao for SqliteFavoriteDao {
|
||||
use schema::favorites::dsl::*;
|
||||
|
||||
favorites
|
||||
.select(path)
|
||||
.select(rel_path)
|
||||
.distinct()
|
||||
.load(self.connection.lock().unwrap().deref_mut())
|
||||
.map_err(|_| DbError::new(DbErrorKind::QueryError))
|
||||
@@ -349,7 +349,8 @@ impl ExifDao for SqliteExifDao {
|
||||
.map_err(|_| anyhow::anyhow!("Insert error"))?;
|
||||
|
||||
image_exif
|
||||
.filter(file_path.eq(&exif_data.file_path))
|
||||
.filter(library_id.eq(exif_data.library_id))
|
||||
.filter(rel_path.eq(&exif_data.file_path))
|
||||
.first::<ImageExif>(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("Query error"))
|
||||
})
|
||||
@@ -372,7 +373,7 @@ impl ExifDao for SqliteExifDao {
|
||||
let windows_path = path.replace('/', "\\");
|
||||
|
||||
match image_exif
|
||||
.filter(file_path.eq(&normalized).or(file_path.eq(&windows_path)))
|
||||
.filter(rel_path.eq(&normalized).or(rel_path.eq(&windows_path)))
|
||||
.first::<ImageExif>(connection.deref_mut())
|
||||
{
|
||||
Ok(exif) => Ok(Some(exif)),
|
||||
@@ -393,7 +394,11 @@ impl ExifDao for SqliteExifDao {
|
||||
|
||||
let mut connection = self.connection.lock().expect("Unable to get ExifDao");
|
||||
|
||||
diesel::update(image_exif.filter(file_path.eq(&exif_data.file_path)))
|
||||
diesel::update(
|
||||
image_exif
|
||||
.filter(library_id.eq(exif_data.library_id))
|
||||
.filter(rel_path.eq(&exif_data.file_path)),
|
||||
)
|
||||
.set((
|
||||
camera_make.eq(&exif_data.camera_make),
|
||||
camera_model.eq(&exif_data.camera_model),
|
||||
@@ -415,7 +420,8 @@ impl ExifDao for SqliteExifDao {
|
||||
.map_err(|_| anyhow::anyhow!("Update error"))?;
|
||||
|
||||
image_exif
|
||||
.filter(file_path.eq(&exif_data.file_path))
|
||||
.filter(library_id.eq(exif_data.library_id))
|
||||
.filter(rel_path.eq(&exif_data.file_path))
|
||||
.first::<ImageExif>(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("Query error"))
|
||||
})
|
||||
@@ -426,7 +432,7 @@ impl ExifDao for SqliteExifDao {
|
||||
trace_db_call(context, "delete", "delete_exif", |_span| {
|
||||
use schema::image_exif::dsl::*;
|
||||
|
||||
diesel::delete(image_exif.filter(file_path.eq(path)))
|
||||
diesel::delete(image_exif.filter(rel_path.eq(path)))
|
||||
.execute(self.connection.lock().unwrap().deref_mut())
|
||||
.map(|_| ())
|
||||
.map_err(|_| anyhow::anyhow!("Delete error"))
|
||||
@@ -444,7 +450,7 @@ impl ExifDao for SqliteExifDao {
|
||||
let mut connection = self.connection.lock().expect("Unable to get ExifDao");
|
||||
|
||||
image_exif
|
||||
.select((file_path, date_taken))
|
||||
.select((rel_path, date_taken))
|
||||
.filter(date_taken.is_not_null())
|
||||
.load::<(String, Option<i64>)>(connection.deref_mut())
|
||||
.map(|records| {
|
||||
@@ -473,7 +479,7 @@ impl ExifDao for SqliteExifDao {
|
||||
let mut connection = self.connection.lock().expect("Unable to get ExifDao");
|
||||
|
||||
image_exif
|
||||
.filter(file_path.eq_any(file_paths))
|
||||
.filter(rel_path.eq_any(file_paths))
|
||||
.load::<ImageExif>(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("Query error"))
|
||||
})
|
||||
@@ -572,8 +578,8 @@ impl ExifDao for SqliteExifDao {
|
||||
|
||||
let mut connection = self.connection.lock().expect("Unable to get ExifDao");
|
||||
|
||||
diesel::update(image_exif.filter(file_path.eq(old_path)))
|
||||
.set(file_path.eq(new_path))
|
||||
diesel::update(image_exif.filter(rel_path.eq(old_path)))
|
||||
.set(rel_path.eq(new_path))
|
||||
.execute(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("Update error"))?;
|
||||
Ok(())
|
||||
@@ -591,7 +597,7 @@ impl ExifDao for SqliteExifDao {
|
||||
let mut connection = self.connection.lock().expect("Unable to get ExifDao");
|
||||
|
||||
image_exif
|
||||
.select(file_path)
|
||||
.select(rel_path)
|
||||
.load(connection.deref_mut())
|
||||
.map_err(|_| anyhow::anyhow!("Query error"))
|
||||
})
|
||||
@@ -627,7 +633,7 @@ impl ExifDao for SqliteExifDao {
|
||||
// Otherwise filter by path prefix
|
||||
if !base_path.is_empty() && base_path != "/" {
|
||||
// Match base path as prefix (with wildcard)
|
||||
query = query.filter(file_path.like(format!("{}%", base_path)));
|
||||
query = query.filter(rel_path.like(format!("{}%", base_path)));
|
||||
|
||||
span.set_attribute(KeyValue::new("path_filter_applied", true));
|
||||
} else {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::database::schema::{
|
||||
entities, entity_facts, entity_photo_links, favorites, image_exif, photo_insights, users,
|
||||
video_preview_clips,
|
||||
entities, entity_facts, entity_photo_links, favorites, image_exif, libraries, photo_insights,
|
||||
users, video_preview_clips,
|
||||
};
|
||||
use serde::Serialize;
|
||||
|
||||
@@ -23,6 +23,7 @@ pub struct User {
|
||||
#[diesel(table_name = favorites)]
|
||||
pub struct InsertFavorite<'a> {
|
||||
pub userid: &'a i32,
|
||||
#[diesel(column_name = rel_path)]
|
||||
pub path: &'a str,
|
||||
}
|
||||
|
||||
@@ -30,12 +31,15 @@ pub struct InsertFavorite<'a> {
|
||||
pub struct Favorite {
|
||||
pub id: i32,
|
||||
pub userid: i32,
|
||||
#[diesel(column_name = rel_path)]
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
#[derive(Insertable)]
|
||||
#[diesel(table_name = image_exif)]
|
||||
pub struct InsertImageExif {
|
||||
pub library_id: i32,
|
||||
#[diesel(column_name = rel_path)]
|
||||
pub file_path: String,
|
||||
pub camera_make: Option<String>,
|
||||
pub camera_model: Option<String>,
|
||||
@@ -53,11 +57,16 @@ pub struct InsertImageExif {
|
||||
pub date_taken: Option<i64>,
|
||||
pub created_time: i64,
|
||||
pub last_modified: i64,
|
||||
pub content_hash: Option<String>,
|
||||
pub size_bytes: Option<i64>,
|
||||
}
|
||||
|
||||
// Field order matches the post-migration column order in `image_exif`.
|
||||
#[derive(Serialize, Queryable, Clone, Debug)]
|
||||
pub struct ImageExif {
|
||||
pub id: i32,
|
||||
pub library_id: i32,
|
||||
#[diesel(column_name = rel_path)]
|
||||
pub file_path: String,
|
||||
pub camera_make: Option<String>,
|
||||
pub camera_model: Option<String>,
|
||||
@@ -75,11 +84,15 @@ pub struct ImageExif {
|
||||
pub date_taken: Option<i64>,
|
||||
pub created_time: i64,
|
||||
pub last_modified: i64,
|
||||
pub content_hash: Option<String>,
|
||||
pub size_bytes: Option<i64>,
|
||||
}
|
||||
|
||||
#[derive(Insertable)]
|
||||
#[diesel(table_name = photo_insights)]
|
||||
pub struct InsertPhotoInsight {
|
||||
pub library_id: i32,
|
||||
#[diesel(column_name = rel_path)]
|
||||
pub file_path: String,
|
||||
pub title: String,
|
||||
pub summary: String,
|
||||
@@ -92,6 +105,8 @@ pub struct InsertPhotoInsight {
|
||||
#[derive(Serialize, Queryable, Clone, Debug)]
|
||||
pub struct PhotoInsight {
|
||||
pub id: i32,
|
||||
pub library_id: i32,
|
||||
#[diesel(column_name = rel_path)]
|
||||
pub file_path: String,
|
||||
pub title: String,
|
||||
pub summary: String,
|
||||
@@ -102,6 +117,24 @@ pub struct PhotoInsight {
|
||||
pub approved: Option<bool>,
|
||||
}
|
||||
|
||||
// --- Libraries ---
|
||||
|
||||
#[derive(Serialize, Queryable, Clone, Debug)]
|
||||
pub struct LibraryRow {
|
||||
pub id: i32,
|
||||
pub name: String,
|
||||
pub root_path: String,
|
||||
pub created_at: i64,
|
||||
}
|
||||
|
||||
#[derive(Insertable)]
|
||||
#[diesel(table_name = libraries)]
|
||||
pub struct InsertLibrary<'a> {
|
||||
pub name: &'a str,
|
||||
pub root_path: &'a str,
|
||||
pub created_at: i64,
|
||||
}
|
||||
|
||||
// --- Knowledge memory models ---
|
||||
|
||||
#[derive(Insertable)]
|
||||
@@ -162,6 +195,8 @@ pub struct EntityFact {
|
||||
#[diesel(table_name = entity_photo_links)]
|
||||
pub struct InsertEntityPhotoLink {
|
||||
pub entity_id: i32,
|
||||
pub library_id: i32,
|
||||
#[diesel(column_name = rel_path)]
|
||||
pub file_path: String,
|
||||
pub role: String,
|
||||
}
|
||||
@@ -170,6 +205,8 @@ pub struct InsertEntityPhotoLink {
|
||||
pub struct EntityPhotoLink {
|
||||
pub id: i32,
|
||||
pub entity_id: i32,
|
||||
pub library_id: i32,
|
||||
#[diesel(column_name = rel_path)]
|
||||
pub file_path: String,
|
||||
pub role: String,
|
||||
}
|
||||
@@ -177,6 +214,8 @@ pub struct EntityPhotoLink {
|
||||
#[derive(Insertable)]
|
||||
#[diesel(table_name = video_preview_clips)]
|
||||
pub struct InsertVideoPreviewClip {
|
||||
pub library_id: i32,
|
||||
#[diesel(column_name = rel_path)]
|
||||
pub file_path: String,
|
||||
pub status: String,
|
||||
pub created_at: String,
|
||||
@@ -186,6 +225,8 @@ pub struct InsertVideoPreviewClip {
|
||||
#[derive(Serialize, Queryable, Clone, Debug)]
|
||||
pub struct VideoPreviewClip {
|
||||
pub id: i32,
|
||||
pub library_id: i32,
|
||||
#[diesel(column_name = rel_path)]
|
||||
pub file_path: String,
|
||||
pub status: String,
|
||||
pub duration_seconds: Option<f32>,
|
||||
|
||||
@@ -84,6 +84,7 @@ impl PreviewDao for SqlitePreviewDao {
|
||||
|
||||
diesel::insert_or_ignore_into(video_preview_clips)
|
||||
.values(InsertVideoPreviewClip {
|
||||
library_id: 1,
|
||||
file_path: file_path_val.to_string(),
|
||||
status: status_val.to_string(),
|
||||
created_at: now.clone(),
|
||||
@@ -111,7 +112,7 @@ impl PreviewDao for SqlitePreviewDao {
|
||||
let mut connection = self.connection.lock().expect("Unable to get PreviewDao");
|
||||
let now = chrono::Utc::now().to_rfc3339();
|
||||
|
||||
diesel::update(video_preview_clips.filter(file_path.eq(file_path_val)))
|
||||
diesel::update(video_preview_clips.filter(rel_path.eq(file_path_val)))
|
||||
.set((
|
||||
status.eq(status_val),
|
||||
duration_seconds.eq(duration),
|
||||
@@ -137,7 +138,7 @@ impl PreviewDao for SqlitePreviewDao {
|
||||
let mut connection = self.connection.lock().expect("Unable to get PreviewDao");
|
||||
|
||||
match video_preview_clips
|
||||
.filter(file_path.eq(file_path_val))
|
||||
.filter(rel_path.eq(file_path_val))
|
||||
.first::<VideoPreviewClip>(connection.deref_mut())
|
||||
{
|
||||
Ok(clip) => Ok(Some(clip)),
|
||||
@@ -163,7 +164,7 @@ impl PreviewDao for SqlitePreviewDao {
|
||||
let mut connection = self.connection.lock().expect("Unable to get PreviewDao");
|
||||
|
||||
video_preview_clips
|
||||
.filter(file_path.eq_any(file_paths))
|
||||
.filter(rel_path.eq_any(file_paths))
|
||||
.load::<VideoPreviewClip>(connection.deref_mut())
|
||||
.map_err(|e| anyhow::anyhow!("Query error: {}", e))
|
||||
})
|
||||
|
||||
@@ -64,7 +64,8 @@ diesel::table! {
|
||||
entity_photo_links (id) {
|
||||
id -> Integer,
|
||||
entity_id -> Integer,
|
||||
file_path -> Text,
|
||||
library_id -> Integer,
|
||||
rel_path -> Text,
|
||||
role -> Text,
|
||||
}
|
||||
}
|
||||
@@ -73,14 +74,15 @@ diesel::table! {
|
||||
favorites (id) {
|
||||
id -> Integer,
|
||||
userid -> Integer,
|
||||
path -> Text,
|
||||
rel_path -> Text,
|
||||
}
|
||||
}
|
||||
|
||||
diesel::table! {
|
||||
image_exif (id) {
|
||||
id -> Integer,
|
||||
file_path -> Text,
|
||||
library_id -> Integer,
|
||||
rel_path -> Text,
|
||||
camera_make -> Nullable<Text>,
|
||||
camera_model -> Nullable<Text>,
|
||||
lens_model -> Nullable<Text>,
|
||||
@@ -97,18 +99,17 @@ diesel::table! {
|
||||
date_taken -> Nullable<BigInt>,
|
||||
created_time -> BigInt,
|
||||
last_modified -> BigInt,
|
||||
content_hash -> Nullable<Text>,
|
||||
size_bytes -> Nullable<BigInt>,
|
||||
}
|
||||
}
|
||||
|
||||
diesel::table! {
|
||||
knowledge_embeddings (id) {
|
||||
libraries (id) {
|
||||
id -> Integer,
|
||||
keyword -> Text,
|
||||
description -> Text,
|
||||
category -> Nullable<Text>,
|
||||
embedding -> Binary,
|
||||
name -> Text,
|
||||
root_path -> Text,
|
||||
created_at -> BigInt,
|
||||
model_version -> Text,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -129,23 +130,11 @@ diesel::table! {
|
||||
}
|
||||
}
|
||||
|
||||
diesel::table! {
|
||||
message_embeddings (id) {
|
||||
id -> Integer,
|
||||
contact -> Text,
|
||||
body -> Text,
|
||||
timestamp -> BigInt,
|
||||
is_sent -> Bool,
|
||||
embedding -> Binary,
|
||||
created_at -> BigInt,
|
||||
model_version -> Text,
|
||||
}
|
||||
}
|
||||
|
||||
diesel::table! {
|
||||
photo_insights (id) {
|
||||
id -> Integer,
|
||||
file_path -> Text,
|
||||
library_id -> Integer,
|
||||
rel_path -> Text,
|
||||
title -> Text,
|
||||
summary -> Text,
|
||||
generated_at -> BigInt,
|
||||
@@ -171,7 +160,7 @@ diesel::table! {
|
||||
diesel::table! {
|
||||
tagged_photo (id) {
|
||||
id -> Integer,
|
||||
photo_name -> Text,
|
||||
rel_path -> Text,
|
||||
tag_id -> Integer,
|
||||
created_time -> BigInt,
|
||||
}
|
||||
@@ -196,7 +185,8 @@ diesel::table! {
|
||||
diesel::table! {
|
||||
video_preview_clips (id) {
|
||||
id -> Integer,
|
||||
file_path -> Text,
|
||||
library_id -> Integer,
|
||||
rel_path -> Text,
|
||||
status -> Text,
|
||||
duration_seconds -> Nullable<Float>,
|
||||
file_size_bytes -> Nullable<Integer>,
|
||||
@@ -208,7 +198,11 @@ diesel::table! {
|
||||
|
||||
diesel::joinable!(entity_facts -> photo_insights (source_insight_id));
|
||||
diesel::joinable!(entity_photo_links -> entities (entity_id));
|
||||
diesel::joinable!(entity_photo_links -> libraries (library_id));
|
||||
diesel::joinable!(image_exif -> libraries (library_id));
|
||||
diesel::joinable!(photo_insights -> libraries (library_id));
|
||||
diesel::joinable!(tagged_photo -> tags (tag_id));
|
||||
diesel::joinable!(video_preview_clips -> libraries (library_id));
|
||||
|
||||
diesel::allow_tables_to_appear_in_same_query!(
|
||||
calendar_events,
|
||||
@@ -218,9 +212,8 @@ diesel::allow_tables_to_appear_in_same_query!(
|
||||
entity_photo_links,
|
||||
favorites,
|
||||
image_exif,
|
||||
knowledge_embeddings,
|
||||
libraries,
|
||||
location_history,
|
||||
message_embeddings,
|
||||
photo_insights,
|
||||
search_history,
|
||||
tagged_photo,
|
||||
|
||||
@@ -1212,6 +1212,7 @@ mod tests {
|
||||
// Return a dummy ImageExif for tests
|
||||
Ok(crate::database::models::ImageExif {
|
||||
id: 1,
|
||||
library_id: data.library_id,
|
||||
file_path: data.file_path.to_string(),
|
||||
camera_make: data.camera_make.map(|s| s.to_string()),
|
||||
camera_model: data.camera_model.map(|s| s.to_string()),
|
||||
@@ -1229,6 +1230,8 @@ mod tests {
|
||||
date_taken: data.date_taken,
|
||||
created_time: data.created_time,
|
||||
last_modified: data.last_modified,
|
||||
content_hash: data.content_hash.clone(),
|
||||
size_bytes: data.size_bytes,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1248,6 +1251,7 @@ mod tests {
|
||||
// Return a dummy ImageExif for tests
|
||||
Ok(crate::database::models::ImageExif {
|
||||
id: 1,
|
||||
library_id: data.library_id,
|
||||
file_path: data.file_path.to_string(),
|
||||
camera_make: data.camera_make.map(|s| s.to_string()),
|
||||
camera_model: data.camera_model.map(|s| s.to_string()),
|
||||
@@ -1265,6 +1269,8 @@ mod tests {
|
||||
date_taken: data.date_taken,
|
||||
created_time: data.created_time,
|
||||
last_modified: data.last_modified,
|
||||
content_hash: data.content_hash.clone(),
|
||||
size_bytes: data.size_bytes,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ pub mod exif;
|
||||
pub mod file_types;
|
||||
pub mod files;
|
||||
pub mod geo;
|
||||
pub mod libraries;
|
||||
pub mod memories;
|
||||
pub mod otel;
|
||||
pub mod parsers;
|
||||
|
||||
159
src/libraries.rs
Normal file
159
src/libraries.rs
Normal file
@@ -0,0 +1,159 @@
|
||||
use chrono::Utc;
|
||||
use diesel::prelude::*;
|
||||
use diesel::sqlite::SqliteConnection;
|
||||
use log::{info, warn};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::database::models::{InsertLibrary, LibraryRow};
|
||||
use crate::database::schema::libraries;
|
||||
|
||||
/// Id of the primary library row seeded by the multi-library migration.
/// During the Phase 2 transitional shim this is the default `library_id`
/// for every write, until handlers and callers become library-aware.
pub const PRIMARY_LIBRARY_ID: i32 = 1;

/// Sentinel value the migration writes into `libraries.root_path`.
/// Replaced on startup with the live `BASE_PATH` env var by
/// `seed_or_patch_from_env`.
pub const ROOT_PATH_PLACEHOLDER: &str = "BASE_PATH_PLACEHOLDER";
|
||||
|
||||
/// A media library mount point: its numeric id, logical name, and absolute
|
||||
/// root on disk. `rel_path` values stored in the DB are relative to this root.
|
||||
#[derive(Clone, Debug, serde::Serialize)]
|
||||
pub struct Library {
|
||||
pub id: i32,
|
||||
pub name: String,
|
||||
pub root_path: String,
|
||||
}
|
||||
|
||||
impl Library {
|
||||
/// Resolve a library-relative path into an absolute `PathBuf` under the
|
||||
/// library root. Does not validate traversal — use `is_valid_full_path`
|
||||
/// for untrusted input.
|
||||
pub fn resolve(&self, rel_path: &str) -> PathBuf {
|
||||
Path::new(&self.root_path).join(rel_path)
|
||||
}
|
||||
|
||||
/// Inverse of `resolve`: given an absolute path under this library's
|
||||
/// root, return the root-relative portion. Returns `None` if the path
|
||||
/// is not under the library.
|
||||
pub fn strip_root(&self, abs_path: &Path) -> Option<String> {
|
||||
abs_path
|
||||
.strip_prefix(&self.root_path)
|
||||
.ok()
|
||||
.map(|p| p.to_string_lossy().replace('\\', "/"))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<LibraryRow> for Library {
|
||||
fn from(row: LibraryRow) -> Self {
|
||||
Library {
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
root_path: row.root_path,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Load all library rows from the database into `Library` values.
|
||||
pub fn load_all(conn: &mut SqliteConnection) -> Vec<Library> {
|
||||
libraries::table
|
||||
.order(libraries::id.asc())
|
||||
.load::<LibraryRow>(conn)
|
||||
.unwrap_or_else(|e| {
|
||||
warn!("Failed to load libraries table: {:?}", e);
|
||||
Vec::new()
|
||||
})
|
||||
.into_iter()
|
||||
.map(Library::from)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Ensure at least one library exists and that the seeded placeholder row is
|
||||
/// patched with the live `BASE_PATH`. Safe to call on every startup; it only
|
||||
/// writes when the placeholder is still present.
|
||||
pub fn seed_or_patch_from_env(conn: &mut SqliteConnection, base_path: &str) {
|
||||
// Check whether the primary row still carries the placeholder from the
|
||||
// migration. If so, replace it with the live BASE_PATH.
|
||||
let placeholder_count: i64 = libraries::table
|
||||
.filter(libraries::root_path.eq(ROOT_PATH_PLACEHOLDER))
|
||||
.count()
|
||||
.get_result(conn)
|
||||
.unwrap_or(0);
|
||||
|
||||
if placeholder_count > 0 {
|
||||
diesel::update(libraries::table.filter(libraries::root_path.eq(ROOT_PATH_PLACEHOLDER)))
|
||||
.set(libraries::root_path.eq(base_path))
|
||||
.execute(conn)
|
||||
.map(|rows| {
|
||||
info!(
|
||||
"Patched {} library row(s) with BASE_PATH='{}'",
|
||||
rows, base_path
|
||||
);
|
||||
})
|
||||
.unwrap_or_else(|e| warn!("Failed to patch library root_path: {:?}", e));
|
||||
return;
|
||||
}
|
||||
|
||||
// If no rows exist at all (e.g. table created outside the seeded migration),
|
||||
// insert a primary library pointing at BASE_PATH.
|
||||
let total: i64 = libraries::table
|
||||
.count()
|
||||
.get_result(conn)
|
||||
.unwrap_or(0);
|
||||
if total == 0 {
|
||||
let now = Utc::now().timestamp();
|
||||
let result = diesel::insert_into(libraries::table)
|
||||
.values(InsertLibrary {
|
||||
name: "main",
|
||||
root_path: base_path,
|
||||
created_at: now,
|
||||
})
|
||||
.execute(conn);
|
||||
match result {
|
||||
Ok(_) => info!("Seeded primary library 'main' with BASE_PATH='{}'", base_path),
|
||||
Err(e) => warn!("Failed to seed primary library: {:?}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::test::in_memory_db_connection;

    #[test]
    fn seed_patches_placeholder() {
        // The migration leaves a single placeholder row; seeding must
        // rewrite it with the provided base path.
        let mut conn = in_memory_db_connection();
        seed_or_patch_from_env(&mut conn, "/tmp/media");

        let all = load_all(&mut conn);
        assert_eq!(all.len(), 1);
        let primary = &all[0];
        assert_eq!(primary.id, 1);
        assert_eq!(primary.name, "main");
        assert_eq!(primary.root_path, "/tmp/media");
    }

    #[test]
    fn seed_is_idempotent() {
        // A second call with a different path must not clobber an
        // already-patched root.
        let mut conn = in_memory_db_connection();
        seed_or_patch_from_env(&mut conn, "/tmp/media");
        seed_or_patch_from_env(&mut conn, "/tmp/other");

        let all = load_all(&mut conn);
        assert_eq!(all.len(), 1);
        assert_eq!(all[0].root_path, "/tmp/media");
    }

    #[test]
    fn library_strip_root() {
        let lib = Library {
            id: 1,
            name: "main".into(),
            root_path: "/tmp/media".into(),
        };

        // A path under the root yields its relative portion.
        let inside = lib.strip_root(Path::new("/tmp/media/2024/photo.jpg"));
        assert_eq!(inside.as_deref(), Some("2024/photo.jpg"));

        // A path outside the root has no relative form.
        let outside = lib.strip_root(Path::new("/etc/passwd"));
        assert!(outside.is_none());
    }
}
|
||||
@@ -62,6 +62,7 @@ mod exif;
|
||||
mod file_types;
|
||||
mod files;
|
||||
mod geo;
|
||||
mod libraries;
|
||||
mod state;
|
||||
mod tags;
|
||||
mod utils;
|
||||
@@ -391,6 +392,7 @@ async fn upload_image(
|
||||
Ok(exif_data) => {
|
||||
let timestamp = Utc::now().timestamp();
|
||||
let insert_exif = InsertImageExif {
|
||||
library_id: crate::libraries::PRIMARY_LIBRARY_ID,
|
||||
file_path: relative_path.clone(),
|
||||
camera_make: exif_data.camera_make,
|
||||
camera_model: exif_data.camera_model,
|
||||
@@ -408,6 +410,8 @@ async fn upload_image(
|
||||
date_taken: exif_data.date_taken,
|
||||
created_time: timestamp,
|
||||
last_modified: timestamp,
|
||||
content_hash: None,
|
||||
size_bytes: None,
|
||||
};
|
||||
|
||||
if let Ok(mut dao) = exif_dao.lock() {
|
||||
@@ -1587,6 +1591,7 @@ fn process_new_files(
|
||||
Ok(exif_data) => {
|
||||
let timestamp = Utc::now().timestamp();
|
||||
let insert_exif = InsertImageExif {
|
||||
library_id: crate::libraries::PRIMARY_LIBRARY_ID,
|
||||
file_path: relative_path.clone(),
|
||||
camera_make: exif_data.camera_make,
|
||||
camera_model: exif_data.camera_model,
|
||||
@@ -1604,6 +1609,8 @@ fn process_new_files(
|
||||
date_taken: exif_data.date_taken,
|
||||
created_time: timestamp,
|
||||
last_modified: timestamp,
|
||||
content_hash: None,
|
||||
size_bytes: None,
|
||||
};
|
||||
|
||||
let mut dao = exif_dao.lock().expect("Unable to lock ExifDao");
|
||||
|
||||
50
src/state.rs
50
src/state.rs
@@ -3,8 +3,10 @@ use crate::database::{
|
||||
CalendarEventDao, DailySummaryDao, ExifDao, InsightDao, KnowledgeDao, LocationHistoryDao,
|
||||
SearchHistoryDao, SqliteCalendarEventDao, SqliteDailySummaryDao, SqliteExifDao,
|
||||
SqliteInsightDao, SqliteKnowledgeDao, SqliteLocationHistoryDao, SqliteSearchHistoryDao,
|
||||
connect,
|
||||
};
|
||||
use crate::database::{PreviewDao, SqlitePreviewDao};
|
||||
use crate::libraries::{self, Library};
|
||||
use crate::tags::{SqliteTagDao, TagDao};
|
||||
use crate::video::actors::{
|
||||
PlaylistGenerator, PreviewClipGenerator, StreamActor, VideoPlaylistManager,
|
||||
@@ -17,6 +19,11 @@ pub struct AppState {
|
||||
pub stream_manager: Arc<Addr<StreamActor>>,
|
||||
pub playlist_manager: Arc<Addr<VideoPlaylistManager>>,
|
||||
pub preview_clip_generator: Arc<Addr<PreviewClipGenerator>>,
|
||||
/// All configured media libraries. Ordered by `id` ascending; the first
|
||||
/// entry is the primary library.
|
||||
pub libraries: Vec<Library>,
|
||||
/// Legacy shim equal to `libraries[0].root_path`. Phase 2 transitional —
|
||||
/// new code should go through `primary_library()`.
|
||||
pub base_path: String,
|
||||
pub thumbnail_path: String,
|
||||
pub video_path: String,
|
||||
@@ -28,10 +35,26 @@ pub struct AppState {
|
||||
pub insight_generator: InsightGenerator,
|
||||
}
|
||||
|
||||
impl AppState {
|
||||
pub fn primary_library(&self) -> &Library {
|
||||
self.libraries
|
||||
.first()
|
||||
.expect("AppState constructed without any libraries")
|
||||
}
|
||||
|
||||
pub fn library_by_id(&self, id: i32) -> Option<&Library> {
|
||||
self.libraries.iter().find(|l| l.id == id)
|
||||
}
|
||||
|
||||
pub fn library_by_name(&self, name: &str) -> Option<&Library> {
|
||||
self.libraries.iter().find(|l| l.name == name)
|
||||
}
|
||||
}
|
||||
|
||||
impl AppState {
|
||||
pub fn new(
|
||||
stream_manager: Arc<Addr<StreamActor>>,
|
||||
base_path: String,
|
||||
libraries_vec: Vec<Library>,
|
||||
thumbnail_path: String,
|
||||
video_path: String,
|
||||
gif_path: String,
|
||||
@@ -42,6 +65,11 @@ impl AppState {
|
||||
insight_generator: InsightGenerator,
|
||||
preview_dao: Arc<Mutex<Box<dyn PreviewDao>>>,
|
||||
) -> Self {
|
||||
assert!(
|
||||
!libraries_vec.is_empty(),
|
||||
"AppState::new requires at least one library"
|
||||
);
|
||||
let base_path = libraries_vec[0].root_path.clone();
|
||||
let playlist_generator = PlaylistGenerator::new();
|
||||
let video_playlist_manager =
|
||||
VideoPlaylistManager::new(video_path.clone(), playlist_generator.start());
|
||||
@@ -53,6 +81,7 @@ impl AppState {
|
||||
stream_manager,
|
||||
playlist_manager: Arc::new(video_playlist_manager.start()),
|
||||
preview_clip_generator: Arc::new(preview_clip_generator.start()),
|
||||
libraries: libraries_vec,
|
||||
base_path,
|
||||
thumbnail_path,
|
||||
video_path,
|
||||
@@ -122,8 +151,16 @@ impl Default for AppState {
|
||||
let knowledge_dao: Arc<Mutex<Box<dyn KnowledgeDao>>> =
|
||||
Arc::new(Mutex::new(Box::new(SqliteKnowledgeDao::new())));
|
||||
|
||||
// Load base path
|
||||
// Load base path and ensure the primary library row reflects it.
|
||||
let base_path = env::var("BASE_PATH").expect("BASE_PATH was not set in the env");
|
||||
let mut seed_conn = connect();
|
||||
libraries::seed_or_patch_from_env(&mut seed_conn, &base_path);
|
||||
let libraries_vec = libraries::load_all(&mut seed_conn);
|
||||
assert!(
|
||||
!libraries_vec.is_empty(),
|
||||
"libraries table is empty after seed_or_patch_from_env"
|
||||
);
|
||||
drop(seed_conn);
|
||||
|
||||
// Initialize InsightGenerator with all data sources
|
||||
let insight_generator = InsightGenerator::new(
|
||||
@@ -148,7 +185,7 @@ impl Default for AppState {
|
||||
|
||||
Self::new(
|
||||
Arc::new(StreamActor {}.start()),
|
||||
base_path,
|
||||
libraries_vec,
|
||||
env::var("THUMBNAILS").expect("THUMBNAILS was not set in the env"),
|
||||
env::var("VIDEO_PATH").expect("VIDEO_PATH was not set in the env"),
|
||||
env::var("GIFS_DIRECTORY").expect("GIFS_DIRECTORY was not set in the env"),
|
||||
@@ -227,9 +264,14 @@ impl AppState {
|
||||
Arc::new(Mutex::new(Box::new(SqlitePreviewDao::new())));
|
||||
|
||||
// Create the AppState with the temporary paths
|
||||
let test_libraries = vec![Library {
|
||||
id: crate::libraries::PRIMARY_LIBRARY_ID,
|
||||
name: "main".to_string(),
|
||||
root_path: base_path_str.clone(),
|
||||
}];
|
||||
AppState::new(
|
||||
Arc::new(StreamActor {}.start()),
|
||||
base_path_str,
|
||||
test_libraries,
|
||||
thumbnail_path.to_string_lossy().to_string(),
|
||||
video_path.to_string_lossy().to_string(),
|
||||
gif_path.to_string_lossy().to_string(),
|
||||
|
||||
46
src/tags.rs
46
src/tags.rs
@@ -254,6 +254,7 @@ pub struct InsertTag {
|
||||
#[diesel(table_name = tagged_photo)]
|
||||
pub struct InsertTaggedPhoto {
|
||||
pub tag_id: i32,
|
||||
#[diesel(column_name = rel_path)]
|
||||
pub photo_name: String,
|
||||
pub created_time: i64,
|
||||
}
|
||||
@@ -263,6 +264,7 @@ pub struct TaggedPhoto {
|
||||
#[allow(dead_code)] // Part of API contract
|
||||
pub id: i32,
|
||||
#[allow(dead_code)] // Part of API contract
|
||||
#[diesel(column_name = rel_path)]
|
||||
pub photo_name: String,
|
||||
#[allow(dead_code)] // Part of API contract
|
||||
pub tag_id: i32,
|
||||
@@ -368,7 +370,7 @@ impl TagDao for SqliteTagDao {
|
||||
.inner_join(tagged_photo::table)
|
||||
.group_by(tags::id)
|
||||
.select((count_star(), id, name, created_time))
|
||||
.filter(tagged_photo::photo_name.like(path))
|
||||
.filter(tagged_photo::rel_path.like(path))
|
||||
.get_results(conn.deref_mut())
|
||||
.map::<Vec<(i64, Tag)>, _>(|tags_with_count: Vec<(i64, i32, String, i64)>| {
|
||||
tags_with_count
|
||||
@@ -404,7 +406,7 @@ impl TagDao for SqliteTagDao {
|
||||
debug!("Getting Tags for path: {:?}", path);
|
||||
tags::table
|
||||
.left_join(tagged_photo::table)
|
||||
.filter(tagged_photo::photo_name.eq(&path))
|
||||
.filter(tagged_photo::rel_path.eq(&path))
|
||||
.select((tags::id, tags::name, tags::created_time))
|
||||
.get_results::<Tag>(conn.deref_mut())
|
||||
.with_context(|| "Unable to get tags from Sqlite")
|
||||
@@ -474,7 +476,7 @@ impl TagDao for SqliteTagDao {
|
||||
diesel::delete(
|
||||
tagged_photo::table
|
||||
.filter(tagged_photo::tag_id.eq(tag.id))
|
||||
.filter(tagged_photo::photo_name.eq(path)),
|
||||
.filter(tagged_photo::rel_path.eq(path)),
|
||||
)
|
||||
.execute(conn.deref_mut())
|
||||
.with_context(|| format!("Unable to delete tag: '{}'", &tag.name))
|
||||
@@ -558,23 +560,23 @@ impl TagDao for SqliteTagDao {
|
||||
let query = sql_query(format!(
|
||||
r#"
|
||||
WITH filtered_photos AS (
|
||||
SELECT photo_name
|
||||
SELECT rel_path
|
||||
FROM tagged_photo tp
|
||||
WHERE tp.tag_id IN ({})
|
||||
AND tp.photo_name NOT IN (
|
||||
SELECT photo_name
|
||||
AND tp.rel_path NOT IN (
|
||||
SELECT rel_path
|
||||
FROM tagged_photo
|
||||
WHERE tag_id IN ({})
|
||||
)
|
||||
GROUP BY photo_name
|
||||
GROUP BY rel_path
|
||||
HAVING COUNT(DISTINCT tag_id) >= {}
|
||||
)
|
||||
SELECT
|
||||
fp.photo_name as file_name,
|
||||
fp.rel_path as file_name,
|
||||
COUNT(DISTINCT tp2.tag_id) as tag_count
|
||||
FROM filtered_photos fp
|
||||
JOIN tagged_photo tp2 ON fp.photo_name = tp2.photo_name
|
||||
GROUP BY fp.photo_name"#,
|
||||
JOIN tagged_photo tp2 ON fp.rel_path = tp2.rel_path
|
||||
GROUP BY fp.rel_path"#,
|
||||
tag_placeholders,
|
||||
exclude_placeholders,
|
||||
tag_ids.len()
|
||||
@@ -618,21 +620,21 @@ impl TagDao for SqliteTagDao {
|
||||
let query = sql_query(format!(
|
||||
r#"
|
||||
WITH filtered_photos AS (
|
||||
SELECT DISTINCT photo_name
|
||||
SELECT DISTINCT rel_path
|
||||
FROM tagged_photo tp
|
||||
WHERE tp.tag_id IN ({})
|
||||
AND tp.photo_name NOT IN (
|
||||
SELECT photo_name
|
||||
AND tp.rel_path NOT IN (
|
||||
SELECT rel_path
|
||||
FROM tagged_photo
|
||||
WHERE tag_id IN ({})
|
||||
)
|
||||
)
|
||||
SELECT
|
||||
fp.photo_name as file_name,
|
||||
fp.rel_path as file_name,
|
||||
COUNT(DISTINCT tp2.tag_id) as tag_count
|
||||
FROM filtered_photos fp
|
||||
JOIN tagged_photo tp2 ON fp.photo_name = tp2.photo_name
|
||||
GROUP BY fp.photo_name"#,
|
||||
JOIN tagged_photo tp2 ON fp.rel_path = tp2.rel_path
|
||||
GROUP BY fp.rel_path"#,
|
||||
tag_placeholders, exclude_placeholders
|
||||
))
|
||||
.into_boxed();
|
||||
@@ -663,8 +665,8 @@ impl TagDao for SqliteTagDao {
|
||||
.connection
|
||||
.lock()
|
||||
.expect("Unable to lock SqliteTagDao connection");
|
||||
diesel::update(tagged_photo.filter(photo_name.eq(old_name)))
|
||||
.set(photo_name.eq(new_name))
|
||||
diesel::update(tagged_photo.filter(rel_path.eq(old_name)))
|
||||
.set(rel_path.eq(new_name))
|
||||
.execute(conn.deref_mut())?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -680,7 +682,7 @@ impl TagDao for SqliteTagDao {
|
||||
.lock()
|
||||
.expect("Unable to lock SqliteTagDao connection");
|
||||
tagged_photo
|
||||
.select(photo_name)
|
||||
.select(rel_path)
|
||||
.distinct()
|
||||
.load(conn.deref_mut())
|
||||
.with_context(|| "Unable to get photo names")
|
||||
@@ -714,10 +716,10 @@ impl TagDao for SqliteTagDao {
|
||||
|
||||
let query_str = format!(
|
||||
r#"
|
||||
SELECT photo_name, COUNT(DISTINCT tag_id) as tag_count
|
||||
SELECT rel_path AS photo_name, COUNT(DISTINCT tag_id) as tag_count
|
||||
FROM tagged_photo
|
||||
WHERE photo_name IN ({})
|
||||
GROUP BY photo_name
|
||||
WHERE rel_path IN ({})
|
||||
GROUP BY rel_path
|
||||
"#,
|
||||
placeholders
|
||||
);
|
||||
|
||||
@@ -98,6 +98,7 @@ impl PreviewDao for TestPreviewDao {
|
||||
file_path_val.to_string(),
|
||||
VideoPreviewClip {
|
||||
id: *id,
|
||||
library_id: crate::libraries::PRIMARY_LIBRARY_ID,
|
||||
file_path: file_path_val.to_string(),
|
||||
status: status_val.to_string(),
|
||||
duration_seconds: None,
|
||||
|
||||
Reference in New Issue
Block a user