mirror of https://github.com/osmarks/meme-search-engine.git synced 2024-11-13 23:34:49 +00:00

misc fixes

- thumbnails/OCR off was broken: ingestion still queued work for the disabled pipelines (see the sketch below the commit metadata)
- problematic video files caused segfaults (I blame ffmpeg for this)
osmarks 2024-05-30 19:05:54 +01:00
parent 3257521068
commit 747058e254
4 changed files with 33 additions and 27 deletions
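For context on the first fix: the ingest pipeline hands images to background thumbnail and OCR workers over tokio mpsc channels (the blocking_send/send calls in the diff below), and before this commit it kept queueing that work, and marking new files as needs_ocr/needs_thumbnail, even when those services were disabled, presumably leaving sends with no consumer. A minimal sketch of the gating pattern the diff introduces, with hypothetical names (Config, LoadedImage, thumbnail_worker) that only mirror the diff, not the project's actual types:

```rust
// Sketch only: gate producer-side sends on the same flag that decides whether
// the consumer task exists at all.
use tokio::sync::mpsc;

struct Config { enable_thumbs: bool }
struct LoadedImage { filename: String }

async fn thumbnail_worker(mut rx: mpsc::Receiver<LoadedImage>) {
    while let Some(img) = rx.recv().await {
        println!("thumbnailing {}", img.filename);
    }
}

#[tokio::main]
async fn main() {
    let config = Config { enable_thumbs: false };

    let (to_thumbnail_tx, rx) = mpsc::channel::<LoadedImage>(16);
    if config.enable_thumbs {
        tokio::spawn(thumbnail_worker(rx));
    } else {
        drop(rx); // thumbnails disabled: no worker, so the channel is closed
    }

    // Without the flag check, this send would hit the closed channel, error out,
    // and (with `?`, as in the real code) abort the whole ingest task.
    if config.enable_thumbs {
        if let Err(e) = to_thumbnail_tx
            .send(LoadedImage { filename: "meme.jpg".into() })
            .await
        {
            eprintln!("thumbnail channel closed: {e}");
        }
    }
}
```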

View File

@@ -140,7 +140,7 @@
<div class="result">
<a href={util.getURL(result)}>
{#if util.hasFormat(results, result, "VIDEO")}
<video controls poster={util.thumbnailURL(results, result, "jpegh")} preload="metadata" on:loadstart={updateCounter} on:loadedmetadata={redrawGrid}>
<video controls poster={util.hasFormat("jpegh") ? util.thumbnailURL(results, result, "jpegh") : null} preload="metadata" on:loadstart={updateCounter} on:loadedmetadata={redrawGrid} on:loadeddata={redrawGrid}>
<source src={util.getURL(result)} />
</video>
{:else}

View File

@@ -7,7 +7,7 @@ esbuild
.build({
entryPoints: [path.join(__dirname, "app.js")],
bundle: true,
minify: false,
minify: true,
outfile: path.join(__dirname, "../static/app.js"),
plugins: [sveltePlugin({
preprocess: {

View File

@@ -14,7 +14,6 @@ use axum::{
http::StatusCode
};
use common::resize_for_embed_sync;
use ffmpeg_the_third::device::input::video;
use image::RgbImage;
use image::{imageops::FilterType, io::Reader as ImageReader, DynamicImage, ImageFormat};
use reqwest::Client;
@@ -343,12 +342,14 @@ async fn ingest_files(config: Arc<WConfig>) -> Result<()> {
image: embed_buf,
filename: Filename::VideoFrame(filename.clone(), i)
})?;
to_thumbnail_tx.blocking_send(LoadedImage {
image: frame.clone(),
filename: Filename::VideoFrame(filename.clone(), i),
original_size: None,
fast_thumbnails_only: true
})?;
if config.service.enable_thumbs {
to_thumbnail_tx.blocking_send(LoadedImage {
image: frame.clone(),
filename: Filename::VideoFrame(filename.clone(), i),
original_size: None,
fast_thumbnails_only: true
})?;
}
i += 1;
Ok(())
};
@@ -378,23 +379,23 @@ async fn ingest_files(config: Arc<WConfig>) -> Result<()> {
}
if record.needs_thumbnail {
to_thumbnail_tx
.send(LoadedImage {
image: image.clone(),
filename: Filename::Actual(record.filename.clone()),
original_size: Some(std::fs::metadata(&path)?.len() as usize),
fast_thumbnails_only: false
})
.await?;
.send(LoadedImage {
image: image.clone(),
filename: Filename::Actual(record.filename.clone()),
original_size: Some(std::fs::metadata(&path)?.len() as usize),
fast_thumbnails_only: false
})
.await?;
}
if record.needs_ocr {
to_ocr_tx
.send(LoadedImage {
image,
filename: Filename::Actual(record.filename.clone()),
original_size: None,
fast_thumbnails_only: true
})
.await?;
.send(LoadedImage {
image,
filename: Filename::Actual(record.filename.clone()),
original_size: None,
fast_thumbnails_only: true
})
.await?;
}
Ok(())
}
@@ -634,8 +635,8 @@ async fn ingest_files(config: Arc<WConfig>) -> Result<()> {
None => Some(FileRecord {
filename: filename.clone(),
needs_embed: true,
needs_ocr: true,
needs_thumbnail: true
needs_ocr: config.service.enable_ocr,
needs_thumbnail: config.service.enable_thumbs
}),
Some(r) => {
let needs_embed = modtime > r.embedding_time.unwrap_or(i64::MIN);
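The last hunk above changes what a newly discovered file gets scheduled for: instead of unconditionally requesting OCR and thumbnails, it consults the service config. A rough sketch of that decision, with hypothetical field names that simply mirror the diff (enable_ocr, enable_thumbs, embedding_time as a unix timestamp):

```rust
// Sketch only; not the project's actual types.
struct ServiceConfig { enable_ocr: bool, enable_thumbs: bool }

struct FileRecord {
    filename: String,
    needs_embed: bool,
    needs_ocr: bool,
    needs_thumbnail: bool,
}

fn plan_new_file(filename: String, service: &ServiceConfig) -> FileRecord {
    FileRecord {
        filename,
        // embeddings are always wanted; OCR and thumbnails only when enabled
        needs_embed: true,
        needs_ocr: service.enable_ocr,
        needs_thumbnail: service.enable_thumbs,
    }
}

fn needs_reembed(modtime: i64, embedding_time: Option<i64>) -> bool {
    // a file modified after its last embedding pass (or never embedded at all)
    // is re-embedded, matching the comparison in the hunk's context line
    modtime > embedding_time.unwrap_or(i64::MIN)
}

fn main() {
    let cfg = ServiceConfig { enable_ocr: false, enable_thumbs: true };
    let rec = plan_new_file("meme.png".to_string(), &cfg);
    println!(
        "{}: embed={} ocr={} thumb={} reembed={}",
        rec.filename, rec.needs_embed, rec.needs_ocr, rec.needs_thumbnail,
        needs_reembed(1_717_000_000, None)
    );
}
```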

View File

@@ -2,7 +2,7 @@ extern crate ffmpeg_the_third as ffmpeg;
use anyhow::{Result, Context};
use image::RgbImage;
use std::env;
use ffmpeg::{codec, filter, format::{self, Pixel}, media::Type, util::frame::video::Video};
use ffmpeg::{codec, filter, format::{self, Pixel}, media::Type, util::frame::video::Video, software::scaling};
const BYTES_PER_PIXEL: usize = 3;
@@ -20,7 +20,11 @@ pub fn run<P: AsRef<std::path::Path>, F: FnMut(RgbImage) -> Result<()>>(path: P,
graph.add(&filter::find("buffer").unwrap(), "in",
&format!("video_size={}x{}:pix_fmt={}:time_base={}/{}:pixel_aspect={}/{}", decoder.width(), decoder.height(), decoder.format().descriptor().unwrap().name(), video.time_base().0, video.time_base().1, decoder.aspect_ratio().0, decoder.aspect_ratio().1))?;
graph.add(&filter::find("buffersink").unwrap(), "out", "")?;
graph.output("in", 0)?.input("out", 0)?.parse(&format!("[in] thumbnail=n={}:log=verbose [thumbs]; [thumbs] select='gt(scene,0.05)+eq(n,0)' [out]", afr)).context("filtergraph parse failed")?;
// I don't know exactly where, but some of my videos apparently have the size vary throughout them.
// This causes horrible segfaults somewhere.
// Rescale to initial width to fix this. We could do this with a separate swscaler but this is easier.
let filterspec = format!("[in] scale={}:{} [scaled]; [scaled] thumbnail=n={}:log=verbose [thumbs]; [thumbs] select='gt(scene,0.05)+eq(n,0)' [out]", decoder.width(), decoder.height(), afr);
graph.output("in", 0)?.input("out", 0)?.parse(&filterspec).context("filtergraph parse failed")?;
let mut out = graph.get("out").unwrap();
out.set_pixel_format(Pixel::RGB24);
@@ -33,6 +37,7 @@ pub fn run<P: AsRef<std::path::Path>, F: FnMut(RgbImage) -> Result<()>>(path: P,
if !decoder.receive_frame(&mut decoded).is_ok() { break }
let mut in_ctx = filter_graph.get("in").unwrap();
// The filters really do not like
let mut src = in_ctx.source();
src.add(&decoded).context("add frame")?;
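The comment added in the hunk above explains the segfault fix: some videos change frame size partway through, and feeding those frames into the thumbnail/select filters crashed somewhere inside ffmpeg, so every frame is now rescaled to the decoder's initial dimensions before anything else. As a rough illustration of the spec string that gets parsed (640x360 and afr = 10 are made-up values, not from the repo):

```rust
// Sketch of the filtergraph spec construction shown in the diff.
fn filterspec(width: u32, height: u32, afr: u64) -> String {
    // [in] -> scale     (pin every frame to the initial decoder size)
    //      -> thumbnail (pick a representative frame from each batch of `afr` frames)
    //      -> select    (keep frames with scene score > 0.05, plus the very first frame)
    //      -> [out]
    format!(
        "[in] scale={width}:{height} [scaled]; \
         [scaled] thumbnail=n={afr}:log=verbose [thumbs]; \
         [thumbs] select='gt(scene,0.05)+eq(n,0)' [out]"
    )
}

fn main() {
    println!("{}", filterspec(640, 360, 10));
}
```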