I am trying to get snapshots from video data that is held in a MemoryStream or a byte[], not at a physical file path. FFMpegCore provides an option to use Arguments with a PipeSource, but I am not sure how to use it. I have updated my code to take a snapshot from a Stream as below, but it gives an error (for the byte[] case, see the short sketch after the code):
public static async Task<Bitmap> SnapshotAsync(Stream input, IMediaAnalysis source, Size? size = null, TimeSpan? captureTime = null, int? streamIndex = null, int inputFileIndex = 0)
{
    input.Seek(0, SeekOrigin.Begin);
    FFMpegCore.Pipes.StreamPipeSource streamPipeSource = new FFMpegCore.Pipes.StreamPipeSource(input);
    var (arguments, outputOptions) = BuildSnapshotArguments(streamPipeSource, source, size, captureTime, streamIndex, inputFileIndex);
    using var ms = new MemoryStream();

    await arguments
        .OutputToPipe(new StreamPipeSink(ms), options => outputOptions(options
            .ForceFormat("rawvideo")))
        .ProcessAsynchronously().ConfigureAwait(false);

    ms.Position = 0;
    return new Bitmap(ms);
}
private static (FFMpegArguments, Action<FFMpegArgumentOptions> outputOptions) BuildSnapshotArguments(
    IPipeSource input,
    IMediaAnalysis source,
    Size? size = null,
    TimeSpan? captureTime = null,
    int? streamIndex = null,
    int inputFileIndex = 0)
{
    captureTime ??= TimeSpan.FromSeconds(source.Duration.TotalSeconds / 3);
    size = PrepareSnapshotSize(source, size);
    streamIndex ??= source.PrimaryVideoStream?.Index
                    ?? source.VideoStreams.FirstOrDefault()?.Index
                    ?? 0;

    return (FFMpegArguments
            .FromPipeInput(input, options => options
                .Seek(captureTime)),
        options => options
            .SelectStream((int)streamIndex, inputFileIndex)
            .WithVideoCodec(VideoCodec.Png)
            .WithFrameOutputCount(1)
            .Resize(size));
}
private static Size? PrepareSnapshotSize(IMediaAnalysis source, Size? wantedSize)
{
    if (wantedSize == null || (wantedSize.Value.Height <= 0 && wantedSize.Value.Width <= 0) || source.PrimaryVideoStream == null)
        return null;

    var currentSize = new Size(source.PrimaryVideoStream.Width, source.PrimaryVideoStream.Height);
    if (source.PrimaryVideoStream.Rotation == 90 || source.PrimaryVideoStream.Rotation == 180)
        currentSize = new Size(source.PrimaryVideoStream.Height, source.PrimaryVideoStream.Width);

    if (wantedSize.Value.Width != currentSize.Width || wantedSize.Value.Height != currentSize.Height)
    {
        if (wantedSize.Value.Width <= 0 && wantedSize.Value.Height > 0)
        {
            var ratio = (double)wantedSize.Value.Height / currentSize.Height;
            return new Size((int)(currentSize.Width * ratio), (int)(currentSize.Height * ratio));
        }
        if (wantedSize.Value.Height <= 0 && wantedSize.Value.Width > 0)
        {
            var ratio = (double)wantedSize.Value.Width / currentSize.Width;
            return new Size((int)(currentSize.Width * ratio), (int)(currentSize.Height * ratio));
        }
        return wantedSize;
    }
    return null;
}
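For the byte[] case, my assumption is that wrapping the buffer in a MemoryStream is enough to reuse the same pipe-based path. A minimal sketch (GetVideoBytes() is just a placeholder for wherever the in-memory data comes from):

byte[] videoBytes = GetVideoBytes();                           // placeholder: video data already in memory
using var bms = new MemoryStream(videoBytes, writable: false); // wrap the buffer, no copy to disk
// bms would then be passed to SnapshotAsync the same way as the MemoryStream in the usage code further down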
It is giving an error in the SnapshotAsync function at this line:
await arguments
    .OutputToPipe(new StreamPipeSink(ms), options => outputOptions(options
        .ForceFormat("rawvideo")))
    .ProcessAsynchronously().ConfigureAwait(false);
Here is the full error message:
ffmpeg exited with non-zero exit-code (1 - ffmpeg version 2021-04-04-git-b1b7cc698b-full_build-www.gyan.dev Copyright (c) 2000-2021 the FFmpeg developers
  built with gcc 10.2.0 (Rev6, Built by MSYS2 project)
  configuration: --enable-gpl --enable-version3 --enable-static --disable-w32threads --disable-autodetect --enable-fontconfig --enable-iconv --enable-gnutls --enable-libxml2 --enable-gmp --enable-lzma --enable-libsnappy --enable-zlib --enable-librist --enable-libsrt --enable-libssh --enable-libzmq --enable-avisynth --enable-libbluray --enable-libcaca --enable-sdl2 --enable-libdav1d --enable-libzvbi --enable-librav1e --enable-libsvtav1 --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxvid --enable-libaom --enable-libopenjpeg --enable-libvpx --enable-libass --enable-frei0r --enable-libfreetype --enable-libfribidi --enable-libvidstab --enable-libvmaf --enable-libzimg --enable-amf --enable-cuda-llvm --enable-cuvid --enable-ffnvcodec --enable-nvdec --enable-nvenc --enable-d3d11va --enable-dxva2 --enable-libmfx --enable-libglslang --enable-vulkan --enable-opencl --enable-libcdio --enable-libgme --enable-libmodplug --enable-libopenmpt --enable-libopencore-amrwb --enable-libmp3lame --enable-libshine --enable-libtheora --enable-libtwolame --enable-libvo-amrwbenc --enable-libilbc --enable-libgsm --enable-libopencore-amrnb --enable-libopus --enable-libspeex --enable-libvorbis --enable-ladspa --enable-libbs2b --enable-libflite --enable-libmysofa --enable-librubberband --enable-libsoxr --enable-chromaprint
  libavutil      56. 72.100 / 56. 72.100
  libavcodec     58.135.100 / 58.135.100
  libavformat    58. 77.100 / 58. 77.100
  libavdevice    58. 14.100 / 58. 14.100
  libavfilter     7.111.100 /  7.111.100
  libswscale      5. 10.100 /  5. 10.100
  libswresample   3. 10.100 /  3. 10.100
  libpostproc    55. 10.100 / 55. 10.100
[mov,mp4,m4a,3gp,3g2,mj2 @ 000001c78845f040] Could not find codec parameters for stream 0 (Video: h264 (avc1 / 0x31637661), none, 1280x720, 4716 kb/s): unspecified pixel format
Consider increasing the value for the 'analyzeduration' (0) and 'probesize' (5000000) options
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '\\.\pipe\FFMpegCore_4599336d-fbf8-430e-ab89-19082c7d3693':
  Metadata:
    major_brand     : mp42
    minor_version   : 0
    compatible_brands: mp41isom
    creation_time   : 2021-11-17T11:53:33.000000Z
  Duration: 00:00:03.62, start: 0.000000, bitrate: N/A
  Stream #0:0(und): Video: h264 (avc1 / 0x31637661), none, 1280x720, 4716 kb/s, 15.20 fps, 15.08 tbr, 30k tbn, 60k tbc (default)
    Metadata:
      creation_time   : 2021-11-17T11:53:33.000000Z
      handler_name    : VideoHandler
      vendor_id       : [0][0][0][0]
      encoder         : AVC Coding
  Stream #0:1(und): Audio: aac (mp4a / 0x6134706D), 48000 Hz, stereo, fltp, 170 kb/s (default)
    Metadata:
      creation_time   : 2021-11-17T11:53:33.000000Z
      handler_name    : SoundHandler
      vendor_id       : [0][0][0][0]
Stream mapping:
  Stream #0:0 -> #0:0 (h264 (native) -> png (native))
Press [q] to stop, [?] for help
\\.\pipe\FFMpegCore_4599336d-fbf8-430e-ab89-19082c7d3693: Invalid argument
Cannot determine format of input stream 0:0 after EOF
Error marking filters as finished
Conversion failed!)
I am just passing a video file:
using (Stream fms = File.OpenRead(txtVideoPath.Text))
{
    fms.Seek(0, SeekOrigin.Begin);
    using (MemoryStream vms = new MemoryStream())
    {
        fms.CopyTo(vms);
        vms.Seek(0, SeekOrigin.Begin);
        // pass vms to the snapshot function (see the sketch below)
        ....
    }
}
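The elided call inside the inner using block is roughly this (a minimal sketch; I am assuming the FFMpegCore version I use has a Stream overload of FFProbe.Analyse to obtain the IMediaAnalysis):

var analysis = FFProbe.Analyse(vms);   // assumption: Stream overload exists; it reads the stream through a pipe
vms.Seek(0, SeekOrigin.Begin);         // rewind, since probing consumes the stream
using Bitmap snapshot = await SnapshotAsync(vms, analysis, new Size(320, 0));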
Is there any way to get a snapshot from a stream without storing the file physically?