diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/QuickLook.Plugin.VideoViewer.csproj b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/QuickLook.Plugin.VideoViewer.csproj
index 8b96fe0..4507bf3 100644
--- a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/QuickLook.Plugin.VideoViewer.csproj
+++ b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/QuickLook.Plugin.VideoViewer.csproj
@@ -59,8 +59,14 @@
       key.snk
-
-      References\ffme.dll
+
+      ffme\ffme.common.dll
+
+
+      ffme\ffme.win.dll
+
+
+      ffme\FFmpeg.AutoGen.dll

       ..\..\packages\taglib.2.1.0.0\lib\policy.2.0.taglib-sharp.dll
@@ -138,6 +144,9 @@
       PreserveNewest
+
+      PreserveNewest
+
       PreserveNewest
@@ -165,6 +174,9 @@
       PreserveNewest
+
+      PreserveNewest
+
       PreserveNewest
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/References/FFmpeg.AutoGen.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/References/FFmpeg.AutoGen.dll
deleted file mode 100644
index ca3a6d7..0000000
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/References/FFmpeg.AutoGen.dll and /dev/null differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/References/ffme.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/References/ffme.dll
deleted file mode 100644
index 4f6517f..0000000
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/References/ffme.dll and /dev/null differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ViewerPanel.xaml b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ViewerPanel.xaml
index 6b2b26c..e53c9b7 100644
--- a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ViewerPanel.xaml
+++ b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ViewerPanel.xaml
@@ -3,7 +3,7 @@
     xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
     xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
     xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
-    xmlns:ffme="clr-namespace:Unosquare.FFME;assembly=ffme"
+    xmlns:ffme="clr-namespace:Unosquare.FFME;assembly=ffme.win"
    xmlns:local="clr-namespace:QuickLook.Plugin.VideoViewer"
    xmlns:glassLayer="clr-namespace:QuickLook.Controls.GlassLayer;assembly=QuickLook"
    mc:Ignorable="d"
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ViewerPanel.xaml.cs b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ViewerPanel.xaml.cs
index 3b05cda..81f2c5e 100644
--- a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ViewerPanel.xaml.cs
+++ b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ViewerPanel.xaml.cs
@@ -74,7 +74,7 @@ namespace QuickLook.Plugin.VideoViewer
             {
                 if (_wasPlaying) mediaElement.Play();
             };
-            mediaElement.MediaFailed += ShowErrorNotification;
+            //mediaElement.MediaFailed += ShowErrorNotification;
             mediaElement.MediaOpening += (sender, e) => e.Options.EnableHardwareAcceleration = true;
             /*mediaElement.MediaEnded += (s, e) =>
             {
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/FFmpeg.AutoGen.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/FFmpeg.AutoGen.dll
new file mode 100644
index 0000000..f3b402a
Binary files /dev/null and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/FFmpeg.AutoGen.dll differ
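Side note on the new layout (not part of the patch): with ffme split into ffme.common/ffme.win and the managed binaries copied from an ffme\ folder, the plugin has to tell FFME (and, through it, FFmpeg.AutoGen) where the FFmpeg DLLs live, and the MediaFailed notification above is disabled rather than replaced. Below is a minimal C# sketch of one way to wire this up; the folder path, the helper class, and the use of the static MediaElement.FFmpegDirectory property are assumptions (later FFME releases moved that setting, e.g. to Library.FFmpegDirectory), so adjust to the ffme.win version actually referenced by the project.

    using System;
    using System.Diagnostics;
    using System.IO;
    using Unosquare.FFME;

    namespace QuickLook.Plugin.VideoViewer
    {
        // Hypothetical helper, shown only to illustrate the new ffme\ layout.
        internal static class FfmeSetup
        {
            public static void Init(MediaElement mediaElement)
            {
                // Assumption: the FFmpeg native DLLs sit in the same ffme\ folder as the
                // managed ffme.win/ffme.common/FFmpeg.AutoGen assemblies copied by the csproj.
                var ffmpegDir = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "ffme");

                // Must be set before the first media is opened. The regenerated FFmpeg.AutoGen
                // bindings expose ffmpeg.RootPath for the same purpose.
                MediaElement.FFmpegDirectory = ffmpegDir;

                // Keep hardware acceleration on, as the patch does.
                mediaElement.MediaOpening += (sender, e) => e.Options.EnableHardwareAcceleration = true;

                // Quieter alternative to the disabled ShowErrorNotification handler: just log.
                mediaElement.MediaFailed += (sender, e) => Debug.WriteLine($"FFME MediaFailed: {e}");
            }
        }
    }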
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/References/FFmpeg.AutoGen.xml b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/FFmpeg.AutoGen.xml
similarity index 75%
rename from QuickLook.Plugin/QuickLook.Plugin.VideoViewer/References/FFmpeg.AutoGen.xml
rename to QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/FFmpeg.AutoGen.xml
index 97d17fc..9b80a8c 100644
--- a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/References/FFmpeg.AutoGen.xml
+++ b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/FFmpeg.AutoGen.xml
@@ -4,1253 +4,11 @@
     FFmpeg.AutoGen

[Several thousand lines of regenerated IntelliSense documentation follow; summary of the hunks:]

Removed: the per-value documentation of the old bindings' enums — avdevice app-to-dev/dev-to-app control message types, chroma sample location, codec IDs, color primaries/range/space/transfer characteristics, discard levels, duration-estimation methods, frame and packet side-data types, AVIO data-marker and directory-entry types, lock operations, media types, option types, picture types, the full pixel-format list, rounding modes, audio sample formats, parser/subtitle/timecode flags, and the motion-estimation, dithering and resampling settings.

Added or updated: a root-path setting for locating the native libraries ("Gets or sets the root path for loading libraries"), avio_context_free() (with the avio_alloc_context() docs now pointing to it instead of av_free()), a partial-read variant of avio_read(), name-to-value lookups for chroma location, color primaries, range, space and transfer characteristics, maximum-data-alignment and frame-cropping helpers, hardware-device derivation, name lookup and type iteration, an image fill-with-black utility, applying a list of bitstream filters to a packet, extra parameter documentation for the image copy/fill/size functions, and documentation for the preprocessor constants (AV_BUFFER*, AV_CH_*/AV_CH_LAYOUT_*, AV_CODEC_CAP_*, AV_CODEC_FLAG_*/FLAG2_*, AV_CODEC_PROP_*, AV_CPU_FLAG_*, AV_DICT_*, AV_DISPOSITION_*, AV_EF_*, AV_LOG_*, AV_OPT_*, AV_PIX_FMT_FLAG_*, AV_PKT_FLAG_*, AVERROR_*, AVFILTER_*, AVFMT_*, AVIO_*, AVPROBE_*, AVSEEK_*, AVSTREAM_*, CODEC_CAP_*, CODEC_FLAG_*, FF_API_*, plus AV_ERROR_MAX_STRING_SIZE, AV_NOPTS_VALUE, AV_TIME_BASE and related macros).
FF_API_ARCH_SPARC = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_ASPECT_EXTENDED = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_ASS_TIMING = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_AUDIO_CONVERT = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_AUDIOENC_DELAY = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_AVCODEC_RESAMPLE = FF_API_AUDIO_CONVERT + + + FF_API_AVCTX_TIMEBASE = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_AVFILTER_INIT_FILTER = LIBAVFILTER_VERSION_MAJOR < 0x7 + + + FF_API_AVFILTER_OPEN = LIBAVFILTER_VERSION_MAJOR < 0x7 + + + FF_API_AVPACKET_OLD_API = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_AVPICTURE = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_BUFS_VDPAU = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_CAP_VDPAU = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_CODEC_NAME = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_CODED_FRAME = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_CODER_TYPE = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_COMPUTE_PKT_FIELDS2 = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_CONVERGENCE_DURATION = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_COPY_CONTEXT = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_CRC_BIG_TABLE = (LIBAVUTIL_VERSION_MAJOR < 56) + + + FF_API_CRYPTO_SIZE_T = (LIBAVUTIL_VERSION_MAJOR < 56) + + + FF_API_DEBUG_MV = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_DLOG = (LIBAVUTIL_VERSION_MAJOR < 56) + + + FF_API_EMU_EDGE = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_ERROR_FRAME = (LIBAVUTIL_VERSION_MAJOR < 56) + + + FF_API_ERROR_RATE = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_FRAME_QP = (LIBAVUTIL_VERSION_MAJOR < 56) + + + FF_API_GET_CONTEXT_DEFAULTS = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_GETCHROMA = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_GMC = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_HLS_WRAP = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_HTTP_USER_AGENT = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_IDCT_XVIDMMX = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_INPUT_PRESERVED = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_LAVF_AVCTX = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_LAVF_BITEXACT = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_LAVF_CODEC_TB = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_LAVF_FMT_RAWPICTURE = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_LAVF_FRAC = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_LAVF_KEEPSIDE_FLAG = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_LAVF_MERGE_SD = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_LAVR_OPTS = LIBAVFILTER_VERSION_MAJOR < 0x7 + + + FF_API_LOWRES = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_MAX_BFRAMES = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_MB_TYPE = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_MERGE_SD = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_MERGE_SD_API = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_MISSING_SAMPLE = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_MOTION_EST = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_MPV_OPT = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_MV0 = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_NEG_LINESIZES = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_NOCONST_GET_NAME = LIBAVFILTER_VERSION_MAJOR < 0x7 + + + FF_API_NOCONST_GET_SIDE_DATA = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_NORMALIZE_AQP = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_NVENC_OLD_NAME = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_OLD_BSF = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_OLD_FILTER_OPTS = LIBAVFILTER_VERSION_MAJOR < 0x7 + + + FF_API_OLD_FILTER_OPTS_ERROR = LIBAVFILTER_VERSION_MAJOR < 0x7 + + + 
FF_API_OLD_FILTER_REGISTER = LIBAVFILTER_VERSION_MAJOR < 0x7 + + + FF_API_OLD_MSMPEG4 = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_OLD_OPEN_CALLBACKS = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_OLD_ROTATE_API = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_OPT_TYPE_METADATA = (LIBAVUTIL_VERSION_MAJOR < 56) + + + FF_API_PKT_PTS = (LIBAVUTIL_VERSION_MAJOR < 56) + + + FF_API_PLUS1_MINUS1 = (LIBAVUTIL_VERSION_MAJOR < 56) + + + FF_API_PRIVATE_OPT = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_QP_TYPE = LIBPOSTPROC_VERSION_MAJOR < 0x37 + + + FF_API_QSCALE_TYPE = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_QUANT_BIAS = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_R_FRAME_RATE = 1 + + + FF_API_RC_STRATEGY = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_RTP_CALLBACK = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_SET_DIMENSIONS = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_SIDEDATA_ONLY_PKT = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_STAT_BITS = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_STREAM_CODEC_TAG = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_STRUCT_VAAPI_CONTEXT = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_SWS_VECTOR = LIBSWSCALE_VERSION_MAJOR < 0x6 + + + FF_API_TAG_STRING = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_UNUSED_MEMBERS = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_URL_FEOF = (LIBAVFORMAT_VERSION_MAJOR < 58) + + + FF_API_VAAPI = (LIBAVUTIL_VERSION_MAJOR < 56) + + + FF_API_VAAPI_CONTEXT = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_VBV_DELAY = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_VDPAU = (LIBAVUTIL_VERSION_MAJOR < 56) + + + FF_API_VDPAU_PROFILE = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_VIMA_DECODER = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_VISMV = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_VOXWARE = (LIBAVCODEC_VERSION_MAJOR < 58) + + + FF_API_WITHOUT_PREFIX = (LIBAVCODEC_VERSION_MAJOR < 59) + + + FF_API_XVMC = (LIBAVUTIL_VERSION_MAJOR < 56) + + + FF_ASPECT_EXTENDED = 15 + + + FF_BUG_AC_VLC = 0 + + + FF_BUG_AMV = 32 + + + FF_BUG_AUTODETECT = 1 + + + FF_BUG_DC_CLIP = 4096 + + + FF_BUG_DIRECT_BLOCKSIZE = 512 + + + FF_BUG_EDGE = 1024 + + + FF_BUG_HPEL_CHROMA = 2048 + + + FF_BUG_IEDGE = 32768 + + + FF_BUG_MS = 8192 + + + FF_BUG_NO_PADDING = 16 + + + FF_BUG_OLD_MSMPEG4 = 2 + + + FF_BUG_QPEL_CHROMA = 64 + + + FF_BUG_QPEL_CHROMA2 = 256 + + + FF_BUG_STD_QPEL = 128 + + + FF_BUG_TRUNCATED = 16384 + + + FF_BUG_UMP4 = 8 + + + FF_BUG_XVID_ILACE = 4 + + + FF_CMP_BIT = 5 + + + FF_CMP_CHROMA = 256 + + + FF_CMP_DCT = 3 + + + FF_CMP_DCT264 = 14 + + + FF_CMP_DCTMAX = 13 + + + FF_CMP_MEDIAN_SAD = 15 + + + FF_CMP_NSSE = 10 + + + FF_CMP_PSNR = 4 + + + FF_CMP_RD = 6 + + + FF_CMP_SAD = 0 + + + FF_CMP_SATD = 2 + + + FF_CMP_SSE = 1 + + + FF_CMP_VSAD = 8 + + + FF_CMP_VSSE = 9 + + + FF_CMP_W53 = 11 + + + FF_CMP_W97 = 12 + + + FF_CMP_ZERO = 7 + + + FF_CODEC_PROPERTY_CLOSED_CAPTIONS = 0x00000002 + + + FF_CODEC_PROPERTY_LOSSLESS = 0x00000001 + + + FF_CODER_TYPE_AC = 1 + + + FF_CODER_TYPE_DEFLATE = 4 + + + FF_CODER_TYPE_RAW = 2 + + + FF_CODER_TYPE_RLE = 3 + + + FF_CODER_TYPE_VLC = 0 + + + FF_COMPLIANCE_EXPERIMENTAL = -2 + + + FF_COMPLIANCE_NORMAL = 0 + + + FF_COMPLIANCE_STRICT = 1 + + + FF_COMPLIANCE_UNOFFICIAL = -1 + + + FF_COMPLIANCE_VERY_STRICT = 2 + + + FF_COMPRESSION_DEFAULT = -1 + + + FF_DCT_ALTIVEC = 5 + + + FF_DCT_AUTO = 0 + + + FF_DCT_FAAN = 6 + + + FF_DCT_FASTINT = 1 + + + FF_DCT_INT = 2 + + + FF_DCT_MMX = 3 + + + FF_DEBUG_BITSTREAM = 4 + + + FF_DEBUG_BUFFERS = 0x00008000 + + + FF_DEBUG_BUGS = 0x00001000 + + + FF_DEBUG_DCT_COEFF = 0x00000040 + + + FF_DEBUG_ER = 
0x00000400 + + + FF_DEBUG_GREEN_MD = 0x00800000 + + + FF_DEBUG_MB_TYPE = 8 + + + FF_DEBUG_MMCO = 0x00000800 + + + FF_DEBUG_MV = 32 + + + FF_DEBUG_NOMC = 0x01000000 + + + FF_DEBUG_PICT_INFO = 1 + + + FF_DEBUG_PTS = 0x00000200 + + + FF_DEBUG_QP = 16 + + + FF_DEBUG_RC = 2 + + + FF_DEBUG_SKIP = 0x00000080 + + + FF_DEBUG_STARTCODE = 0x00000100 + + + FF_DEBUG_THREADS = 0x00010000 + + + FF_DEBUG_VIS_MB_TYPE = 0x00004000 + + + FF_DEBUG_VIS_MV_B_BACK = 0x00000004 + + + FF_DEBUG_VIS_MV_B_FOR = 0x00000002 + + + FF_DEBUG_VIS_MV_P_FOR = 0x00000001 + + + FF_DEBUG_VIS_QP = 0x00002000 + + + FF_DECODE_ERROR_INVALID_BITSTREAM = 1 + + + FF_DECODE_ERROR_MISSING_REFERENCE = 2 + + + FF_DEFAULT_QUANT_BIAS = 999999 + + + FF_DTG_AFD_14_9 = 11 + + + FF_DTG_AFD_16_9 = 10 + + + FF_DTG_AFD_16_9_SP_14_9 = 14 + + + FF_DTG_AFD_4_3 = 9 + + + FF_DTG_AFD_4_3_SP_14_9 = 13 + + + FF_DTG_AFD_SAME = 8 + + + FF_DTG_AFD_SP_4_3 = 15 + + + FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO = 0x2 + + + FF_DXVA2_WORKAROUND_SCALING_LIST_ZIGZAG = 0x1 + + + FF_EC_DEBLOCK = 2 + + + FF_EC_FAVOR_INTER = 256 + + + FF_EC_GUESS_MVS = 1 + + + FF_FDEBUG_TS = 0x0001 + + + FF_IDCT_ALTIVEC = 8 + + + FF_IDCT_ARM = 7 + + + FF_IDCT_AUTO = 0 + + + FF_IDCT_FAAN = 20 + + + FF_IDCT_INT = 1 + + + FF_IDCT_IPP = 13 + + + FF_IDCT_NONE = 24 + + + FF_IDCT_SH4 = 9 + + + FF_IDCT_SIMPLE = 2 + + + FF_IDCT_SIMPLEALPHA = 23 + + + FF_IDCT_SIMPLEARM = 10 + + + FF_IDCT_SIMPLEARMV5TE = 16 + + + FF_IDCT_SIMPLEARMV6 = 17 + + + FF_IDCT_SIMPLEAUTO = 128 + + + FF_IDCT_SIMPLEMMX = 3 + + + FF_IDCT_SIMPLENEON = 22 + + + FF_IDCT_SIMPLEVIS = 18 + + + FF_IDCT_XVID = 14 + + + FF_IDCT_XVIDMMX = 14 + + + FF_INPUT_BUFFER_PADDING_SIZE = 32 + + + FF_LAMBDA_MAX = (256*128-1) + + + FF_LAMBDA_SCALE = (1<<FF_LAMBDA_SHIFT) + + + FF_LAMBDA_SHIFT = 7 + + + FF_LEVEL_UNKNOWN = -99 + + + FF_LOSS_ALPHA = 0x8 + + + FF_LOSS_CHROMA = 0x20 + + + FF_LOSS_COLORQUANT = 0x10 + + + FF_LOSS_COLORSPACE = 0x4 + + + FF_LOSS_DEPTH = 0x2 + + + FF_LOSS_RESOLUTION = 0x1 + + + FF_MAX_B_FRAMES = 16 + + + FF_MB_DECISION_BITS = 1 + + + FF_MB_DECISION_RD = 2 + + + FF_MB_DECISION_SIMPLE = 0 + + + FF_MIN_BUFFER_SIZE = 16384 + + + FF_PRED_LEFT = 0 + + + FF_PRED_MEDIAN = 2 + + + FF_PRED_PLANE = 1 + + + FF_PROFILE_AAC_ELD = 38 + + + FF_PROFILE_AAC_HE = 4 + + + FF_PROFILE_AAC_HE_V2 = 28 + + + FF_PROFILE_AAC_LD = 22 + + + FF_PROFILE_AAC_LOW = 1 + + + FF_PROFILE_AAC_LTP = 3 + + + FF_PROFILE_AAC_MAIN = 0 + + + FF_PROFILE_AAC_SSR = 2 + + + FF_PROFILE_DNXHD = 0 + + + FF_PROFILE_DNXHR_444 = 5 + + + FF_PROFILE_DNXHR_HQ = 3 + + + FF_PROFILE_DNXHR_HQX = 4 + + + FF_PROFILE_DNXHR_LB = 1 + + + FF_PROFILE_DNXHR_SQ = 2 + + + FF_PROFILE_DTS = 20 + + + FF_PROFILE_DTS_96_24 = 40 + + + FF_PROFILE_DTS_ES = 30 + + + FF_PROFILE_DTS_EXPRESS = 70 + + + FF_PROFILE_DTS_HD_HRA = 50 + + + FF_PROFILE_DTS_HD_MA = 60 + + + FF_PROFILE_H264_BASELINE = 66 + + + FF_PROFILE_H264_CAVLC_444 = 44 + + + FF_PROFILE_H264_CONSTRAINED = (1<<9) + + + FF_PROFILE_H264_CONSTRAINED_BASELINE = (66|FF_PROFILE_H264_CONSTRAINED) + + + FF_PROFILE_H264_EXTENDED = 88 + + + FF_PROFILE_H264_HIGH = 100 + + + FF_PROFILE_H264_HIGH_10 = 110 + + + FF_PROFILE_H264_HIGH_10_INTRA = (110|FF_PROFILE_H264_INTRA) + + + FF_PROFILE_H264_HIGH_422 = 122 + + + FF_PROFILE_H264_HIGH_422_INTRA = (122|FF_PROFILE_H264_INTRA) + + + FF_PROFILE_H264_HIGH_444 = 144 + + + FF_PROFILE_H264_HIGH_444_INTRA = (244|FF_PROFILE_H264_INTRA) + + + FF_PROFILE_H264_HIGH_444_PREDICTIVE = 244 + + + FF_PROFILE_H264_INTRA = (1<<11) + + + FF_PROFILE_H264_MAIN = 77 + + + FF_PROFILE_H264_MULTIVIEW_HIGH = 118 + + + 
FF_PROFILE_H264_STEREO_HIGH = 128 + + + FF_PROFILE_HEVC_MAIN = 1 + + + FF_PROFILE_HEVC_MAIN_10 = 2 + + + FF_PROFILE_HEVC_MAIN_STILL_PICTURE = 3 + + + FF_PROFILE_HEVC_REXT = 4 + + + FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION = 32768 + + + FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0 = 1 + + + FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1 = 2 + + + FF_PROFILE_JPEG2000_DCINEMA_2K = 3 + + + FF_PROFILE_JPEG2000_DCINEMA_4K = 4 + + + FF_PROFILE_MPEG2_422 = 0 + + + FF_PROFILE_MPEG2_AAC_HE = 131 + + + FF_PROFILE_MPEG2_AAC_LOW = 128 + + + FF_PROFILE_MPEG2_HIGH = 1 + + + FF_PROFILE_MPEG2_MAIN = 4 + + + FF_PROFILE_MPEG2_SIMPLE = 5 + + + FF_PROFILE_MPEG2_SNR_SCALABLE = 3 + + + FF_PROFILE_MPEG2_SS = 2 + + + FF_PROFILE_MPEG4_ADVANCED_CODING = 11 + + + FF_PROFILE_MPEG4_ADVANCED_CORE = 12 + + + FF_PROFILE_MPEG4_ADVANCED_REAL_TIME = 9 + + + FF_PROFILE_MPEG4_ADVANCED_SCALABLE_TEXTURE = 13 + + + FF_PROFILE_MPEG4_ADVANCED_SIMPLE = 15 + + + FF_PROFILE_MPEG4_BASIC_ANIMATED_TEXTURE = 7 + + + FF_PROFILE_MPEG4_CORE = 2 + + + FF_PROFILE_MPEG4_CORE_SCALABLE = 10 + + + FF_PROFILE_MPEG4_HYBRID = 8 + + + FF_PROFILE_MPEG4_MAIN = 3 + + + FF_PROFILE_MPEG4_N_BIT = 4 + + + FF_PROFILE_MPEG4_SCALABLE_TEXTURE = 5 + + + FF_PROFILE_MPEG4_SIMPLE = 0 + + + FF_PROFILE_MPEG4_SIMPLE_FACE_ANIMATION = 6 + + + FF_PROFILE_MPEG4_SIMPLE_SCALABLE = 1 + + + FF_PROFILE_MPEG4_SIMPLE_STUDIO = 14 + + + FF_PROFILE_RESERVED = -100 + + + FF_PROFILE_UNKNOWN = -99 + + + FF_PROFILE_VC1_ADVANCED = 3 + + + FF_PROFILE_VC1_COMPLEX = 2 + + + FF_PROFILE_VC1_MAIN = 1 + + + FF_PROFILE_VC1_SIMPLE = 0 + + + FF_PROFILE_VP9_0 = 0 + + + FF_PROFILE_VP9_1 = 1 + + + FF_PROFILE_VP9_2 = 2 + + + FF_PROFILE_VP9_3 = 3 + + + FF_QP2LAMBDA = 118 + + + FF_QSCALE_TYPE_H264 = 2 + + + FF_QSCALE_TYPE_MPEG1 = 0 + + + FF_QSCALE_TYPE_MPEG2 = 1 + + + FF_QSCALE_TYPE_VP56 = 3 + + + FF_QUALITY_SCALE = FF_LAMBDA_SCALE + + + FF_RC_STRATEGY_XVID = 1 + + + FF_SUB_CHARENC_MODE_AUTOMATIC = 0 + + + FF_SUB_CHARENC_MODE_DO_NOTHING = -1 + + + FF_SUB_CHARENC_MODE_PRE_DECODER = 1 + + + FF_SUB_TEXT_FMT_ASS = 0 + + + FF_SUB_TEXT_FMT_ASS_WITH_TIMINGS = 1 + + + FF_THREAD_FRAME = 1 + + + FF_THREAD_SLICE = 2 + + + HWACCEL_CODEC_CAP_EXPERIMENTAL = 0x0200 + + + LIBAVCODEC_BUILD = LIBAVCODEC_VERSION_INT + + + LIBAVCODEC_IDENT = "Lavc" AV_STRINGIFY(LIBAVCODEC_VERSION) + + + LIBAVCODEC_VERSION = AV_VERSION(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO) + + + LIBAVCODEC_VERSION_INT = AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO) + + + LIBAVCODEC_VERSION_MAJOR = 57 + + + LIBAVCODEC_VERSION_MICRO = 100 + + + LIBAVCODEC_VERSION_MINOR = 107 + + + LIBAVDEVICE_BUILD = LIBAVDEVICE_VERSION_INT + + + LIBAVDEVICE_IDENT = "Lavd" AV_STRINGIFY(LIBAVDEVICE_VERSION) + + + LIBAVDEVICE_VERSION = AV_VERSION(LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO) + + + LIBAVDEVICE_VERSION_INT = AV_VERSION_INT(LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO) + + + LIBAVDEVICE_VERSION_MAJOR = 57 + + + LIBAVDEVICE_VERSION_MICRO = 100 + + + LIBAVDEVICE_VERSION_MINOR = 10 + + + LIBAVFILTER_BUILD = LIBAVFILTER_VERSION_INT + + + LIBAVFILTER_IDENT = "Lavfi" + + + LIBAVFILTER_VERSION = AV_VERSION(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO) + + + LIBAVFILTER_VERSION_INT = AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO) + + + LIBAVFILTER_VERSION_MAJOR = 0x6 + + + LIBAVFILTER_VERSION_MICRO = 0x64 + + + LIBAVFILTER_VERSION_MINOR = 
0x6b + + + LIBAVFORMAT_BUILD = LIBAVFORMAT_VERSION_INT + + + LIBAVFORMAT_IDENT = "Lavf" AV_STRINGIFY(LIBAVFORMAT_VERSION) + + + LIBAVFORMAT_VERSION = AV_VERSION(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO) + + + LIBAVFORMAT_VERSION_INT = AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO) + + + LIBAVFORMAT_VERSION_MAJOR = 57 + + + LIBAVFORMAT_VERSION_MICRO = 100 + + + LIBAVFORMAT_VERSION_MINOR = 83 + + + LIBAVUTIL_BUILD = LIBAVUTIL_VERSION_INT + + + LIBAVUTIL_IDENT = "Lavu" AV_STRINGIFY(LIBAVUTIL_VERSION) + + + LIBAVUTIL_VERSION = AV_VERSION(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO) + + + LIBAVUTIL_VERSION_INT = AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO) + + + LIBAVUTIL_VERSION_MAJOR = 55 + + + LIBAVUTIL_VERSION_MICRO = 100 + + + LIBAVUTIL_VERSION_MINOR = 78 + + + LIBPOSTPROC_BUILD = LIBPOSTPROC_VERSION_INT + + + LIBPOSTPROC_IDENT = "postproc" + + + LIBPOSTPROC_VERSION = AV_VERSION(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO) + + + LIBPOSTPROC_VERSION_INT = AV_VERSION_INT(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO) + + + LIBPOSTPROC_VERSION_MAJOR = 0x36 + + + LIBPOSTPROC_VERSION_MICRO = 0x64 + + + LIBPOSTPROC_VERSION_MINOR = 0x7 + + + LIBSWRESAMPLE_BUILD = LIBSWRESAMPLE_VERSION_INT + + + LIBSWRESAMPLE_IDENT = "SwR" + + + LIBSWRESAMPLE_VERSION = AV_VERSION(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO) + + + LIBSWRESAMPLE_VERSION_INT = AV_VERSION_INT(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO) + + + LIBSWRESAMPLE_VERSION_MAJOR = 0x2 + + + LIBSWRESAMPLE_VERSION_MICRO = 0x64 + + + LIBSWRESAMPLE_VERSION_MINOR = 0x9 + + + LIBSWSCALE_BUILD = LIBSWSCALE_VERSION_INT + + + LIBSWSCALE_IDENT = "SwS" + + + LIBSWSCALE_VERSION = AV_VERSION(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO) + + + LIBSWSCALE_VERSION_INT = AV_VERSION_INT(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO) + + + LIBSWSCALE_VERSION_MAJOR = 0x4 + + + LIBSWSCALE_VERSION_MICRO = 0x64 + + + LIBSWSCALE_VERSION_MINOR = 0x8 + + + M_E = 2.7182818284590452354 + + + M_LN10 = 2.30258509299404568402 + + + M_LN2 = 0.69314718055994530942 + + + M_LOG2_10 = 3.32192809488736234787 + + + M_PHI = 1.61803398874989484820 + + + M_PI = 3.14159265358979323846 + + + M_PI_2 = 1.57079632679489661923 + + + M_SQRT1_2 = 0.70710678118654752440 + + + M_SQRT2 = 1.41421356237309504880 + + + MAX_REORDER_DELAY = 16 + + + MAX_STD_TIMEBASES = (30*12+30+3+6) + + + MB_TYPE_16x16 = 0x0008 + + + MB_TYPE_16x8 = 0x0010 + + + MB_TYPE_8x16 = 0x0020 + + + MB_TYPE_8x8 = 0x0040 + + + MB_TYPE_ACPRED = 0x0200 + + + MB_TYPE_CBP = 0x00020000 + + + MB_TYPE_DIRECT2 = 0x0100 + + + MB_TYPE_GMC = 0x0400 + + + MB_TYPE_INTERLACED = 0x0080 + + + MB_TYPE_INTRA_PCM = 0x0004 + + + MB_TYPE_INTRA16x16 = 0x0002 + + + MB_TYPE_INTRA4x4 = 0x0001 + + + MB_TYPE_L0 = (MB_TYPE_P0L0 | MB_TYPE_P1L0) + + + MB_TYPE_L0L1 = (MB_TYPE_L0 | MB_TYPE_L1) + + + MB_TYPE_L1 = (MB_TYPE_P0L1 | MB_TYPE_P1L1) + + + MB_TYPE_P0L0 = 0x1000 + + + MB_TYPE_P0L1 = 0x4000 + + + MB_TYPE_P1L0 = 0x2000 + + + MB_TYPE_P1L1 = 0x8000 + + + MB_TYPE_QUANT = 0x00010000 + + + MB_TYPE_SKIP = 0x0800 + + + PARSER_FLAG_COMPLETE_FRAMES = 0x0001 + + + PARSER_FLAG_FETCHED_OFFSET = 0x0004 + + + PARSER_FLAG_ONCE = 0x0002 + + + PARSER_FLAG_USE_CODEC_TS = 0x1000 + + + 
PP_CPU_CAPS_3DNOW = 0x40000000 + + + PP_CPU_CAPS_ALTIVEC = 0x10000000 + + + PP_CPU_CAPS_AUTO = 0x80000 + + + PP_CPU_CAPS_MMX = 0x80000000u + + + PP_CPU_CAPS_MMX2 = 0x20000000 + + + PP_FORMAT = 0x8 + + + PP_FORMAT_411 = 0x2 | PP_FORMAT + + + PP_FORMAT_420 = 0x11 | PP_FORMAT + + + PP_FORMAT_422 = 0x1 | PP_FORMAT + + + PP_FORMAT_440 = 0x10 | PP_FORMAT + + + PP_FORMAT_444 = 0x0 | PP_FORMAT + + + PP_PICT_TYPE_QP2 = 0x10 + + + PP_QUALITY_MAX = 0x6 + + + SLICE_FLAG_ALLOW_FIELD = 0x0002 + + + SLICE_FLAG_ALLOW_PLANE = 0x0004 + + + SLICE_FLAG_CODED_ORDER = 0x0001 + + + SWR_FLAG_RESAMPLE = 0x1 + + + SWS_ACCURATE_RND = 0x40000 + + + SWS_AREA = 0x20 + + + SWS_BICUBIC = 0x4 + + + SWS_BICUBLIN = 0x40 + + + SWS_BILINEAR = 0x2 + + + SWS_BITEXACT = 0x80000 + + + SWS_CS_BT2020 = 0x9 + + + SWS_CS_DEFAULT = 0x5 + + + SWS_CS_FCC = 0x4 + + + SWS_CS_ITU601 = 0x5 + + + SWS_CS_ITU624 = 0x5 + + + SWS_CS_ITU709 = 0x1 + + + SWS_CS_SMPTE170M = 0x5 + + + SWS_CS_SMPTE240M = 0x7 + + + SWS_DIRECT_BGR = 0x8000 + + + SWS_ERROR_DIFFUSION = 0x800000 + + + SWS_FAST_BILINEAR = 0x1 + + + SWS_FULL_CHR_H_INP = 0x4000 + + + SWS_FULL_CHR_H_INT = 0x2000 + + + SWS_GAUSS = 0x80 + + + SWS_LANCZOS = 0x200 + + + SWS_MAX_REDUCE_CUTOFF = 0.002d + + + SWS_PARAM_DEFAULT = 0x1e240 + + + SWS_POINT = 0x10 + + + SWS_PRINT_INFO = 0x1000 + + + SWS_SINC = 0x100 + + + SWS_SPLINE = 0x400 + + + SWS_SRC_V_CHR_DROP_MASK = 0x30000 + + + SWS_SRC_V_CHR_DROP_SHIFT = 0x10 + + + SWS_X = 0x8 + + + Message types used by avdevice_app_to_dev_control_message(). + + + Dummy message. + + + Window size change message. + + + Repaint request message. + + + Request pause/play. + + + Request pause/play. + + + Request pause/play. + + + Volume control message. + + + Mute control messages. + + + Mute control messages. + + + Mute control messages. + + + Get volume/mute messages. + + + Get volume/mute messages. + + + Not part of ABI + + + Location of chroma samples. + + + MPEG-2/4 4:2:0, H.264 default for 4:2:0 + + + MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0 + + + ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2 + + + Not part of ABI + + + not part of ABI/API + + + Identify the syntax and semantics of the bitstream. The principle is roughly: Two decoders with the same ID can decode the same streams. Two encoders with the same ID can encode compatible streams. There may be slight deviations from the principle due to implementation details. + + + preferred ID for MPEG-1/2 video decoding + + + A dummy id pointing at the start of audio codecs + + + preferred ID for decoding MPEG audio layer 1, 2 or 3 + + + as in Berlin toast format + + + A dummy ID pointing at the start of subtitle codecs. + + + raw UTF-8 text + + + A dummy ID pointing at the start of various fake codecs. + + + Contain timestamp estimated through PCR of program stream. + + + codec_id is not known (like AV_CODEC_ID_NONE) but lavf should attempt to identify it + + + _FAKE_ codec to indicate a raw MPEG-2 TS stream (only used by libavformat) + + + _FAKE_ codec to indicate a MPEG-4 Systems stream (only used by libavformat) + + + Dummy codec for streams containing only metadata information. + + + Passthrough codec, AVFrames wrapped in AVPacket + + + Chromaticity coordinates of the source primaries. These values match the ones defined by ISO/IEC 23001-8_2013 § 7.1. 
+ + + also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP177 Annex B + + + also FCC Title 47 Code of Federal Regulations 73.682 (a)(20) + + + also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM + + + also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC + + + functionally identical to above + + + colour filters using Illuminant C + + + ITU-R BT2020 + + + SMPTE ST 428-1 (CIE 1931 XYZ) + + + SMPTE ST 431-2 (2011) / DCI P3 + + + SMPTE ST 432-1 (2010) / P3 D65 / Display P3 + + + JEDEC P22 phosphors + + + Not part of ABI + + + MPEG vs JPEG YUV range. + + + the normal 219*2^(n-8) "MPEG" YUV ranges + + + the normal 2^n-1 "JPEG" YUV ranges + + + Not part of ABI + + + YUV colorspace type. These values match the ones defined by ISO/IEC 23001-8_2013 § 7.3. + + + order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB) + + + also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / SMPTE RP177 Annex B + + + FCC Title 47 Code of Federal Regulations 73.682 (a)(20) + + + also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601 + + + also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC + + + functionally identical to above + + + Used by Dirac / VC-2 and H.264 FRext, see ITU-T SG16 + + + ITU-R BT2020 non-constant luminance system + + + ITU-R BT2020 constant luminance system + + + SMPTE 2085, Y'D'zD'x + + + Chromaticity-derived non-constant luminance system + + + Chromaticity-derived constant luminance system + + + ITU-R BT.2100-0, ICtCp + + + Not part of ABI + + + Color Transfer Characteristic. These values match the ones defined by ISO/IEC 23001-8_2013 § 7.2. + + + also ITU-R BT1361 + + + also ITU-R BT470M / ITU-R BT1700 625 PAL & SECAM + + + also ITU-R BT470BG + + + also ITU-R BT601-6 525 or 625 / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC + + + "Linear transfer characteristics" + + + "Logarithmic transfer characteristic (100:1 range)" + + + "Logarithmic transfer characteristic (100 * Sqrt(10) : 1 range)" + + + IEC 61966-2-4 + + + ITU-R BT1361 Extended Colour Gamut + + + IEC 61966-2-1 (sRGB or sYCC) + + + ITU-R BT2020 for 10-bit system + + + ITU-R BT2020 for 12-bit system + + + SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems + + + SMPTE ST 428-1 + + + ARIB STD-B67, known as "Hybrid log-gamma" + + + Not part of ABI + + + Message types used by avdevice_dev_to_app_control_message(). + + + Dummy message. + + + Create window buffer message. + + + Prepare window buffer message. + + + Display window buffer message. + + + Destroy window buffer message. + + + Buffer fullness status messages. + + + Buffer fullness status messages. + + + Buffer readable/writable. + + + Buffer readable/writable. + + + Mute state change message. + + + Volume level change message. + + + discard nothing + + + discard useless packets like 0 size packets in avi + + + discard all non reference + + + discard all bidirectional frames + + + discard all non intra frames + + + discard all frames except keyframes + + + discard all + + + The duration of a video can be estimated through various ways, and this enum can be used to know how the duration was estimated. + + + Duration accurately estimated from PTSes + + + Duration estimated from a stream with a known duration + + + Duration estimated from bitrate (less accurate) + + + stage of the initialization of the link properties (dimensions, etc) + + + not started + + + started, but incomplete + + + complete + + + @{ AVFrame is an abstraction for reference-counted raw multimedia data. 
+ + + The data is the AVPanScan struct defined in libavcodec. + + + ATSC A53 Part 4 Closed Captions. A53 CC bitstream is stored as uint8_t in AVFrameSideData.data. The number of bytes of CC data is AVFrameSideData.size. + + + Stereoscopic 3d metadata. The data is the AVStereo3D struct defined in libavutil/stereo3d.h. + + + The data is the AVMatrixEncoding enum defined in libavutil/channel_layout.h. + + + Metadata relevant to a downmix procedure. The data is the AVDownmixInfo struct defined in libavutil/downmix_info.h. + + + ReplayGain information in the form of the AVReplayGain struct. + + + This side data contains a 3x3 transformation matrix describing an affine transformation that needs to be applied to the frame for correct presentation. + + + Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVActiveFormatDescription enum. + + + Motion vectors exported by some codecs (on demand through the export_mvs flag set in the libavcodec AVCodecContext flags2 option). The data is the AVMotionVector struct defined in libavutil/motion_vector.h. + + + Recommmends skipping the specified number of samples. This is exported only if the "skip_manual" AVOption is set in libavcodec. This has the same format as AV_PKT_DATA_SKIP_SAMPLES. + + + This side data must be associated with an audio frame and corresponds to enum AVAudioServiceType defined in avcodec.h. + + + Mastering display metadata associated with a video frame. The payload is an AVMasteringDisplayMetadata type and contains information about the mastering display color volume. + + + The GOP timecode in 25 bit timecode format. Data format is 64-bit integer. This is set on the first frame of a GOP that has a temporal reference of 0. + + + The data represents the AVSphericalMapping structure defined in libavutil/spherical.h. + + + Content light level (based on CTA-861.3). This payload contains data in the form of the AVContentLightMetadata struct. + + + The data contains an ICC profile as an opaque octet buffer following the format described by ISO 15076-1 with an optional name defined in the metadata key entry "name". + + + Transfer the data from the queried hw frame. + + + Transfer the data to the queried hw frame. + + + Different data types that can be returned via the AVIO write_data_type callback. + + + Header data; this needs to be present for the stream to be decodeable. + + + A point in the output bytestream where a decoder can start decoding (i.e. a keyframe). A demuxer/decoder given the data flagged with AVIO_DATA_MARKER_HEADER, followed by any AVIO_DATA_MARKER_SYNC_POINT, should give decodeable results. + + + A point in the output bytestream where a demuxer can start parsing (for non self synchronizing bytestream formats). That is, any non-keyframe packet start point. + + + This is any, unlabelled data. It can either be a muxer not marking any positions at all, it can be an actual boundary/sync point that the muxer chooses not to mark, or a later part of a packet/fragment that is cut into multiple write callbacks due to limited IO buffer size. + + + Trailer data, which doesn't contain actual content, but only for finalizing the output file. + + + A point in the output bytestream where the underlying AVIOContext might flush the buffer depending on latency or buffering requirements. Typically means the end of a packet. + + + Directory entry types. 
+ + + Lock operation used by lockmgr + + + Create a mutex + + + Lock the mutex + + + Unlock the mutex + + + Free mutex resources + + + Media Type + + + Usually treated as AVMEDIA_TYPE_DATA + + + Opaque data information usually continuous + + + Opaque data information usually sparse + + + @{ AVOptions provide a generic system to declare options on arbitrary structs ("objects"). An option can have a help text, a type and a range of possible values. Options may then be enumerated, read and written to. + + + offset must point to a pointer immediately followed by an int for the length + + + offset must point to two consecutive integers + + + offset must point to AVRational + + + Types and functions for working with AVPacket. @{ + + + An AV_PKT_DATA_PALETTE side data packet contains exactly AVPALETTE_SIZE bytes worth of palette. This side data signals that a new palette is present. + + + The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format that the extradata buffer was changed and the receiving side should act upon it appropriately. The new extradata is embedded in the side data buffer and should be immediately used for processing the current frame or packet. + + + An AV_PKT_DATA_PARAM_CHANGE side data packet is laid out as follows: + + + An AV_PKT_DATA_H263_MB_INFO side data packet contains a number of structures with info about macroblocks relevant to splitting the packet into smaller packets on macroblock edges (e.g. as for RFC 2190). That is, it does not necessarily contain info about all macroblocks, as long as the distance between macroblocks in the info is smaller than the target payload size. Each MB info structure is 12 bytes, and is laid out as follows: + + + This side data should be associated with an audio stream and contains ReplayGain information in form of the AVReplayGain struct. + + + This side data contains a 3x3 transformation matrix describing an affine transformation that needs to be applied to the decoded video frames for correct presentation. + + + This side data should be associated with a video stream and contains Stereoscopic 3D information in form of the AVStereo3D struct. + + + This side data should be associated with an audio stream and corresponds to enum AVAudioServiceType. + + + This side data contains quality related information from the encoder. + + + This side data contains an integer value representing the stream index of a "fallback" track. A fallback track indicates an alternate track to use when the current track can not be decoded for some reason. e.g. no decoder available for codec. + + + This side data corresponds to the AVCPBProperties struct. + + + Recommmends skipping the specified number of samples + + + An AV_PKT_DATA_JP_DUALMONO side data packet indicates that the packet may contain "dual mono" audio specific to Japanese DTV and if it is true, recommends only the selected channel to be used. + + + A list of zero terminated key/value strings. There is no end marker for the list, so it is required to rely on the side data size to stop. + + + Subtitle event position + + + Data found in BlockAdditional element of matroska container. There is no end marker for the data, so it is required to rely on the side data size to recognize the end. 8 byte id (as found in BlockAddId) followed by data. + + + The optional first identifier line of a WebVTT cue. + + + The optional settings (rendering instructions) that immediately follow the timestamp specifier of a WebVTT cue. + + + A list of zero terminated key/value strings. 
There is no end marker for the list, so it is required to rely on the side data size to stop. This side data includes updated metadata which appeared in the stream. + + + MPEGTS stream ID, this is required to pass the stream ID information from the demuxer to the corresponding muxer. + + + Mastering display metadata (based on SMPTE-2086:2014). This metadata should be associated with a video stream and contains data in the form of the AVMasteringDisplayMetadata struct. + + + This side data should be associated with a video stream and corresponds to the AVSphericalMapping structure. + + + Content light level (based on CTA-861.3). This metadata should be associated with a video stream and contains data in the form of the AVContentLightMetadata struct. + + + ATSC A53 Part 4 Closed Captions. This metadata should be associated with a video stream. A53 CC bitstream is stored as uint8_t in AVPacketSideData.data. The number of bytes of CC data is AVPacketSideData.size. + + + The number of side data elements (in fact a bit more than it). This is not part of the public API/ABI in the sense that it may change when new side data types are added. This must stay the last enum value. If its value becomes huge, some code using it needs to be updated as it assumes it to be smaller than other limits. + + + @{ + + + @} @} + + + Undefined + + + Intra + + + Predicted + + + Bi-dir predicted + + + S(GMC)-VOP MPEG-4 + + + Switching Intra + + + Switching Predicted + + + BI type + + + Pixel format. + + + planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples) + + + packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr + + + packed RGB 8:8:8, 24bpp, RGBRGB... + + + packed RGB 8:8:8, 24bpp, BGRBGR... + + + planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples) + + + planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples) + + + planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples) + + + planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) + + + Y , 8bpp + + + Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb + + + Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb + + + 8 bits with AV_PIX_FMT_RGB32 palette + + + planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting color_range + + + planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting color_range + + + planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting color_range + + + XVideo Motion Acceleration via common packet passing + + + packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1 + + + packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3 + + + packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb) + + + packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits + + + packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb) + + + packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb) + + + packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits + + + packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb) + + + planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V) + + + as above, but U and V bytes are swapped + + + packed ARGB 8:8:8:8, 32bpp, ARGBARGB... + + + packed RGBA 8:8:8:8, 32bpp, RGBARGBA... + + + packed ABGR 8:8:8:8, 32bpp, ABGRABGR... 
+ + + packed BGRA 8:8:8:8, 32bpp, BGRABGRA... + + + Y , 16bpp, big-endian + + + Y , 16bpp, little-endian + + + planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples) + + + planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range + + + planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples) + + + H.264 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers + + + MPEG-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers + + + MPEG-2 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers + + + WMV3 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers + + + VC-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers + + + packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian + + + packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian + + + packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian + + + packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian + + + packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian , X=unused/undefined + + + packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined + + + packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian + + + packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian + + + packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), big-endian , X=unused/undefined + + + packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), little-endian, X=unused/undefined + + + HW acceleration through VA API at motion compensation entry-point, Picture.data[3] contains a vaapi_render_state struct which contains macroblocks as well as various fields extracted from headers + + + HW acceleration through VA API at IDCT entry-point, Picture.data[3] contains a vaapi_render_state struct which contains fields extracted from headers + + + HW decoding through VA API, Picture.data[3] contains a VASurfaceID + + + @} + + + planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + + + planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + + + planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + + + planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + + + planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + + + planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + + + MPEG-4 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers + + + HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer + + + packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), little-endian, X=unused/undefined + + + packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), big-endian, X=unused/undefined + + + packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), little-endian, X=unused/undefined + + + packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), 
big-endian, X=unused/undefined + + + 8 bits gray, 8 bits alpha + + + alias for AV_PIX_FMT_YA8 + + + alias for AV_PIX_FMT_YA8 + + + packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian + + + packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian + + + planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + + + planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + + + planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + + + planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + + + planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + + + planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + + + planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + + + planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + + + planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + + + planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + + + planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + + + planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + + + hardware decoding through VDA + + + planar GBR 4:4:4 24bpp + + + planar GBR 4:4:4 27bpp, big-endian + + + planar GBR 4:4:4 27bpp, little-endian + + + planar GBR 4:4:4 30bpp, big-endian + + + planar GBR 4:4:4 30bpp, little-endian + + + planar GBR 4:4:4 48bpp, big-endian + + + planar GBR 4:4:4 48bpp, little-endian + + + planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples) + + + planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples) + + + planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), big-endian + + + planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), little-endian + + + planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), big-endian + + + planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), little-endian + + + planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), big-endian + + + planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), little-endian + + + planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian) + + + planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian) + + + planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian) + + + planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian) + + + planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian) + + + planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian) + + + planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian) + + + planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian) + + + planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian) + + + planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian) + + + planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian) + + + planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian) + + + HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface + + + packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as little-endian, the 4 lower bits are set to 
0 + + + packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as big-endian, the 4 lower bits are set to 0 + + + interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples) + + + interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + + + interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + + + packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian + + + packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian + + + packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian + + + packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian + + + packed YUV 4:2:2, 16bpp, Y0 Cr Y1 Cb + + + HW acceleration through VDA, data[3] contains a CVPixelBufferRef + + + 16 bits gray, 16 bits alpha (big-endian) + + + 16 bits gray, 16 bits alpha (little-endian) + + + planar GBRA 4:4:4:4 32bpp + + + planar GBRA 4:4:4:4 64bpp, big-endian + + + planar GBRA 4:4:4:4 64bpp, little-endian + + + HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure. + + + HW acceleration though MMAL, data[3] contains a pointer to the MMAL_BUFFER_HEADER_T structure. + + + HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView pointer + + + HW acceleration through CUDA. data[i] contain CUdeviceptr pointers exactly as for system memory frames. + + + packed RGB 8:8:8, 32bpp, XRGBXRGB... X=unused/undefined + + + packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined + + + packed BGR 8:8:8, 32bpp, XBGRXBGR... X=unused/undefined + + + packed BGR 8:8:8, 32bpp, BGRXBGRX... 
X=unused/undefined + + + planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + + + planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + + + planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + + + planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + + + planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + + + planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + + + planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + + + planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + + + planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + + + planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + + + planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + + + planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + + + planar GBR 4:4:4 36bpp, big-endian + + + planar GBR 4:4:4 36bpp, little-endian + + + planar GBR 4:4:4 42bpp, big-endian + + + planar GBR 4:4:4 42bpp, little-endian + + + planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV411P and setting color_range + + + bayer, BGBG..(odd line), GRGR..(even line), 8-bit samples */ + + + bayer, RGRG..(odd line), GBGB..(even line), 8-bit samples */ + + + bayer, GBGB..(odd line), RGRG..(even line), 8-bit samples */ + + + bayer, GRGR..(odd line), BGBG..(even line), 8-bit samples */ + + + bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, little-endian */ + + + bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, big-endian */ + + + bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, little-endian */ + + + bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, big-endian */ + + + bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, little-endian */ + + + bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, big-endian */ + + + bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, little-endian */ + + + bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, big-endian */ + + + planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian + + + planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian + + + planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian + + + planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian + + + packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), little-endian + + + packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), big-endian + + + hardware decoding through Videotoolbox + + + like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, little-endian + + + like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, big-endian + + + planar GBR 4:4:4:4 48bpp, big-endian + + + planar GBR 4:4:4:4 48bpp, little-endian + + + planar GBR 4:4:4:4 40bpp, big-endian + + + planar GBR 4:4:4:4 40bpp, little-endian + + + hardware decoding through MediaCodec + + + Y , 12bpp, big-endian + + + Y , 12bpp, little-endian + + + Y , 10bpp, big-endian + + + Y , 10bpp, little-endian + + + like NV12, with 16bpp per component, little-endian + + + like NV12, with 16bpp per component, big-endian + + + Hardware surfaces for Direct3D11. 
+ + + Y , 9bpp, big-endian + + + Y , 9bpp, little-endian + + + IEEE-754 single precision planar GBR 4:4:4, 96bpp, big-endian + + + IEEE-754 single precision planar GBR 4:4:4, 96bpp, little-endian + + + IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, big-endian + + + IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, little-endian + + + DRM-managed buffers exposed through PRIME buffer sharing. + + + number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions + + + Rounding methods. + + + Round toward zero. + + + Round away from zero. + + + Round toward -infinity. + + + Round toward +infinity. + + + Round to nearest and halfway cases away from zero. + + + Flag telling rescaling functions to pass `INT64_MIN`/`MAX` through unchanged, avoiding special cases for #AV_NOPTS_VALUE. + + + Audio sample formats + + + unsigned 8 bits + + + signed 16 bits + + + signed 32 bits + + + float + + + double + + + unsigned 8 bits, planar + + + signed 16 bits, planar + + + signed 32 bits, planar + + + float, planar + + + double, planar + + + signed 64 bits + + + signed 64 bits, planar + + + Number of sample formats. DO NOT USE if linking dynamically + + + @} + + + full parsing and repack + + + Only parse headers, do not repack. + + + full parsing and interpolation of timestamps for frames not starting on a packet boundary + + + full parsing and repack of the first frame only, only implemented for H.264 currently + + + full parsing and repack with timestamp and position generation by parser for raw this assumes that each packet in the file contains no demuxer level headers and just codec level data, otherwise position generation would fail + + + A bitmap, pict will be set + + + Plain text, the text field must be set by the decoder and is authoritative. ass and pict fields may contain approximations. + + + Formatted text, the ass field must be set by the decoder and is authoritative. pict and text fields may contain approximations. + + + timecode is drop frame + + + timecode wraps after 24 hours + + + negative time values are allowed + + + no search, that is use 0,0 vector whenever one is needed + + + enhanced predictive zonal search + + + reserved for experiments + + + hexagon based search + + + uneven multi-hexagon search + + + transformed exhaustive search algorithm + + + iterative search + + + Dithering algorithms + + + not part of API/ABI + + + not part of API/ABI + + + Resampling Engines + + + SW Resampler + + + SoX Resampler + + + not part of API/ABI + + + Resampling Filter Types + + + Cubic + + + Blackman Nuttall windowed sinc + + + Kaiser windowed sinc + Rational number (pair of numerator and denominator). @@ -4037,6 +6723,9 @@ Number of componentes. + + Structure to hold side data for an AVFrame. + A reference to a data buffer. @@ -4046,9 +6735,6 @@ Size of data in bytes. - - Structure to hold side data for an AVFrame. - This structure describes decoded (raw) audio or video data. @@ -4062,10 +6748,10 @@ pointers to the data planes/channels. - width and height of the video frame + Video frames only. The coded dimensions (in pixels) of the video frame, i.e. the size of the rectangle that contains some well-defined values. - width and height of the video frame + Video frames only. The coded dimensions (in pixels) of the video frame, i.e. the size of the rectangle that contains some well-defined values. 
number of audio samples (per channel) described by this frame @@ -4175,8 +6861,8 @@ AVBufferRef for free use by the API user. FFmpeg will never check the contents of the buffer ref. FFmpeg calls av_buffer_unref() on it when the frame is unreferenced. av_frame_copy_props() calls create a new reference with av_buffer_ref() for the target frame's opaque_ref field. - - the default value for scalar options + + cropping Video frames only. The number of pixels to discard from the the top/bottom/left/right border of the frame to obtain the sub-rectangle of the frame intended for presentation. @{ A single allowed range of values, or a single allowed value. @@ -4196,6 +6882,30 @@ Range flag. If set to 1 the struct encodes a range, if set to 0 a single value. + + the default value for scalar options + + + Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes of an image. It also stores the subsampling factors and number of components. + + + The number of components each pixel has, (1-4) + + + Amount to shift the luma width right to find the chroma width. For YV12 this is 1 for example. chroma_width = AV_CEIL_RSHIFT(luma_width, log2_chroma_w) The note above is needed to ensure rounding up. This value only refers to the chroma components. + + + Amount to shift the luma height right to find the chroma height. For YV12 this is 1 for example. chroma_height= AV_CEIL_RSHIFT(luma_height, log2_chroma_h) The note above is needed to ensure rounding up. This value only refers to the chroma components. + + + Combination of AV_PIX_FMT_FLAG_... flags. + + + Parameters that describe how pixels are packed. If the format has 1 or 2 components, then luma is 0. If the format has 3 or 4 components: if the RGB flag is set then 0 is red, 1 is green and 2 is blue; otherwise 0 is luma, 1 is chroma-U and 2 is chroma-V. + + + Alternative comma-separated names. + Which of the 4 planes contains the component. @@ -4220,27 +6930,6 @@ deprecated, use offset instead - - Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes of an image. It also stores the subsampling factors and number of components. - - - The number of components each pixel has, (1-4) - - - Amount to shift the luma width right to find the chroma width. For YV12 this is 1 for example. chroma_width = AV_CEIL_RSHIFT(luma_width, log2_chroma_w) The note above is needed to ensure rounding up. This value only refers to the chroma components. - - - Amount to shift the luma height right to find the chroma height. For YV12 this is 1 for example. chroma_height= AV_CEIL_RSHIFT(luma_height, log2_chroma_h) The note above is needed to ensure rounding up. This value only refers to the chroma components. - - - Combination of AV_PIX_FMT_FLAG_... flags. - - - Parameters that describe how pixels are packed. If the format has 1 or 2 components, then luma is 0. If the format has 3 or 4 components: if the RGB flag is set then 0 is red, 1 is green and 2 is blue; otherwise 0 is luma, 1 is chroma-U and 2 is chroma-V. - - - Alternative comma-separated names. - timecode frame start (first base frame number) @@ -4346,6 +7035,45 @@ Certain drivers require the decoder to be destroyed before the surfaces. To allow internally managed pools to work properly in such cases, this field is provided. + + This struct is allocated as AVHWDeviceContext.hwctx + + + Device used for texture creation and access. This can also be used to set the libavcodec decoding device. 
+ + + If unset, this will be set from the device field on init. + + + If unset, this will be set from the device field on init. + + + If unset, this will be set from the device_context field on init. + + + Callbacks for locking. They protect accesses to device_context and video_context calls. They also protect access to the internal staging texture (for av_hwframe_transfer_data() calls). They do NOT protect access to hwcontext or decoder state in general. + + + D3D11 frame descriptor for pool allocation. + + + The texture in which the frame is located. The reference count is managed by the AVBufferRef, and destroying the reference will release the interface. + + + The index into the array texture element representing the frame, or 0 if the texture is not an array texture. + + + This struct is allocated as AVHWFramesContext.hwctx + + + The canonical texture used for pool allocation. If this is set to NULL on init, the hwframes implementation will allocate and set an array texture if initial_pool_size > 0. + + + D3D11_TEXTURE2D_DESC.BindFlags used for texture creation. The user must at least set D3D11_BIND_DECODER if the frames context is to be used for video decoding. This field is ignored/invalid if a user-allocated texture is provided. + + + D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation. This field is ignored/invalid if a user-allocated texture is provided. + pointer to the list of coefficients @@ -4868,6 +7596,9 @@ ',' separated list of allowed decoders. If NULL then all are allowed - encoding: unused - decoding: set by user + + Properties of the stream that gets decoded - encoding: unused - decoding: set by libavcodec + Additional data associated with the entire coded stream. @@ -4889,6 +7620,9 @@ Bit set of AV_HWACCEL_FLAG_* flags, which affect hardware accelerated decoding (if active). - encoding: unused - decoding: Set by user (either before avcodec_open2(), or in the AVCodecContext.get_format callback) + + Video decoding only. Certain video codecs support cropping, meaning that only a sub-rectangle of the decoded frame is intended for display. This option controls how cropping is handled by libavcodec. + AVCodec. @@ -4944,7 +7678,10 @@ Encode data to an AVPacket. - Decode/encode API with decoupled packet/frame dataflow. The API is the same as the avcodec_ prefixed APIs (avcodec_send_frame() etc.), except that: - never called if the codec is closed or the wrong type, - AVPacket parameter change side data is applied right before calling AVCodec->send_packet, - if AV_CODEC_CAP_DELAY is not set, drain packets or frames are never sent, - only one drain packet is ever passed down (until the next flush()), - a drain AVPacket is always NULL (no need to check for avpkt->size). + Encode API with decoupled packet/frame dataflow. The API is the same as the avcodec_ prefixed APIs (avcodec_send_frame() etc.), except that: - never called if the codec is closed or the wrong type, - if AV_CODEC_CAP_DELAY is not set, drain frames are never sent, - only one drain frame is ever passed down, + + + Decode API with decoupled packet/frame dataflow. This function is called to get one output frame. It should call ff_decode_get_packet() to obtain input data. Flush buffers. Will be called when seeking @@ -4952,6 +7689,9 @@ Internal codec capabilities. See FF_CODEC_CAP_* in internal.h + + Decoding only, a comma-separated list of bitstream filters to apply to packets before decoding. + Same as packet pts, in AV_TIME_BASE @@ -5004,7 +7744,7 @@ Supported pixel format. 
- Hardware accelerated codec capabilities. see HWACCEL_CODEC_CAP_* + Hardware accelerated codec capabilities. see AV_HWACCEL_CODEC_CAP_* *************************************************************** No fields below this line are part of the public API. They may not be used outside of libavcodec and can be changed and removed at will. New public fields should be added right above. **************************************************************** @@ -5769,7 +8509,7 @@ position in the file of the current buffer - true if the next seek should flush + unused true if eof reached @@ -5828,6 +8568,12 @@ A callback that is used instead of short_seek_threshold. This is current internal only, do not use from outside. + + Maximum reached position before a backward seek in the write buffer, used keeping track of already written data for a later flush. + + + Try to buffer at least this amount of data before flushing it + Callback for checking whether to abort blocking functions. AVERROR_EXIT is returned in this case by the interrupted function. During blocking operations, callback is called with opaque as parameter. If the callback returns 1, the blocking operation will be aborted. @@ -5948,6 +8694,9 @@ A combination of AVFILTER_FLAG_* + + Filter pre-initialization function + Filter initialization function. @@ -6194,5 +8943,95 @@ height + + + Supports loading functions from native libraries. Provides a more flexible alternative to P/Invoke. + + + + + Creates a delegate which invokes a native function. + + + The function delegate. + + + The native library which contains the function. + + + The name of the function for which to create the delegate. + + + A new delegate which points to the native function. + + + + + Attempts to load a native library using platform nammig convention. + + Path of the library. + Name of the library. + Version of the library. + + A handle to the library when found; otherwise, . + + + This function may return a null handle. If it does, individual functions loaded from it will throw a + DllNotFoundException, + but not until an attempt is made to actually use the function (rather than load it). This matches how PInvokes + behave. + + + + + Attempts to load a native library. + + Path of the library. + Name of the library. + Version of the library. + + A handle to the library when found; otherwise, . + + + This function may return a null handle. If it does, individual functions loaded from it will throw a + DllNotFoundException, + but not until an attempt is made to actually use the function (rather than load it). This matches how PInvokes + behave. + + + + + Loads the specified module into the address space of the calling process. The specified module may cause other modules to be loaded. + + + + The name of the module. This can be either a library module (a .dll file) or an executable module (an .exe file). + The name specified is the file name of the module and is not related to the name stored in the library module itself, + as specified by the LIBRARY keyword in the module-definition (.def) file. + + + If the string specifies a full path, the function searches only that path for the module. + + + If the string specifies a relative path or a module name without a path, the function uses a standard search strategy + to find the module; for more information, see the Remarks. + + + If the function cannot find the module, the function fails. When specifying a path, be sure to use backslashes (\), + not forward slashes (/). 
For more information about paths, see Naming a File or Directory. + + + If the string specifies a module name without a path and the file name extension is omitted, the function appends the + default library extension .dll to the module name. To prevent the function from appending .dll to the module name, + include a trailing point character (.) in the module name string. + + + + If the function succeeds, the return value is a handle to the module. + If the function fails, the return value is . To get extended error information, call + . + + + diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/ffme.common.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/ffme.common.dll new file mode 100644 index 0000000..ba41780 Binary files /dev/null and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/ffme.common.dll differ diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/References/ffme.xml b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/ffme.common.xml similarity index 61% rename from QuickLook.Plugin/QuickLook.Plugin.VideoViewer/References/ffme.xml rename to QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/ffme.common.xml index 6a01b3a..dd5774d 100644 --- a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/References/ffme.xml +++ b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/ffme.common.xml @@ -1,9 +1,707 @@ - ffme + ffme.common + + + Enumerates the differen Closed-Captioning Colors + + + + + No color + + + + + The white color + + + + + The white transparent color + + + + + The green color + + + + + The green transparent color + + + + + The blue color + + + + + The blue transparent color + + + + + The cyan color + + + + + The cyan transparent color + + + + + The red color + + + + + The red transparent color + + + + + The yellow color + + + + + The yellow transparent color + + + + + The magenta color + + + + + The magenta transparent color + + + + + The white italics color + + + + + The white italics transparent color + + + + + The background transparent color + + + + + The foreground black color + + + + + The foreground black underline color + + + + + Enumerates the Closed-Captioning misc commands + + + + + No command + + + + + The resume command + + + + + The backspace command + + + + + The alarm off command + + + + + The alarm on command + + + + + The clear line command + + + + + The roll up2 command + + + + + The roll up3 command + + + + + The roll up4 command + + + + + The start caption command + + + + + The star non caption command + + + + + The resume non caption command + + + + + The clear screen command + + + + + The new line command + + + + + The clear buffer command + + + + + The end caption command + + + + + Defines Closed-Captioning Packet types + + + + + The unrecognized packet type + + + + + The null pad packet type + + + + + The XDS class packet type + + + + + The misc command packet type + + + + + The text packet type + + + + + The mid row packet type + + + + + The preamble packet type + + + + + The color packet type + + + + + The charset packet type + + + + + The tabs packet type + + + + + Enumerates the differen Closed-Captioning Styles + + + + + The none style + + + + + The white style + + + + + The white underline style + + + + + The green style + + + + + The green underline style + + + + + The blue style + + + + + The blue underline style + + + + + The cyan style + + + + + The cyan underline style + + + + + The red style + + + + + The red underline style + + + + + The yellow style + + + + + The yellow underline style + + + + + The 
magenta style + + + + + The magenta underline style + + + + + The white italics style + + + + + The white italics underline style + + + + + The white indent0 style + + + + + The white indent0 underline style + + + + + The white indent4 style + + + + + The white indent4 underline style + + + + + The white indent8 style + + + + + The white indent8 underline style + + + + + The white indent12 style + + + + + The white indent12 underline style + + + + + The white indent16 style + + + + + The white indent16 underline style + + + + + The white indent20 style + + + + + The white indent20 underline style + + + + + The white indent24 style + + + + + The white indent24 underline style + + + + + The white indent28 style + + + + + The white indent28 underline style + + + + + Defines Closed-Captioning XDS Packet Classes + + + + + The none XDS Class + + + + + The current start XDS Class + + + + + The current continue XDS Class + + + + + The future start XDS Class + + + + + The future continue XDS Class + + + + + The channel start XDS Class + + + + + The channel continue XDS Class + + + + + The misc start XDS Class + + + + + The misc continue XDS Class + + + + + The public service start XDS Class + + + + + The public service continue XDS Class + + + + + The reserved start XDS Class + + + + + The reserved continue XDS Class + + + + + The private start XDS Class + + + + + The private continue XDS Class + + + + + The end all XDS Class + + + + + Represents a set of Closed Captioning Tracks + in a stream of CC packets. + + + + + The CC1 Track Packets + + + + + The CC2 Track Packets + + + + + The CC3 Track Packets + + + + + The CC4 Track Packets + + + + + Adds the specified packet and automatically places it on the right track. + If the track requires sorting it does so by reordering packets based on their timestamp. + + The item. + + + + Represents a 3-byte packet of closed-captioning data in EIA-608 format. + See: http://jackyjung.tistory.com/attachment/499e14e28c347DB.pdf + + + + + Initializes a new instance of the class. + + The timestamp. + The source. + The offset. + + + + Initializes a new instance of the class. + + The timestamp. + The header. + The d0. + The d1. + + + + Gets the original packet data. + + + + + Gets the first of the two-byte packet data + + + + + Gets the second of the two-byte packet data + + + + + Gets the timestamp this packet applies to. + + + + + Gets the NTSC field (1 or 2). + 0 for unknown/null packet + + + + + Gets the channel. 0 for any, 1 or 2 for specific channel toggle. + 0 just means to use what a prior packet had specified. + + + + + Gets the type of the packet. + + + + + Gets the number of tabs, if the packet type is of Tabs + + + + + Gets the Misc Command, if the packet type is of Misc Command + + + + + Gets the Color, if the packet type is of Color + + + + + Gets the Style, if the packet type is of Mid Row Style + + + + + Gets the XDS Class, if the packet type is of XDS + + + + + Gets the Preamble Row Number (1 through 15), if the packet type is of Preamble + + + + + Gets the Style, if the packet type is of Preamble + + + + + Gets the text, if the packet type is of text. + + + + + Returns a that represents this instance. + + + A that represents this instance. + + + + + Compares the current instance with another object of the same type and returns an integer that indicates whether the current instance precedes, follows, or occurs in the same position in the sort order as the other object. + + An object to compare with this instance. 
+ + A value that indicates the relative order of the objects being compared. The return value has these meanings: Value Meaning Less than zero This instance precedes in the sort order. Zero This instance occurs in the same position in the sort order as . Greater than zero This instance follows in the sort order. + + + + + Checks that the header byte starts with 11111b (5 ones binary) + + The data. + If header has markers + + + + Determines whether the valid flag of the header byte is set. + + The data. + + true if [is header valid falg set] [the specified data]; otherwise, false. + + + + + Gets the NTSC field type (1 or 2). + Returns 0 for unknown. + + The data. + The field type + + + + Determines whether the data is null padding + + The d0. + The d1. + + true if [is empty channel data] [the specified d0]; otherwise, false. + + + + + Drops the parity bit from the data byte. + + The input. + The byte without a parity bit. + + + + Converst an ASCII character code to an EIA-608 char (in Unicode) + + The input. + The charset char. + Implements the logic to close a media stream. @@ -12,7 +710,7 @@ - Initializes a new instance of the class. + Initializes a new instance of the class. The media element. @@ -21,82 +719,11 @@ Executes this command. - - - Implements the logic to open a media stream. - - - - - - Initializes a new instance of the class. - - The manager. - The source. - - - - Gets the source uri of the media stream. - - - - - Performs the actions that this command implements. - - - - - Creates a new instance of the renderer of the given type. - - Type of the media. - The renderer that was created - mediaType has to be of a vild type - - - - Implements the logic to pause the media stream - - - - - - Initializes a new instance of the class. - - The manager. - - - - Performs the actions that this command implements. - - - - - Implements the logic to start or resume media playback - - - - - - Initializes a new instance of the class. - - The media element. - - - - Performs the actions that this command implements. - - Represents a command to be executed against an intance of the MediaElement - - - Set when the command has finished execution. - Do not use this field directly. It is managed internally by the command manager. - - Initializes a new instance of the class. @@ -119,79 +746,134 @@ Gets a value indicating whether this command is marked as completed. + + + Gets the task that this command will run. + + + + + Gets a value indicating whether this instance is running. + + + true if this instance is running; otherwise, false. + + Marks the command as completed. - + - Executes the code for the command + Executes the code for the command asynchronously + The awaitable task + + + + Executes the command Synchronously. + + + + + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + + + + + Returns a that represents this instance. + + + A that represents this instance. + Performs the actions that this command implements. + + + Releases unmanaged and - optionally - managed resources. + + true to release both managed and unmanaged resources; false to release only unmanaged resources. + - Represents a singlo point of contact for media command excution. + Represents a single point of contact for media command excution. - + Initializes a new instance of the class. - The media element. + The media element. Gets the number of commands pending execution. - + - Gets the parent media element. + Gets the core platform independent player component. 
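The EIA-608 header and parity handling documented above reduces to simple bit masks. A minimal sketch of that documented behavior (class and method names are illustrative, not the library's):

internal static class Eia608BitExample
{
    // A header byte "starts with 11111b" when its five most significant bits are all set.
    public static bool HasHeaderMarkers(byte header) => (header & 0xF8) == 0xF8;

    // EIA-608 bytes carry odd parity in the top bit; keep only the low 7 data bits.
    public static byte DropParityBit(byte input) => (byte)(input & 0x7F);
}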
- + + + Gets a value indicating whether commands can be executed. + Returns false if an Opening or Closing Command is in progress. + + + true if this instance can execute commands; otherwise, false. + + + Opens the specified URI. - The command is processed in a Thread Pool Thread. + This command gets processed in a threadpool thread asynchronously. The URI. + The asynchronous task - + + + Closes the specified media. + This command gets processed in a threadpool thread asynchronously. + + Returns the background task. + + Starts playing the open media URI. + The awaitable command - + Pauses the media. + The awaitable command - + Pauses and rewinds the media + This command invalidates all queued commands + The awaitable command Seeks to the specified position within the media. + This command is a queued command The position. - - - Closes the specified media. - This command gets processed in a threadpool thread. - - Sets the playback speed ratio. + This command is a queued command The target speed ratio. @@ -214,18 +896,17 @@ The command. - + - Waits for the command to complete execution. + Outputs the state of the queue - The command. + The operation. + if set to true [output empty]. - + - Calls the execution of the given command instance - and wait for its completion without blocking the dispatcher + Clears the command queue. - The command. @@ -267,6 +948,63 @@ The set speed ratio command + + + Implements the logic to open a media stream. + + + + + + Initializes a new instance of the class. + + The manager. + The source. + + + + Gets the source uri of the media stream. + + + + + Performs the actions that this command implements. + + + + + Implements the logic to pause the media stream + + + + + + Initializes a new instance of the class. + + The manager. + + + + Performs the actions that this command implements. + + + + + Implements the logic to start or resume media playback + + + + + + Initializes a new instance of the class. + + The media element. + + + + Performs the actions that this command implements. + + Implements the logic to seek on the media stream @@ -333,70 +1071,514 @@ Performs the actions that this command implements. - + - Fast, atomioc boolean combining interlocked to write value and volatile to read values - Idea taken from Memory model and .NET operations in article: - http://igoro.com/archive/volatile-keyword-in-c-memory-model-explained/ + Contains audio format properties essential + to audio processing and resampling in FFmpeg libraries - + - Initializes a new instance of the class. + The standard output audio spec - + - Gets the latest value written by any of the processors in the machine - Setting + Initializes static members of the class. - + - Fast, atomioc double combining interlocked to write value and volatile to read values - Idea taken from Memory model and .NET operations in article: - http://igoro.com/archive/volatile-keyword-in-c-memory-model-explained/ + Prevents a default instance of the class from being created. - + - Initializes a new instance of the class. + Initializes a new instance of the class. + + The frame. + + + + Gets the channel count. - + - Gets or sets the latest value written by any of the processors in the machine + Gets the channel layout. - + - Fast, atomioc long combining interlocked to write value and volatile to read values - Idea taken from Memory model and .NET operations in article: - http://igoro.com/archive/volatile-keyword-in-c-memory-model-explained/ + Gets the samples per channel. - + - Initializes a new instance of the class. 
+ Gets the audio sampling rate. - + - Gets or sets the latest value written by any of the processors in the machine + Gets the sample format. - + - Manual additions to API calls not available in FFmpeg.Autogen + Gets the length of the buffer required to store + the samples in the current format. - + + + Creates a source audio spec based on the info in the given audio frame + + The frame. + The audio parameters + + + + Creates a target audio spec using the sample quantities provided + by the given source audio frame + + The frame. + The audio parameters + + + + Determines if the audio specs are compatible between them. + They must share format, channel count, layout and sample rate + + a. + The b. + True if the params are compatible, flase otherwise. + + + + An AVDictionary management class + + + + + To detect redundant Dispose calls + + + + + Initializes a new instance of the class. + + + + + Initializes a new instance of the class. + + The other. + + + + Gets the number of elements in the dictionary + + + The count. + + + + + Gets or sets the value with the specified key. + + + The . + + The key. + The entry + + + + Converts the AVDictionary to a regular dictionary. + + The dictionary to convert from. + the converterd dictionary + + + + A wrapper for the av_dict_get method + + The dictionary. + The key. + if set to true [match case]. + The Entry + + + + Fills this dictionary with a set of options + + The other dictionary (source) + + + + Gets the first entry. Null if no entries. + + The entry + + + + Gets the next entry based on the provided prior entry. + + The prior entry. + The entry + + + + Determines if the given key exists in the dictionary + + The key. + if set to true [match case]. + True or False + + + + Gets the entry given the key. + + The key. + if set to true [match case]. + The entry + + + + Gets the value with specified key. + + The key. + The value + + + + Sets the value for the specified key. + + The key. + The value. + + + + Sets the value for the specified key. + + The key. + The value. + if set to true [dont overwrite]. + + + + Removes the entry with the specified key. + + The key. + + + + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + + + + + Releases unmanaged and - optionally - managed resources. + + true to release both managed and unmanaged resources; false to release only unmanaged resources. + + + + An AVDictionaryEntry wrapper + + + + + Initializes a new instance of the class. + + The entry pointer. + + + + Gets the key. + + + + + Gets the value. + + + + + Provides a set of utilities to perfrom logging, text formatting, + conversion and other handy calculations. + + + + + True when libraries were initialized correctly. + + + + + Gets the libraries path. Only filled when initialized correctly. + + + + + Gets the bitwise FFmpeg library identifiers that were loaded. + + + + + Registers FFmpeg library and initializes its components. + It only needs to be called once but calling it more than once + has no effect. Returns the path that FFmpeg was registered from. + This method is thread-safe. + + The override path. + The bitwaise flag identifiers corresponding to the libraries. + + Returns true if it was a new initialization and it succeeded. False if there was no need to initialize + as there is already a valid initialization. + + When ffmpeg libraries are not found + + Gets the FFmpeg error mesage based on the error code - The code. + The code. 
The decoded error message + + + Converts a byte pointer to a string + + The byte PTR. + The string + + + + Converts a byte pointer to a UTF8 encoded string. + + The byte PTR. + The string + + + + Defines FFmpeg library metadata and access. + It allows for the loading of individual libraries. + + + + + The load lock preventing libraries to load at the same time. + + + + + Initializes static members of the class. + + + + + Initializes a new instance of the class. + + The name. + The version. + The flag identifier. + + + + Gets all the libraries as a collection. + + + + + Gets the AVCodec library. + + + + + Gets the AVFormat library. + + + + + Gets the AVUtil library. + + + + + Gets the SWResample library. + + + + + Gets the SWScale library. + + + + + Gets the AVDevice library. + + + + + Gets the AVFilter library. + + + + + Gets the flag identifier. + + + + + Gets the name of the library. + + + + + Gets the base path from where the library was loaded. + Returns null if it has not been loaded. + + + + + Gets the library version. + + + + + Gets the pointer reference to the library. + > + + + + Gets a value indicating whether the library has already been loaded. + + + + + Gets the load error code. 0 for success. + + + + + Loads the library from the specified path. + + The base path. + True if the registration was successful + When library has already been loaded. + + + + Defines the library names as constants + + + + + A lock manager for FFmpeg libraries + + + + + The register lock + + + + + Keeps track of the unmanaged and managed locking structures for the FFmpeg libraries to use. + + + + + The registration state + + + + + Gets a value indicating whether the lock manager has registered. + + + + + Gets the FFmpeg lock manager callback. + Example: ffmpeg.av_lockmgr_register(FFLockManager.LockOpCallback); + + + + + Registers the lock manager. If it has been registered it does not do it again. + Thi method is thread-safe. + + + + + Manages FFmpeg Multithreaded locking + + The mutex. + The op. + + 0 for success, 1 for error + + + + + A queue-based logger that automatically stats a background timer that + empties the queue constantly, at low priority. + + + + + Initializes static members of the class. + + + + + Gets the FFmpeg log callback method. + Example: ffmpeg.av_log_set_callback(LoggingWorker.FFmpegLogCallback); + + + + + Starts to listen to FFmpeg logging messages. + This method is not thread-safe. + + + + + Logs the specified message. This the genric logging mechanism available to all classes. + + The sender. + Type of the message. + The message. + sender + When sender is null + + + + Logs a block rendering operation as a Trace Message + if the debugger is attached. + + The media engine. + The block. + The clock position. + Index of the render. + + + + Logs the specified message. This the way ffmpeg messages are logged. + + Type of the message. + The message. + + + + Log message callback from ffmpeg library. + + The p0. + The level. + The format. + The vl. + A reference counter to keep track of unmanaged objects @@ -529,1539 +1711,221 @@ A reference entry - - - Represents a generic Logger - - The sender's concrete type - - - - - Initializes a new instance of the class. - - The sender. - - - - Holds a reference to the sender. - - - - - Logs the specified message. - - Type of the message. - The message. - - - - A very simple and standard interface for message logging - - - - - Logs the specified message of the given type. - - Type of the message. - The message. 
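The error-message helper documented at the top of this hunk maps an FFmpeg error code to readable text; a hedged FFmpeg.AutoGen sketch of that lookup (names illustrative; binding signatures may differ slightly between versions):

using System;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;

internal static unsafe class FFErrorTextExample
{
    // Decode an AVERROR code into a readable message via av_strerror.
    public static string Describe(int errorCode)
    {
        const int bufferSize = 1024;
        byte* buffer = stackalloc byte[bufferSize];
        ffmpeg.av_strerror(errorCode, buffer, (ulong)bufferSize);
        return Marshal.PtrToStringAnsi((IntPtr)buffer);
    }
}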
- - - - Represents a very simple dictionary for MediaType keys - - The type of the value. - - - - Initializes a new instance of the class. - - - - - Gets or sets the item with the specified key. - return the default value of the value type when the key does not exist. - - The key. - The item - - - - FFmpeg Registration Native Methods - - - - - Sets the DLL directory in which external dependencies can be located. - - the full path. - True if set, false if not set - - - - Fast pointer memory block copy function - - The destination. - The source. - The length. - - - - Fills the memory. - - The destination. - The length. - The fill. - - - - Provides helpers tor un code in different modes on the UI dispatcher. - - - - - Gets the UI dispatcher. - - - - - Synchronously invokes the given instructions on the main application dispatcher. - - The priority. - The action. - - - - Enqueues the given instructions with the given arguments on the main application dispatcher. - This is a way to execute code in a fire-and-forget style - - The priority. - The action. - The arguments. - - - - Exits the execution frame. - - The f. - Always a null value - - - - A fixed-size buffer that acts as an infinite length one. - This buffer is backed by unmanaged, very fast memory so ensure you call - the dispose method when you are donde using it. - - - - - - The locking object to perform synchronization. - - - - - To detect redundant calls - - - - - The unbmanaged buffer - - - - - Initializes a new instance of the class. - - Length of the buffer. - - - - Finalizes an instance of the class. - - - - - Gets the capacity of this buffer. - - - - - Gets the current, 0-based read index - - - - - Gets the maximum rewindable amount of bytes. - - - - - Gets the current, 0-based write index. - - - - - Gets an the object associated with the last write - - - - - Gets the available bytes to read. - - - - - Gets the number of bytes that can be written. - - - - - Gets percentage of used bytes (readbale/available, from 0.0 to 1.0). - - - - - Skips the specified amount requested bytes to be read. - - The requested bytes. - When requested bytes GT readable count - - - - Rewinds the read position by specified requested amount of bytes. - - The requested bytes. - When requested GT rewindable - - - - Reads the specified number of bytes into the target array. - - The requested bytes. - The target. - The target offset. - When requested GT readble - - - - Writes data to the backing buffer using the specified pointer and length. - and associating a write tag for this operation. - - The source. - The length. - The write tag. - if set to true, overwrites the data even if it has not been read. - Read - When read needs to be called more! - - - - Resets all states as if this buffer had just been created. - - - - - Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. - - - - - Releases unmanaged and - optionally - managed resources. - - true to release both managed and unmanaged resources; false to release only unmanaged resources. - - + A time measurement artifact. - + - Initializes a new instance of the class. + Initializes a new instance of the class. The clock starts poaused and at the 0 position. - + Gets or sets the clock position. - + Gets a value indicating whether the clock is running. - + Gets or sets the speed ratio at which the clock runs. - + Starts or resumes the clock. - + Pauses the clock. - + Sets the clock position to 0 and stops it. The speed ratio is not modified. 
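The clock described above is essentially a stopwatch position scaled by a speed ratio, with reset leaving the ratio untouched. A rough sketch of that idea under those assumptions (not FFME's implementation):

using System;
using System.Diagnostics;

internal class SpeedRatioClockExample
{
    private readonly Stopwatch _stopwatch = new Stopwatch();
    private TimeSpan _offset = TimeSpan.Zero;

    public double SpeedRatio { get; set; } = 1.0;   // a real clock rebases the offset when this changes

    public TimeSpan Position =>
        _offset + TimeSpan.FromTicks((long)(_stopwatch.Elapsed.Ticks * SpeedRatio));

    public bool IsRunning => _stopwatch.IsRunning;

    public void Play() => _stopwatch.Start();

    public void Pause()
    {
        _offset = Position;      // capture the scaled position before stopping
        _stopwatch.Reset();
    }

    public void Reset()
    {
        _offset = TimeSpan.Zero; // back to zero and stopped; SpeedRatio is left untouched
        _stopwatch.Reset();
    }
}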
- - - Defines library-wide constants - - - - - Determines if the av_lockmgr_register is called. - If this is set to false, then the number of threads will be set to 1. - - - - - Contains audio format properties essential - to audio resampling - - - - - The standard output audio spec - - - - - Initializes static members of the class. - - - - - Prevents a default instance of the class from being created. - - - - - Initializes a new instance of the class. - - The frame. - - - - Gets the channel count. - - - - - Gets the channel layout. - - - - - Gets the samples per channel. - - - - - Gets the audio sampling rate. - - - - - Gets the sample format. - - - - - Gets the length of the buffer required to store - the samples in the current format. - - - - - Creates a source audio spec based on the info in the given audio frame - - The frame. - The audio parameters - - - - Creates a target audio spec using the sample quantities provided - by the given source audio frame - - The frame. - The audio parameters - - - - Determines if the audio specs are compatible between them. - They must share format, channel count, layout and sample rate - - a. - The b. - True if the params are compatible, flase otherwise. - - - - A single codec option along with a stream specifier. - - - - - Initializes a new instance of the class. - - The spec. - The key. - The value. - - - - Gets or sets the stream specifier. - - - - - Gets or sets the option name - - - - - Gets or sets the option value. - - - - - Enumerates the different Media Types - - - - - Represents an unexisting media type (-1) - - - - - The video media type (0) - - - - - The audio media type (1) - - - - - The subtitle media type (3) - - - - - An AVDictionaryEntry wrapper - - - - - Initializes a new instance of the class. - - The entry pointer. - - - - Gets the key. - - - - - Gets the value. - - - - - An AVDictionary management class - - - - - To detect redundant Dispose calls - - - - - Initializes a new instance of the class. - - - - - Initializes a new instance of the class. - - The other. - - - - Gets the number of elements in the dictionary - - - The count. - - - - - Gets or sets the value with the specified key. - - - The . - - The key. - The entry - - - - Converts the AVDictionary to a regular dictionary. - - The dictionary to convert from. - the converterd dictionary - - - - A wrapper for the av_dict_get method - - The dictionary. - The key. - if set to true [match case]. - The Entry - - - - Fills this dictionary with a set of options - - The other dictionary (source) - - - - Gets the first entry. Null if no entries. - - The entry - - - - Gets the next entry based on the provided prior entry. - - The prior entry. - The entry - - - - Determines if the given key exists in the dictionary - - The key. - if set to true [match case]. - True or False - - - - Gets the entry given the key. - - The key. - if set to true [match case]. - The entry - - - - Gets the value with specified key. - - The key. - The value - - - - Sets the value for the specified key. - - The key. - The value. - - - - Sets the value for the specified key. - - The key. - The value. - if set to true [dont overwrite]. - - - - Removes the entry with the specified key. - - The key. - - + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. - + Releases unmanaged and - optionally - managed resources. true to release both managed and unmanaged resources; false to release only unmanaged resources. 
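The dictionary wrapper documented above sits on top of the raw av_dict_* calls; a hedged FFmpeg.AutoGen sketch of the enumeration it is described as performing (names illustrative):

using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;

internal static unsafe class DictionaryExample
{
    // Walk every entry of an AVDictionary into a managed dictionary.
    public static Dictionary<string, string> ToManaged(AVDictionary* dictionary)
    {
        var result = new Dictionary<string, string>();
        AVDictionaryEntry* entry = null;
        while ((entry = ffmpeg.av_dict_get(dictionary, string.Empty, entry, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
        {
            result[Marshal.PtrToStringAnsi((IntPtr)entry->key)] =
                Marshal.PtrToStringAnsi((IntPtr)entry->value);
        }
        return result;
    }
}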
- + - A managed representation of an FFmpeg stream specifier + Provides audio sample extraction, decoding and scaling functionality. + + + + + + Holds a reference to the audio resampler + This resampler gets disposed upon disposal of this object. - + - Initializes a new instance of the class. + Used to determine if we have to reset the scaler parameters - + - Initializes a new instance of the class. + Initializes a new instance of the class. - The stream identifier. - streamId + The container. + Index of the stream. - + - Initializes a new instance of the class. + Gets the number of audio channels. - Type of the media. - streamType - + - Initializes a new instance of the class. + Gets the audio sample rate. - Type of the media. - The stream identifier. - - streamType + + + + Gets the bits per sample. + + + + + Converts decoded, raw frame data in the frame source into a a usable frame.
+ The process includes performing picture, samples or text conversions + so that the decoded source frame data is easily usable in multimedia applications +
+ The source frame to use as an input. + The target frame that will be updated with the source frame. If null is passed the frame will be instantiated. + The sibling blocks that may help guess some additional parameters for the input frame. + + Return the updated output frame + + input +
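The audio materialization described above leans on libswresample; a minimal hedged sketch of the conversion call involved, assuming an already-configured SwrContext and a caller-managed output buffer (not the library's code):

using FFmpeg.AutoGen;

internal static unsafe class ResampleExample
{
    // Convert the samples of a decoded frame through an already-configured resampler.
    // Returns the number of samples written per channel, or a negative AVERROR code.
    public static int Convert(SwrContext* resampler, AVFrame* frame, byte* output, int maxOutputSamples)
    {
        return ffmpeg.swr_convert(resampler, &output, maxOutputSamples,
            frame->extended_data, frame->nb_samples);
    }
}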
+ + + Creates a frame source object given the raw FFmpeg frame reference. + + The raw FFmpeg frame pointer. + The media frame + + + + Releases unmanaged and - optionally - managed resources. + + true to release both managed and unmanaged resources; false to release only unmanaged resources. + + + + Destroys the filtergraph releasing unmanaged resources. + + + + + Computes the frame filter arguments that are appropriate for the audio filtering chain. + + The frame. + The base filter arguments + + + + If necessary, disposes the existing filtergraph and creates a new one based on the frame arguments. + + The frame. + + avfilter_graph_create_filter or - streamId + avfilter_graph_create_filter + or + avfilter_link + or + avfilter_graph_parse + or + avfilter_graph_config - + - Provides suffixes for the different media types. + Represents a wrapper from an unmanaged FFmpeg audio frame + + + + + + + Initializes a new instance of the class. + + The frame. + The component. + + + + Finalizes an instance of the class. - + - Gets the stream identifier. + Gets the type of the media. - + - Gets the stream suffix. + Gets the pointer to the unmanaged frame. - + - Returns a that represents this stream specifier. - - - A that represents this instance. - - - - - Provides a set of utilities to perfrom logging, text formatting, - conversion and other handy calculations. + Releases unmanaged and - optionally - managed resources. - + - Initializes static members of the class. + Releases unmanaged and - optionally - managed resources. + + true to release both managed and unmanaged resources; false to release only unmanaged resources. + + + + A single codec option along with a stream specifier. - + - Determines if we are currently in Design Time - - - true if this instance is in design time; otherwise, false. - - - - - Gets a value indicating whether this instance is in debug mode. - - - - - Gets the assembly location. - - - - - Converts a byte pointer to a string - - The byte PTR. - The string - - - - Converts a byte pointer to a UTF8 encoded string. - - The byte PTR. - The string - - - - Converts the given value to a value that is of the given multiple. + Initializes a new instance of the class. + The spec. + The key. The value. - The multiple. - The value - + - Gets a timespan given a timestamp and a timebase. + Gets or sets the stream specifier. - The PTS. - The time base. - The TimeSpan - + - Gets a timespan given a timestamp and a timebase. + Gets or sets the option name - The PTS. - The time base. - The TimeSpan - + - Gets a timespan given a timestamp and a timebase. + Gets or sets the option value. - The PTS in seconds. - The time base. - The TimeSpan - - - - Gets a timespan given a timestamp and a timebase. - - The PTS. - The time base. - The TimeSpan - - - - Gets a timespan given a timestamp (in AV_TIME_BASE units) - - The PTS. - The TimeSpan - - - - Gets a timespan given a timestamp (in AV_TIME_BASE units) - - The PTS. - The TimeSpan - - - - Converts a fraction to a double - - The rational. - The value - - - - Registers FFmpeg library and initializes its components. - It only needs to be called once but calling it more than once - has no effect. Returns the path that FFmpeg was registered from. - - The override path. - Returns the path that FFmpeg was registered from. - When the folder is not found - - - - Logs the specified message. - - The sender. - Type of the message. - The message. - sender - - - - Logs a block rendering operation as a Trace Message - if the debugger is attached. 
- - The media element. - The block. - The clock position. - Index of the render. - - - - Returns a formatted timestamp string in Seconds - - The ts. - The formatted string - - - - Returns a formatted string with elapsed milliseconds between now and - the specified date. - - The dt. - The formatted string - - - - Returns a fromatted string, dividing by the specified - factor. Useful for debugging longs with byte positions or sizes. - - The ts. - The divide by. - The formatted string - - - - Strips the SRT format and returns plain text. - - The input. - The formatted string - - - - Strips a line of text from the ASS format. - - The input. - The formatted string - - - - Handles the Tick event of the LogOutputter timer. - - The source of the event. - The instance containing the event data. - - - - Manages FFmpeg Multithreaded locking - - The mutex. - The op. - - 0 for success, 1 for error - - - - - Log message callback from ffmpeg library. - - The p0. - The level. - The format. - The vl. - - - - Enumerates the differen Closed-Captioning Colors - - - - - No color - - - - - The white color - - - - - The white transparent color - - - - - The green color - - - - - The green transparent color - - - - - The blue color - - - - - The blue transparent color - - - - - The cyan color - - - - - The cyan transparent color - - - - - The red color - - - - - The red transparent color - - - - - The yellow color - - - - - The yellow transparent color - - - - - The magenta color - - - - - The magenta transparent color - - - - - The white italics color - - - - - The white italics transparent color - - - - - The background transparent color - - - - - The foreground black color - - - - - The foreground black underline color - - - - - Enumerates the Closed-Captioning misc commands - - - - - No command - - - - - The resume command - - - - - The backspace command - - - - - The alarm off command - - - - - The alarm on command - - - - - The clear line command - - - - - The roll up2 command - - - - - The roll up3 command - - - - - The roll up4 command - - - - - The start caption command - - - - - The star non caption command - - - - - The resume non caption command - - - - - The clear screen command - - - - - The new line command - - - - - The clear buffer command - - - - - The end caption command - - - - - Defines Closed-Captioning Packet types - - - - - The unrecognized packet type - - - - - The null pad packet type - - - - - The XDS class packet type - - - - - The misc command packet type - - - - - The text packet type - - - - - The mid row packet type - - - - - The preamble packet type - - - - - The color packet type - - - - - The charset packet type - - - - - The tabs packet type - - - - - Enumerates the differen Closed-Captioning Styles - - - - - The none style - - - - - The white style - - - - - The white underline style - - - - - The green style - - - - - The green underline style - - - - - The blue style - - - - - The blue underline style - - - - - The cyan style - - - - - The cyan underline style - - - - - The red style - - - - - The red underline style - - - - - The yellow style - - - - - The yellow underline style - - - - - The magenta style - - - - - The magenta underline style - - - - - The white italics style - - - - - The white italics underline style - - - - - The white indent0 style - - - - - The white indent0 underline style - - - - - The white indent4 style - - - - - The white indent4 underline style - - - - - The white indent8 style - - - - - The white indent8 underline style - - - - - The white 
indent12 style - - - - - The white indent12 underline style - - - - - The white indent16 style - - - - - The white indent16 underline style - - - - - The white indent20 style - - - - - The white indent20 underline style - - - - - The white indent24 style - - - - - The white indent24 underline style - - - - - The white indent28 style - - - - - The white indent28 underline style - - - - - Defines Closed-Captioning XDS Packet Classes - - - - - The none XDS Class - - - - - The current start XDS Class - - - - - The current continue XDS Class - - - - - The future start XDS Class - - - - - The future continue XDS Class - - - - - The channel start XDS Class - - - - - The channel continue XDS Class - - - - - The misc start XDS Class - - - - - The misc continue XDS Class - - - - - The public service start XDS Class - - - - - The public service continue XDS Class - - - - - The reserved start XDS Class - - - - - The reserved continue XDS Class - - - - - The private start XDS Class - - - - - The private continue XDS Class - - - - - The end all XDS Class - - - - - Represents a set of Closed Captioning Tracks - in a stream of CC packets. - - - - - The CC1 Track Packets - - - - - The CC2 Track Packets - - - - - The CC3 Track Packets - - - - - The CC4 Track Packets - - - - - Adds the specified packet and automatically places it on the right track. - If the track requires sorting it does so by reordering packets based on their timestamp. - - The item. - - - - Represents a 3-byte packet of closed-captioning data in EIA-608 format. - See: http://jackyjung.tistory.com/attachment/499e14e28c347DB.pdf - - - - - Holds the data bytes - - - - - Initializes a new instance of the class. - - The timestamp. - The source. - The offset. - - - - Initializes a new instance of the class. - - The timestamp. - The header. - The d0. - The d1. - - - - Gets the first of the two-byte packet data - - - - - Gets the second of the two-byte packet data - - - - - Gets the timestamp this packet applies to. - - - - - Gets the NTSC field (1 or 2). - 0 for unknown/null packet - - - - - Gets the channel. 0 for any, 1 or 2 for specific channel toggle. - 0 just means to use what a prior packet had specified. - - - - - Gets the type of the packet. - - - - - Gets the number of tabs, if the packet type is of Tabs - - - - - Gets the Misc Command, if the packet type is of Misc Command - - - - - Gets the Color, if the packet type is of Color - - - - - Gets the Style, if the packet type is of Mid Row Style - - - - - Gets the XDS Class, if the packet type is of XDS - - - - - Gets the Preamble Row Number (1 through 15), if the packet type is of Preamble - - - - - Gets the Style, if the packet type is of Preamble - - - - - Gets the text, if the packet type is of text. - - - - - Returns a that represents this instance. - - - A that represents this instance. - - - - - Compares the current instance with another object of the same type and returns an integer that indicates whether the current instance precedes, follows, or occurs in the same position in the sort order as the other object. - - An object to compare with this instance. - - A value that indicates the relative order of the objects being compared. The return value has these meanings: Value Meaning Less than zero This instance precedes in the sort order. Zero This instance occurs in the same position in the sort order as . Greater than zero This instance follows in the sort order. - - - - - Checks that the header byte starts with 11111b (5 ones binary) - - The data. 
- If header has markers - - - - Determines whether the valid flag of the header byte is set. - - The data. - - true if [is header valid falg set] [the specified data]; otherwise, false. - - - - - Gets the NTSC field type (1 or 2). - Returns 0 for unknown. - - The data. - The field type - - - - Determines whether the data is null padding - - The d0. - The d1. - - true if [is empty channel data] [the specified d0]; otherwise, false. - - - - - Drops the parity bit from the data byte. - - The input. - The byte without a parity bit. - - - - Converst an ASCII character code to an EIA-608 char (in Unicode) - - The input. - The charset char. @@ -2088,6 +1952,22 @@ Gets the dxva2 accelerator. + + + Gets the CUDA video accelerator. + + + + + Gets the name of the HW accelerator. + + + + + Gets a value indicating whether the frame requires the transfer from + the hardware to RAM + + Gets the hardware output pixel format. @@ -2133,540 +2013,9 @@ The pixel formats. The real pixel format that the codec will be using - - - Enumerates the seek target requirement levels. - - - - - Seek requirement is satisfied when - the main component has frames in the seek range. - This is the fastest option. - - - - - Seek requirement is satisfied when - the both audio and video comps have frames in the seek range. - This is the recommended option. - - - - - Seek requirement is satisfied when - ALL components have frames in the seek range - This is NOT recommended as it forces large amounts of - frames to get decoded in subtitle files. - - - - - A scaled, preallocated audio frame container. - The buffer is in 16-bit signed, interleaved sample data - - - - - Finalizes an instance of the class. - - - - - Gets a pointer to the first byte of the data buffer. - The format signed 16-bits per sample, channel interleaved - - - - - Gets the length of the buffer in bytes. - - - - - Gets the sample rate. - - - - - Gets the channel count. - - - - - Gets the available samples per channel. - - - - - Gets the media type of the data - - - - - The picture buffer length of the last allocated buffer - - - - - Holds a reference to the last allocated buffer - - - - - Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. - - - - - Releases unmanaged and - optionally - managed resources. - - true to release both managed and unmanaged resources; false to release only unmanaged resources. - - - - Represents a wrapper from an unmanaged FFmpeg audio frame - - - - - - - Initializes a new instance of the class. - - The frame. - The component. - - - - Finalizes an instance of the class. - - - - - Gets the type of the media. - - - - - Gets the pointer to the unmanaged frame. - - - - - Releases unmanaged and - optionally - managed resources. - - - - - Releases unmanaged and - optionally - managed resources. - - true to release both managed and unmanaged resources; false to release only unmanaged resources. - - - - Represents a set of preallocated media blocks of the same media type. - A block buffer contains playback and pool blocks. Pool blocks are blocks that - can be reused. Playback blocks are blocks that have been filled. - This class is thread safe. - - - - - The blocks that are available to be filled. - - - - - The blocks that are available for rendering. - - - - - Initializes a new instance of the class. - - The capacity. - Type of the media. - - - - Gets the media type of the block buffer. - - - - - Gets the start time of the first block. - - - - - Gets the end time of the last block. 
- - - - - Gets the range of time between the first block and the end time of the last block. - - - - - Gets the average duration of the currently available playback blocks. - - - - - Gets a value indicating whether all the durations of the blocks are equal - - - - - Gets the number of available playback blocks. - - - - - Gets the maximum count of this buffer. - - - - - Gets the usage percent from 0.0 to 1.0 - - - - - Gets a value indicating whether the playback blocks are all allocated. - - - - - Gets the at the specified index. - - - The . - - The index. - The media block - - - - Gets the at the specified timestamp. - - - The . - - At time. - The media block - - - - Gets the percentage of the range for the given time position. - - The position. - The percent of the range - - - - Retrieves the block following the provided current block - - The current block. - The next media block - - - - Adds a block to the playback blocks by converting the given frame. - If there are no more blocks in the pool, the oldest block is returned to the pool - and reused for the new block. The source frame is automatically disposed. - - The source. - The container. - The filled block. - - - - Clears all the playback blocks returning them to the - block pool. - - - - - Determines whether the given render time is within the range of playback blocks. - - The render time. - - true if [is in range] [the specified render time]; otherwise, false. - - - - - Retrieves the index of the playback block corresponding to the specified - render time. This uses very fast binary and linear search commbinations. - If there are no playback blocks it returns -1. - If the render time is greater than the range end time, it returns the last playback block index. - If the render time is less than the range start time, it returns the first playback block index. - - The render time. - The media block's index - - - - Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. - - - - - Returns a formatted string with information about this buffer - - The formatted string - - - - Block factory method. - - The media frame - MediaBlock - - - - Provides audio sample extraction, decoding and scaling functionality. - - - - - - Holds a reference to the audio resampler - This resampler gets disposed upon disposal of this object. - - - - - Used to determine if we have to reset the scaler parameters - - - - - Initializes a new instance of the class. - - The container. - Index of the stream. - - - - Gets the number of audio channels. - - - - - Gets the audio sample rate. - - - - - Gets the bits per sample. - - - - - Converts decoded, raw frame data in the frame source into a a usable frame.
- The process includes performing picture, samples or text conversions - so that the decoded source frame data is easily usable in multimedia applications -
- The source frame to use as an input. - The target frame that will be updated with the source frame. If null is passed the frame will be instantiated. - The sibling blocks that may help guess some additional parameters for the input frame. - - Return the updated output frame - - input -
- - - Creates a frame source object given the raw FFmpeg frame reference. - - The raw FFmpeg frame pointer. - The media frame - - - - Releases unmanaged and - optionally - managed resources. - - true to release both managed and unmanaged resources; false to release only unmanaged resources. - - - - Destroys the filtergraph releasing unmanaged resources. - - - - - Computes the frame filter arguments that are appropriate for the audio filtering chain. - - The frame. - The base filter arguments - - - - If necessary, disposes the existing filtergraph and creates a new one based on the frame arguments. - - The frame. - - avfilter_graph_create_filter - or - avfilter_graph_create_filter - or - avfilter_link - or - avfilter_graph_parse - or - avfilter_graph_config - - - - - Represents a wrapper for an unmanaged frame. - Derived classes implement the specifics of each media type. - - - - - - Initializes a new instance of the class. - - The pointer. - The component. - - - - Gets the type of the media. - - - The type of the media. - - - - - Gets the start time of the frame. - - - - - Gets the end time of the frame - - - - - Gets the index of the stream from which this frame was decoded. - - - - - Gets the amount of time this data has to be presented - - - - - Gets or sets a value indicating whether this frame obtained its start time - form a valid frame pts value - - - - - When the unmanaged frame is released (freed from unmanaged memory) - this property will return true. - - - - - Gets the time base of the stream that generated this frame. - - - - - Compares the current instance with another object of the same type and returns an integer that indicates whether the current instance precedes, follows, or occurs in the same position in the sort order as the other object. - - An object to compare with this instance. - - A value that indicates the relative order of the objects being compared. The return value has these meanings: Value Meaning Less than zero This instance precedes in the sort order. Zero This instance occurs in the same position in the sort order as . Greater than zero This instance follows in the sort order. - - - - - Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. - - - - - A base class for blocks of the deifferent MediaTypes. - Blocks are the result of decoding and scaling a frame. - Blocks have preallocated buffers wich makes them memory and CPU efficient - Reue blocks as much as possible. Once you create a block from a frame, - you don't need the frame anymore so make sure you dispose the frame. - - - - - Gets the media type of the data - - - - - Gets or sets a value indicating whether the start time was guessed from siblings - or the source frame PTS comes from a NO PTS value - - - - - Gets the time at which this data should be presented (PTS) - - - - - Gets the amount of time this data has to be presented - - - - - Gets the end time. - - - - - Gets or sets the index of the stream. - - - - - Gets the middle timestamp between the start and end time. - Returns Zero if the duration is Zero or negative. - - - - - Determines whether this media block holds the specified position. - Returns false if it does not have a valid duration. - - The position. - - true if [contains] [the specified position]; otherwise, false. 
- - - - - Compares the current instance with another object of the same type and returns an integer that indicates whether the current instance precedes, follows, or occurs in the same position in the sort order as the other object. - - An object to compare with this instance. - - A value that indicates the relative order of the objects being compared. The return value has these meanings: Value Meaning Less than zero This instance precedes in the sort order. Zero This instance occurs in the same position in the sort order as . Greater than zero This instance follows in the sort order. - - - - - Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. - - - Represents a media component of a given media type within a + Represents a media component of a given media type within a media container. Derived classes must implement frame handling logic. @@ -2682,6 +2031,11 @@ Holds a reference to the associated input context stream
+ + + Related to issue 94, looks like FFmpeg requires exclusive access when calling avcodec_open2() + + Contains the packets pending to be sent to the decoder @@ -2698,9 +2052,9 @@ Detects redundant, unmanaged calls to the Dispose method. - + - The m total bytes read + Holds total bytes read in the lifetime of this object @@ -2710,7 +2064,7 @@ The container. Index of the stream. container - The container exception. + The container exception. @@ -2734,7 +2088,7 @@ - Returns the component's stream start timestamp as reported + Gets the component's stream start timestamp as reported by the start time of the stream. @@ -2746,7 +2100,7 @@ - Gets the current length in bytes of the + Gets the current length in bytes of the packet buffer. Limit your Reads to something reasonable before this becomes too large. @@ -2757,9 +2111,9 @@ Decode packets until this number becomes 0. - + - Gets the total amount of bytes read by this component. + Gets the total amount of bytes read by this component in the lifetime of this component. @@ -2774,7 +2128,7 @@ - Gets the bitrate of this component as reported by the codec context. + Gets the bitrate of this component as reported by the codec context. Returns 0 for unknown. @@ -2799,8 +2153,7 @@ Pushes a packet into the decoding Packet Queue and processes the packet in order to try to decode - 1 or more frames. The packet has to be within the range of - the start time and end time of + 1 or more frames. The packet. @@ -2811,7 +2164,7 @@ The received Media Frames - + Converts decoded, raw frame data in the frame source into a a usable frame.
The process includes performing picture, samples or text conversions @@ -2876,7 +2229,7 @@ Represents a set of Audio, Video and Subtitle components. - This class is useful in order to group all components into + This class is useful in order to group all components into a single set. Sending packets is automatically handled by this class. This class is thread safe. @@ -2952,9 +2305,9 @@ fed to the decoders.
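The issue 94 remark above (avcodec_open2() requiring exclusive access) is typically handled by serializing every codec-open call behind one process-wide lock. A minimal sketch with FFmpeg.AutoGen follows; the class and member names here are illustrative assumptions, not the component's actual code.

    using System;
    using FFmpeg.AutoGen;

    internal static class CodecOpenGate
    {
        // One lock shared by all components, because the exclusivity requirement is global.
        private static readonly object SyncRoot = new object();

        public static unsafe void Open(AVCodecContext* codecContext, AVCodec* codec)
        {
            lock (SyncRoot)
            {
                // Only one thread at a time is allowed into avcodec_open2.
                var resultCode = ffmpeg.avcodec_open2(codecContext, codec, null);
                if (resultCode < 0)
                    throw new InvalidOperationException($"avcodec_open2 failed with error code {resultCode}");
            }
        }
    }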
- + - Gets the total bytes read by all components. + Gets the total bytes read by all components in the lifetime of this object. @@ -2972,7 +2325,7 @@ Gets a value indicating whether this instance has a subtitles component. - + Gets or sets the with the specified media type. Setting a new component on an existing media type component will throw. @@ -2983,7 +2336,7 @@ When the media type is invalid MediaComponent - + Removes the component of specified media type (if registered). It calls the dispose method of the media component too. @@ -3025,267 +2378,19 @@ true to release both managed and unmanaged resources; false to release only unmanaged resources. - - - A subtitle frame container. Simply contains text lines. - - - - - Gets the media type of the data - - - - - Gets the lines of text for this subtitle frame with all formatting stripped out. - - - - - Gets the original text in SRT or ASS fromat. - - - - - Gets the type of the original text. - Returns None when it's a bitmap or when it's None - - - - - Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. - - - - - Represents a wrapper for an unmanaged Subtitle frame. - TODO: Only text (ASS and SRT) subtitles are supported currently. - There is no support to bitmap subtitles. - - - - - - Initializes a new instance of the class. - - The frame. - The component. - - - - Finalizes an instance of the class. - - - - - Gets the type of the media. - - - - - Gets lines of text that the subtitle frame contains. - - - - - Gets the type of the text. - - - The type of the text. - - - - - Gets the pointer to the unmanaged subtitle struct - - - - - Releases unmanaged and - optionally - managed resources. - - - - - Allocates an AVSubtitle struct in unmanaged memory, - - The subtitle struct pointer - - - - Deallocates the subtitle struct used to create in managed memory. - - The frame. - - - - Releases unmanaged and - optionally - managed resources. - - true to release both managed and unmanaged resources; false to release only unmanaged resources. - - - - A pre-allocated, scaled video block. The buffer is in BGR, 24-bit format - - - - - Finalizes an instance of the class. - - - - - Gets the media type of the data - - - - - Gets a pointer to the first byte of the data buffer. - The format is 24bit BGR - - - - - Gets the length of the buffer in bytes. - - - - - The picture buffer stride. - Pixel Width * 24-bit color (3 byes) + alignment (typically 0 for modern hw). - - - - - Gets the number of horizontal pixels in the image. - - - - - Gets the number of vertical pixels in the image. - - - - - Gets or sets the width of the aspect ratio. - - - - - Gets or sets the height of the aspect ratio. - - - - - Gets the SMTPE time code. - - - - - Gets the display picture number (frame number). - If not set by the decoder, this attempts to obtain it by dividing the start time by the - frame duration - - - - - Gets the coded picture number set by the decoder. - - - - - The picture buffer length of the last allocated buffer - - - - - Holds a reference to the last allocated buffer - - - - - Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. - - - - - Releases unmanaged and - optionally - managed resources. - - true to release both managed and unmanaged resources; false to release only unmanaged resources. - - - - Represents a wrapper for an unmanaged ffmpeg video frame. - - - - - - Initializes a new instance of the class. - - The frame. - The component. 
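As a worked example of the stride formula quoted above (pixel width * 3 bytes + alignment): a 1920x1080 frame in the 24-bit BGR layout has a stride of 1920 * 3 = 5,760 bytes per row when the alignment padding is zero, so the whole picture buffer is 5,760 * 1,080 = 6,220,800 bytes, roughly 5.9 MiB per preallocated block.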
- - - - Finalizes an instance of the class. - - - - - Gets the type of the media. - - - - - Gets the closed caption data collected from the frame in CEA-708/EAS-608 format. - - - - - Gets the display picture number (frame number). - If not set by the decoder, this attempts to obtain it by dividing the start time by the - frame duration - - - - - Gets the coded picture number set by the decoder. - - - - - Gets the SMTPE time code. - - - - - Gets the pointer to the unmanaged frame. - - - - - Releases unmanaged and - optionally - managed resources. - - - - - Releases unmanaged and - optionally - managed resources. - - true to release both managed and unmanaged resources; false to release only unmanaged resources. - A container capable of opening an input url, reading packets from it, decoding frames, seeking, and pausing and resuming network streams Code heavily based on https://raw.githubusercontent.com/FFmpeg/FFmpeg/release/3.2/ffplay.c - The method pipeline should be: - 1. Set Options (or don't, for automatic options) and Initialize, - 2. Perform continuous Reads, + The method pipeline should be: + 1. Set Options (or don't, for automatic options) and Initialize, + 2. Perform continuous Reads, 3. Perform continuous Decodes and Converts/Materialize - + The logger @@ -3315,6 +2420,11 @@ Holds the set of components. + + + The internal flag that determines if a seek operation is in progress. + + To detect redundat Dispose calls @@ -3345,14 +2455,29 @@ When a read operation is started, this is set to the ticks of UTC now. - + + + The signal to request the abortion of the following read operation + + + + + If set to true, it will reset the abort requested flag to false. + + + + + If set to true, an ongoing seek operation will immediately try to return and cancel all reads. + + + Initializes a new instance of the class. The media URL. - The logger. + The logger. - The protocol prefix. See https://ffmpeg.org/ffmpeg-protocols.html + The protocol prefix. See https://ffmpeg.org/ffmpeg-protocols.html Leave null if setting it is not intended. mediaUrl @@ -3421,6 +2546,9 @@ Will be set to true whenever an End Of File situation is reached. + + true if this instance is at end of stream; otherwise, false. + @@ -3445,9 +2573,19 @@ Provides direct access to the individual Media components of the input stream. + + + Gets a value indicating whether reads are in the aborted state. + + + + + Gets a value indicating whether a seek operation is in progress. + + - Gets the media start time by which all component streams are offset. + Gets the media start time by which all component streams are offset. Typically 0 but it could be something other than 0. @@ -3494,7 +2632,7 @@ Any Media Options must be set before this method is called. - + Seeks to the specified position in the stream. This method attempts to do so as precisely as possible, returning decoded frames of all available media type components @@ -3503,7 +2641,11 @@ Pass TimeSpan.Zero to seek to the beginning of the stream. The position. - The list of media frames + if set to true [aborted]. + + The list of media frames + + No input context initialized @@ -3517,7 +2659,7 @@ The media type of the packet that was read No input context initialized - When a read error occurs + When a read error occurs @@ -3533,7 +2675,7 @@ The list of media frames - + Performs audio, video and subtitle conversions on the decoded input frame so data can be used as a Frame. Please note that if the output is passed as a reference. 
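The container pipeline spelled out above (set options and Initialize, then continuous Reads, then Decodes and Convert/Materialize) can be driven roughly as in the sketch below. The member names come from the documentation in this file, but the exact signatures are assumptions, so treat this as an outline rather than drop-in consumer code.

    // Approximate consumer loop over MediaContainer; signatures are inferred, not exact.
    using (var container = new MediaContainer(@"C:\media\sample.mp4", logger: null))
    {
        container.Initialize();                            // 1. open the input context

        while (!container.IsAtEndOfStream)
        {
            container.Read();                              // 2. pull the next packet into the component queues

            foreach (var frame in container.Decode())      // 3a. drain queued packets into decoded frames
            {
                MediaBlock block = null;
                container.Convert(frame, ref block, null); // 3b. materialize the frame into a reusable block
                // hand the block to the renderer for its media type, then keep looping
            }
        }
    }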
@@ -3550,12 +2692,31 @@ The media block No input context initialized - MediaType + MediaType input input or input + + + Signals the abortion of the current seek operation. + + + Returns true if there was a seek operation in progress when this method was called. + + + + + Signals the packet reading operations to abort immediately. + + if set to true, the read interrupt will reset the aborted state automatically + + + + Signals the state for read operations to stop being in the aborted state + + Closes the input context immediately releasing all resources. @@ -3567,6 +2728,12 @@ Releases unmanaged and - optionally - managed resources. + + + Initializes the InputContext and applies format options. + https://www.ffmpeg.org/ffmpeg-formats.html#Format-Options + + Initializes the input context to start read operations. @@ -3574,7 +2741,7 @@ to the Open method. The input context has already been initialized. - When an error initializing the stream occurs. + When an error initializing the stream occurs. @@ -3586,7 +2753,7 @@ Creates the stream components by first finding the best available streams. Then it initializes the components of the correct type each. - The exception ifnromation + The exception ifnromation @@ -3602,7 +2769,7 @@ The type of media packet that was read Initialize - Raised when an error reading from the stream occurs. + Raised when an error reading from the stream occurs. @@ -3629,13 +2796,16 @@ Seeks to the position at the start of the stream. - + Seeks to the exact or prior frame of the main stream. Supports byte seeking. The target time. - The list of media frames + if set to true [aborted]. + + The list of media frames + @@ -3653,6 +2823,79 @@ true to release both managed and unmanaged resources; false to release only unmanaged resources. + + + Represents a wrapper for an unmanaged frame. + Derived classes implement the specifics of each media type. + + + + + + Initializes a new instance of the class. + + The pointer. + The component. + + + + Gets the type of the media. + + + The type of the media. + + + + + Gets the start time of the frame. + + + + + Gets the end time of the frame + + + + + Gets the index of the stream from which this frame was decoded. + + + + + Gets the amount of time this data has to be presented + + + + + Gets or sets a value indicating whether this frame obtained its start time + form a valid frame pts value + + + + + When the unmanaged frame is released (freed from unmanaged memory) + this property will return true. + + + + + Gets the time base of the stream that generated this frame. + + + + + Compares the current instance with another object of the same type and returns an integer that indicates whether the current instance precedes, follows, or occurs in the same position in the sort order as the other object. + + An object to compare with this instance. + + A value that indicates the relative order of the objects being compared. The return value has these meanings: Value Meaning Less than zero This instance precedes in the sort order. Zero This instance occurs in the same position in the sort order as . Greater than zero This instance follows in the sort order. + + + + + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + + A data structure containing a quque of packets to process. @@ -3723,6 +2966,92 @@ true to release both managed and unmanaged resources; false to release only unmanaged resources. + + + Enumerates the seek target requirement levels. 
+ + + + + Seek requirement is satisfied when + the main component has frames in the seek range. + This is the fastest option. + + + + + Seek requirement is satisfied when + the both audio and video comps have frames in the seek range. + This is the recommended option. + + + + + Seek requirement is satisfied when + ALL components have frames in the seek range + This is NOT recommended as it forces large amounts of + frames to get decoded in subtitle files. + + + + + A managed representation of an FFmpeg stream specifier + + + + + Initializes a new instance of the class. + + + + + Initializes a new instance of the class. + + The stream identifier. + streamId + + + + Initializes a new instance of the class. + + Type of the media. + streamType + + + + Initializes a new instance of the class. + + Type of the media. + The stream identifier. + + streamType + or + streamId + + + + + Provides suffixes for the different media types. + + + + + Gets the stream identifier. + + + + + Gets the stream suffix. + + + + + Returns a that represents this stream specifier. + + + A that represents this instance. + + Performs subtitle stream extraction, decoding and text conversion. @@ -3736,7 +3065,7 @@ The container. Index of the stream. - + Converts decoded, raw frame data in the frame source into a a usable frame.
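For orientation, the FFmpeg stream specifiers that StreamSpecifier models are short strings such as "v" (any video stream) or "a:1" (the audio stream with id 1). The helper below illustrates that convention in a self-contained way; the suffix mapping follows FFmpeg's documented convention and is assumed, not copied from the class above.

    // Illustrative only: builds FFmpeg-style stream specifier strings ("v", "a:1", "s:0").
    internal enum StreamKind { Audio, Video, Subtitle }

    internal static class StreamSpecifierSketch
    {
        public static string Make(StreamKind kind, int? streamId = null)
        {
            string suffix;
            switch (kind)
            {
                case StreamKind.Audio: suffix = "a"; break;
                case StreamKind.Video: suffix = "v"; break;
                default: suffix = "s"; break;
            }

            // Without a stream id the specifier matches every stream of that type.
            return streamId.HasValue ? suffix + ":" + streamId.Value : suffix;
        }
    }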
The process includes performing picture, samples or text conversions @@ -3750,6 +3079,20 @@ input cannot be null + + + Strips the SRT format and returns plain text. + + The input. + The formatted string + + + + Strips a line of text from the ASS format. + + The input. + The formatted string + Creates a frame source object given the raw FFmpeg subtitle reference. @@ -3757,17 +3100,78 @@ The raw FFmpeg subtitle pointer. The managed frame + + + Represents a wrapper for an unmanaged Subtitle frame. + TODO: Only text (ASS and SRT) subtitles are supported currently. + There is no support to bitmap subtitles. + + + + + + Initializes a new instance of the class. + + The frame. + The component. + + + + Finalizes an instance of the class. + + + + + Gets the type of the media. + + + + + Gets lines of text that the subtitle frame contains. + + + + + Gets the type of the text. + + + The type of the text. + + + + + Gets the pointer to the unmanaged subtitle struct + + + + + Releases unmanaged and - optionally - managed resources. + + + + + Allocates an AVSubtitle struct in unmanaged memory, + + The subtitle struct pointer + + + + Deallocates the subtitle struct used to create in managed memory. + + The frame. + + + + Releases unmanaged and - optionally - managed resources. + + true to release both managed and unmanaged resources; false to release only unmanaged resources. + Performs video picture decoding, scaling and extraction logic. - - - The output pixel format of the scaler: 24-bit BGR - - Holds a reference to the video scaler @@ -3807,7 +3211,7 @@ Gets the height of the picture frame. - + Converts decoded, raw frame data in the frame source into a a usable frame.
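The StripSrtFormat and StripAssFormat helpers documented above are not shown in this diff, so the following is only a sketch of the kind of stripping they describe: SRT markup consists of HTML-like tags, while an ASS Dialogue line carries nine comma-separated header fields, {\...} override blocks and \N line breaks ahead of the plain text.

    using System;
    using System.Linq;
    using System.Text.RegularExpressions;

    internal static class SubtitleTextSketch
    {
        // Drops <i>, <b>, <font ...> style markup from SRT text.
        public static string StripSrt(string input) =>
            Regex.Replace(input, "<[^>]+>", string.Empty);

        // Pulls the text payload out of an ASS "Dialogue:" line and removes override codes.
        public static string StripAss(string input)
        {
            // The text payload is field 10; rejoin with commas because it may contain commas itself.
            var text = input.StartsWith("Dialogue:", StringComparison.OrdinalIgnoreCase)
                ? string.Join(",", input.Split(',').Skip(9))
                : input;

            text = Regex.Replace(text, @"\{[^}]*\}", string.Empty);   // remove {\an8}-style override blocks
            return text.Replace("\\N", Environment.NewLine).Replace("\\n", " ").Trim();
        }
    }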
The process includes performing picture, samples or text conversions @@ -3854,7 +3258,7 @@ If necessary, disposes the existing filtergraph and creates a new one based on the frame arguments.
The frame. - + avfilter_graph_create_filter or avfilter_graph_create_filter @@ -3871,347 +3275,367 @@ Destroys the filtergraph releasing unmanaged resources.
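The exception list above names the standard filtergraph setup calls. For orientation only, a bare-bones FFmpeg.AutoGen graph that connects a buffer source straight to a buffer sink looks roughly like this; error handling is elided and the managed-string marshaling of the const char* parameters is assumed to match the binding in use.

    using FFmpeg.AutoGen;

    internal static unsafe class FilterGraphSketch
    {
        public static AVFilterGraph* Build(string sourceArgs)
        {
            // sourceArgs describes the incoming frames, e.g.
            // "video_size=1280x720:pix_fmt=0:time_base=1/25:pixel_aspect=1/1"
            var graph = ffmpeg.avfilter_graph_alloc();

            AVFilterContext* source = null;
            AVFilterContext* sink = null;

            ffmpeg.avfilter_graph_create_filter(&source, ffmpeg.avfilter_get_by_name("buffer"), "in", sourceArgs, null, graph);
            ffmpeg.avfilter_graph_create_filter(&sink, ffmpeg.avfilter_get_by_name("buffersink"), "out", null, null, graph);

            ffmpeg.avfilter_link(source, 0, sink, 0);   // wire source pad 0 to sink pad 0
            ffmpeg.avfilter_graph_config(graph, null);  // validate and configure the whole graph

            return graph;
        }
    }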
- + - Represents a control that contains audio and/or video. - In contrast with System.Windows.Controls.MediaElement, this version uses - the FFmpeg library to perform reading and decoding of media streams. + Represents a wrapper for an unmanaged ffmpeg video frame. - + + + + + Initializes a new instance of the class. + + The frame. + The component. + + + + Finalizes an instance of the class. + + + + + Gets the type of the media. + + + + + Gets the closed caption data collected from the frame in CEA-708/EAS-608 format. + + + + + Gets the display picture number (frame number). + If not set by the decoder, this attempts to obtain it by dividing the start time by the + frame duration + + + + + Gets the coded picture number set by the decoder. + + + + + Gets the SMTPE time code. + + + + + Gets the pointer to the unmanaged frame. + + + + + Releases unmanaged and - optionally - managed resources. + + + + + Releases unmanaged and - optionally - managed resources. + + true to release both managed and unmanaged resources; false to release only unmanaged resources. + + + + Represents a Media Engine that contains underlying streams of audio and/or video. + It uses the fantastic FFmpeg library to perform reading and decoding of media streams. + + - - - + - Occurs right before the video is presented on the screen. - You can update the pizels on the bitmap before it is rendered on the screen. - Or you could take a screenshot. - Ensure you handle this very quickly as it runs on the UI thread. + Starts the block rendering worker. - + - Occurs right before the audio is added to the audio buffer. - You can update the bytes before they are enqueued. - Ensure you handle this quickly before you get choppy audio. + Stops the block rendering worker. - + - Occurs right before the subtitles are rendered. - You can update the text. - Ensure you handle this quickly before you get choppy subtitles. + Raises the MessageLogged event + + The instance containing the message. + + + + Raises the media failed event. + + The ex. + + + + Raises the media closed event. - + - Raises the rendering video event. - - The bitmap. - The stream. - The smtpe timecode. - The picture number. - The start time. - The duration. - The clock. - - - - Raises the rendering audio event. - - The audio block. - The clock. - - - - Raises the rendering subtitles event. - - The block. - The clock. - True if the rendering should be prevented - - - - This partial class implements: - 1. Packet reading from the Container - 2. Frame Decoding from packet buffer and Block buffering - 3. Block Rendering from block buffer + Raises the media opened event. - + - Gets the packet reading cycle control evenet. + Raises the media opening event. - + - Gets the frame decoding cycle control event. + Raises the buffering started event. - + - Gets the block rendering cycle control event. + Raises the buffering ended event. - + - Gets the seeking done control event. + Raises the Seeking started event. - + - Gets or sets a value indicating whether the workedrs have been requested - an exit. + Raises the Seeking ended event. - + - Gets or sets a value indicating whether the decoder has moved its byte position - to something other than the normal continuous reads in the last read cycle. + Raises the media ended event. - + - Holds the blocks - - - - - Holds the block renderers - - - - - Holds the last rendered StartTime for each of the media block types - - - - - Gets a value indicating whether more packets can be read from the stream. 
- This does not check if the packet queue is full. - - - - - Gets a value indicating whether more frames can be decoded from the packet queue. - That is, if we have packets in the packet buffer or if we are not at the end of the stream. - - - - - Runs the read task which keeps a packet buffer as full as possible. - It reports on DownloadProgress by enqueueing an update to the property - in order to avoid any kind of disruption to this thread caused by the UI thread. - - - - - Continually decodes the available packet buffer to have as - many frames as possible in each frame queue and - up to the MaxFrames on each component - - - - - Continuously converts frmes and places them on the corresponding - block buffer. This task is responsible for keeping track of the clock - and calling the render methods appropriate for the current clock position. - - - - - Sets the clock to a discrete video position if possible + Raises the Position Changed event The position. - + - Gets a value indicating whether more frames can be converted into blocks of the given type. + Notifies the platform connector that a property value has changed. - The t. - - true if this instance [can read more frames of] the specified t; otherwise, false. - + Name of the property used to notify listeners. This + value is optional and can be provided automatically when invoked from compilers + that support . - - - Sends the given block to its corresponding media renderer. - - The block. - The clock position. - The number of blocks sent to the renderer - - - - Adds the blocks of the given media type. - - The t. - The number of blocks that were added - - + The command queue to be executed in the order they were sent. - + Represents a real-time time measuring device. Rendering media should occur as requested by the clock. - + - The underlying media container that provides access to + The underlying media container that provides access to individual media component streams - + Begins or resumes playback of the currently loaded media. + The awaitable command - + Pauses playback of the currently loaded media. + The awaitable command - + Pauses and rewinds the currently loaded media. + The awaitable command - + + + Opens the specified URI. + + The URI. + The awaitable task + Source + + Closes the currently loaded media. + The awaitable task - + - The logger + Seeks to the specified position. + + New position for the player. + + + + Sets the specified playback speed ratio. + + New playback speed ratio. + + + + Gets or Sets the Source on this MediaElement. + The Source property is the Uri of the media to be played. - + - This is the image that will display the video from a Writeable Bitmap + Specifies the behavior that the media element should have when it + is loaded. The default behavior is that it is under manual control + (i.e. the caller should call methods such as Play in order to play + the media). If a source is set, then the default behavior changes to + to be playing the media. If a source is set and a loaded behavior is + also set, then the loaded behavior takes control. - + + + Gets or Sets the SpeedRatio property of the media. + + + + + Gets the internal real time clock speed ratio. + This is different from the regular property as this is the immediate value + (i.e. might not yet be applied) + + + + + Specifies how the underlying media should behave when + it has ended. The default behavior is to Close the media. + + + + + Gets/Sets the Volume property on the MediaElement. 
+ Note: Valid values are from 0 to 1 + + + + + Gets/Sets the Balance property on the MediaElement. + + + + + Gets/Sets the IsMuted property on the MediaElement. + + + + + Gets or sets a value that indicates whether the MediaElement will update frames + for seek operations while paused. This is a dependency property. + + + + + Gets or Sets the Position property on the MediaElement. + + + + + Gets the internal real time clock position. + This is different from the regular property as this is the immediate value + (i.e. might not yet be applied) + + + To detect redundant calls - - - The ffmpeg directory - - - - - IUriContext BaseUri backing - - - - - The position update timer - - - + When position is being set from within this control, this field will be set to true. This is useful to detect if the user is setting the position or if the Position property is being driven from within - + Flag when disposing process start but not finished yet - + - Initializes static members of the class. + Initializes a new instance of the class. + + The associated parent object. + The parent implementing connector methods. + Thrown when the static Initialize method has not been called. + + + + Gets the associated parent object. - + - Initializes a new instance of the class. + Gets the event connector (platform specific). - + - Occurs when a logging message from the FFmpeg library has been received. - This is shared across all instances of Media Elements + Gets a value indicating whether this instance is disposed. + + true if this instance is disposed; otherwise, false. + - + - Multicast event for property change notifications. + Logs the specified message into the logger queue. + Type of the message. + The message. - + - Occurs when a logging message has been logged. - This does not include FFmpeg messages. + Retrieves the registered renderer for the given media type. + Type of the media. + The media renderer - - - Gets or sets the FFmpeg path from which to load the FFmpeg binaries. - You must set this path before setting the Source property for the first time on any instance of this control. - Settng this property when FFmpeg binaries have been registered will throw an exception. - - - - - Gets or sets the horizontal alignment characteristics applied to this element when it is - composed within a parent element, such as a panel or items control. - - - - - Gets or sets the base URI of the current application context. - - - - - When position is being set from within this control, this field will - be set to true. This is useful to detect if the user is setting the position - or if the Position property is being driven from within - - - - - Gets the grid control holding the rest of the controls. - - - + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. - - - Raises the FFmpegMessageLogged event - - The instance containing the event data. - - + Updates the position property signaling the update is - coming internally. This is to distinguish between user/binding + coming internally. This is to distinguish between user/binding written value to the Position Porperty and value set by this control's internal clock. The current position. - + - Raises the MessageLogged event + Resets all the buffering properties to their defaults. - The instance containing the event data. - + + + Updates the buffering properties: IsBuffering, BufferingProgress, DownloadProgress. + + + + + Guesses the bitrate of the input stream. 
+ + + Checks if a property already matches a desired value. Sets the property and notifies listeners only when necessary. @@ -4225,211 +3649,86 @@ True if the value was changed, false if the existing value matched the desired value. - - - Notifies listeners that a property value has changed. - - Name of the property used to notify listeners. This - value is optional and can be provided automatically when invoked from compilers - that support . - - + Releases unmanaged and - optionally - managed resources. + Please not that this call is non-blocking/asynchronous. true to release both managed and unmanaged resources; false to release only unmanaged resources. - - - DependencyProperty for FFmpegMediaElement Source property. - - - - - DependencyProperty for Stretch property. - - - - - DependencyProperty for StretchDirection property. - - - - - The DependencyProperty for the MediaElement.Balance property. - - - - - The DependencyProperty for the MediaElement.IsMuted property. - - - - - The DependencyProperty for the MediaElement.SpeedRatio property. - - - - - The DependencyProperty for the MediaElement.Volume property. - - - - - The DependencyProperty for the MediaElement.ScrubbingEnabled property. - - - - - The DependencyProperty for the MediaElement.UnloadedBehavior property. - TODO: Currently this property has no effect. Needs implementation. - - - - - The DependencyProperty for the MediaElement.LoadedBehavior property. - - - - - The DependencyProperty for the MediaElement.Position property. - - - - - Gets/Sets the Source on this MediaElement. - The Source property is the Uri of the media to be played. - - - - - Gets/Sets the Stretch on this MediaElement. - The Stretch property determines how large the MediaElement will be drawn. - - - - - Gets/Sets the stretch direction of the Viewbox, which determines the restrictions on - scaling that are applied to the content inside the Viewbox.  For instance, this property - can be used to prevent the content from being smaller than its native size or larger than - its native size. - - - - - Specifies the behavior that the media element should have when it - is loaded. The default behavior is that it is under manual control - (i.e. the caller should call methods such as Play in order to play - the media). If a source is set, then the default behavior changes to - to be playing the media. If a source is set and a loaded behavior is - also set, then the loaded behavior takes control. - - - - - Gets/Sets the SpeedRatio property on the MediaElement. - - - - - Specifies how the underlying media should behave when - it has ended. The default behavior is to Close the media. - - - - - Gets/Sets the Volume property on the MediaElement. - Note: Valid values are from 0 to 1 - - - - - Gets/Sets the Balance property on the MediaElement. - - - - - Gets/Sets the IsMuted property on the MediaElement. - - - - - Gets or sets a value that indicates whether the MediaElement will update frames - for seek operations while paused. This is a dependency property. - - - - - Gets/Sets the Position property on the MediaElement. - - - + Provides key-value pairs of the metadata contained in the media. Returns null when media has not been loaded. - + Gets the media format. Returns null when media has not been loaded. - + + + Provides stream, chapter and program info of the underlying media. + Returns null when no media is loaded. + + + Gets the duration of a single frame step. If there is a video component with a framerate, this propery returns the length of a frame. 
If there is no video component it simply returns a tenth of a second. - - - Returns whether the given media has audio. + + + Returns whether the given media has audio. Only valid after the MediaOpened event has fired. - + - - + + Returns whether the given media has video. Only valid after the MediaOpened event has fired. - + Gets the video codec. Only valid after the MediaOpened event has fired. - + Gets the video bitrate. Only valid after the MediaOpened event has fired. - + Returns the natural width of the media in the video. Only valid after the MediaOpened event has fired. - + - - + + Returns the natural height of the media in the video. Only valid after the MediaOpened event has fired. - + Gets the video frame rate. Only valid after the MediaOpened event has fired. - + Gets the duration in seconds of the video frame. Only valid after the MediaOpened event has fired. - + Gets the name of the video hardware decoder in use. Enabling hardware acceleration does not guarantee decoding will be performed in hardware. @@ -4437,1900 +3736,1263 @@ Otherwise it will return an empty string. - + Gets the audio codec. Only valid after the MediaOpened event has fired. - + Gets the audio bitrate. Only valid after the MediaOpened event has fired. - + Gets the audio channels count. Only valid after the MediaOpened event has fired. - + Gets the audio sample rate. Only valid after the MediaOpened event has fired. - + Gets the audio bits per sample. Only valid after the MediaOpened event has fired. - + Gets the Media's natural duration Only valid after the MediaOpened event has fired. - + Returns whether the currently loaded media can be paused. This is only valid after the MediaOpened event has fired. Note that this property is computed based on wether the stream is detected to be a live stream. - + - Returns whether the currently loaded media is live or realtime + Returns whether the currently loaded media is live or realtime and does not have a set duration This is only valid after the MediaOpened event has fired. - + + + When position is being set from within this control, this field will + be set to true. This is useful to detect if the user is setting the position + or if the Position property is being driven from within + + + Gets a value indicating whether the currently loaded media can be seeked. - + Gets a value indicating whether the media is playing. - + Gets a value indicating whether the media has reached its end. - + Get a value indicating whether the media is buffering. - + Gets a value indicating whether the media seeking is in progress. - + Returns the current video SMTPE timecode if available. If not available, this property returns an empty string. - + + + Gets the guessed buffered bytes in the packet queue per second. + If bitrate information is available, then it returns the bitrate converted to byte rate. + Returns null if it has not been guessed. + + + Gets a value that indicates the percentage of buffering progress made. Range is from 0 to 1 - + - The wait packet buffer length. + The packet buffer length. It is adjusted to 1 second if bitrate information is available. - Otherwise, it's simply 512KB + Otherwise, it's simply 512KB and it is guessed later on. - + Gets a value that indicates the percentage of download progress made. Range is from 0 to 1 - + Gets the maximum packet buffer length, according to the bitrate (if available). If it's a realtime stream it will return 30 times the buffer cache length. Otherwise, it will return 4 times of the buffer cache length. 
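Pulling the pieces above together (the Source, Position, Volume, Balance and SpeedRatio members plus the Play/Pause/Close commands), a typical WPF consumer looks roughly like the sketch below. The element name and event wiring are assumptions, and the commands are documented as awaitable, so they are fired without awaiting here purely for brevity.

    using System;

    // Assumes a XAML declaration along the lines of:
    //   <ffme:MediaElement x:Name="Media" LoadedBehavior="Manual" />
    public partial class PlayerWindow : System.Windows.Window
    {
        public PlayerWindow()
        {
            InitializeComponent();

            // The FFmpeg binaries folder must be set before the first Source assignment.
            Unosquare.FFME.MediaElement.FFmpegDirectory = @"C:\ffmpeg\bin";

            Media.MediaOpened += (s, e) =>
            {
                Media.Volume = 0.8;       // valid range 0 to 1, per the docs above
                Media.Balance = 0.0;      // -1.0 (full left) to 1.0 (full right)
                Media.SpeedRatio = 1.0;   // normal playback speed
            };
        }

        private void OpenAndPlay(Uri mediaUri)
        {
            Media.Source = mediaUri;      // with LoadedBehavior=Manual, playback starts on Play()
            Media.Play();
        }

        private void JumpTo(TimeSpan target) => Media.Position = target;
    }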
- + Gets a value indicating whether the media is in the process of opening. - + Gets a value indicating whether this media element currently has an open media url. - + Gets the current playback state. - - - Updates the metada property. - - - + Updates the media properties notifying that there are new values to be read from all of them. Call this method only when necessary because it creates a lot of events. - + - Resets the dependency properies. + Resets the controller properies. - + - BufferingStarted is a routed event + The initialize lock - + - BufferingEnded is a routed event + The has intialized flag - + - SeekingStarted is a routed event + The ffmpeg directory - + - SeekingEnded is a routed event + Stores the load mode flags - + - MediaFailedEvent is a routed event. + Gets the platform-specific implementation requirements. - - - MediaOpened is a routed event. - - - - - MediaOpeningEvent is a routed event. - - - - - MediaEnded is a routed event - - - - - Occurs when buffering of packets was started - - - - - Occurs when buffering of packets was Ended - - - - - Occurs when Seeking of packets was started - - - - - Occurs when Seeking of packets was Ended - - - - - Raised when the media fails to load or a fatal error has occurred which prevents playback. - - - - - Raised when the media is opened - - - - - Raised before the input stream of the media is opened. - Use this method to modify the input options. - - - - - Raised when the corresponding media ends. - - - - - Raises the media failed event. - - The ex. - - - - Raises the media opened event. - - - - - Raises the media opening event. - - - - - Creates a new instance of exception routed event arguments. - This method exists because the constructor has not been made public for that class. - - The routed event. - The sender. - The error exception. - The event arguments - - - - Logs the start of an event - - The event. - - - - Logs the end of an event. - - The event. - - - - Raises the buffering started event. - - - - - Raises the buffering ended event. - - - - - Raises the Seeking started event. - - - - - Raises the Seeking ended event. - - - - - Raises the media ended event. - - - - - A base class to represent media block - rendering event arguments. - - - - - - Initializes a new instance of the class. - - The stream. - The position. - The duration. - The clock. - - - - Provides Stream Information coming from the media container. - - - - - Gets the clock position at which the media - was called for rendering - - - - - Gets the starting time at which this media - has to be presented. - - - - - Gets how long this media has to be presented. - - - - - Provides the audio samples rendering payload as event arguments. - - - - - - Initializes a new instance of the class. - - The buffer. - The length. - The stream. - The start time. - The duration. - The clock. - - - - Gets a pointer to the samples buffer. - Samples are provided in PCM 16-bit signed, interleaved stereo. - - - - - Gets the length in bytes of the samples buffer. - - - - - Gets the number of samples in 1 second. - - - - - Gets the number of channels. - - - - - Gets the number of bits per sample. - - - - - Gets the number of samples in the buffer for all channels. - - - - - Gets the number of samples in the buffer per channel. - - - - - Provides the subtitles rendering payload as event arguments. - - - - - - Initializes a new instance of the class. - - The text. - The original text. - The format. - The stream. - The start time. - The duration. - The clock. 
- - - - Gets the text stripped out of ASS or SRT formatting. - This is what the default subtitle renderer will display - on the screen. - - - - - Gets the text as originally decoded including - all markup and formatting. - - - - - Gets the type of subtitle format the original - subtitle text is in. - - - - - When set to true, clears the current subtitle and - prevents the subtitle block from being rendered. - - - - - The video rendering event arguments - - - - - - Initializes a new instance of the class. - - The bitmap. - The stream. - The smtpe timecode. - The picture number. - The start time. - The duration. - The clock. - - - - Gets the writable bitmap filled with the video frame pixels. - Feel free to capture or change this image. - - - - - Gets the display picture number (frame number). - If not set by the decoder, this attempts to obtain it by dividing the start time by the - frame duration - - - - - Gets the SMTPE time code. - - - - - Holds media information about the input, its chapters, programs and individual stream components - - - - - Initializes a new instance of the class. - - The container. - - - - Gets the input URL string used to access and create the media container - - - - - Gets the name of the container format. - - - - - Gets the metadata for the input. This may include stuff like title, creation date, company name, etc. - Individual stream components may contain additional metadata. - The metadata - - - - - Gets the duration of the input as reported by the container format. - Individual stream components may have different values - - - - - Gets the start timestamp of the input as reported by the container format. - Individual stream components may have different values - - - - - If available, returns a non-zero value as reported by the container format. - - - - - Gets a list of chapters - - - - - Gets a list of programs with their associated streams. - - - - - Gets the dictionary of stream information components by stream index. - - - - - Provides access to the best streams of each media type found in the container. - This uses some internal FFmpeg heuristics. - - - - - Extracts the stream infos from the input. - - The ic. - The list of stream infos - - - - Finds the best streams for audio video, and subtitles. - - The ic. - The streams. - The star infos - - - - Extracts the chapters from the input. - - The ic. - The chapters - - - - Extracts the programs from the input and creates associations between programs and streams. - - The ic. - The streams. - The program information - - - - Represents media stream information - - - - - Gets the stream identifier. This is different from the stream index. - Typically this value is not very useful. - - - - - Gets the index of the stream. - - - - - Gets the type of the codec. - - - - - Gets the name of the codec type. Audio, Video, Subtitle, Data, etc. - - - - - Gets the codec identifier. - - - - - Gets the name of the codec. - - - - - Gets the codec profile. Only valid for H.264 or - video codecs that use profiles. Otherwise empty. - - - - - Gets the codec tag. Not very useful except for fixing bugs with - some demuxer scenarios. - - - - - Gets a value indicating whether this stream has closed captions. - Typically this is set for video streams. - - - - - Gets a value indicating whether this stream contains lossless compressed data. - - - - - Gets the pixel format. Only valid for Vide streams. - - - - - Gets the width of the video frames. - - - - - Gets the height of the video frames. - - - - - Gets the field order. 
This is useful to determine - if the video needs deinterlacing - - - - - Gets the video color range. - - - - - Gets the audio sample rate. - - - - - Gets the audio sample format. - - - - - Gets the stream time base unit in seconds. - - - - - Gets the sample aspect ratio. - - - - - Gets the display aspect ratio. - - - - - Gets the reported bit rate. 9 for unavalable. - - - - - Gets the maximum bit rate for variable bitrate streams. 0 if unavailable. - - - - - Gets the number of frames that were read to obtain the stream's information. - - - - - Gets the number of reference frames. - - - - - Gets the average FPS reported by the stream. - - - - - Gets the real (base) framerate of the stream - - - - - Gets the fundamental unit of time in 1/seconds used to represent timestamps in the stream, according to the stream data - - - - - Gets the fundamental unit of time in 1/seconds used to represent timestamps in the stream ,accoring to the codec - - - - - Gets the disposition flags. - Please see ffmpeg.AV_DISPOSITION_* fields. - - - - - Gets the start time. - - - - - Gets the duration. - - - - - Gets the stream's metadata. - - - + - Gets the language string from the stream's metadata. + Gets or sets the FFmpeg path from which to load the FFmpeg binaries. + You must set this path before setting the Source property for the first time on any instance of this control. + Settng this property when FFmpeg binaries have been registered will have no effect. - + - Represents a chapter within a container + Gets or sets the bitwise library identifiers to load. + If FFmpeg is already loaded, the value cannot be changed. - + - Gets the chapter index. + Initializes the MedieElementCore. + The platform-specific implementation. - + - Gets the chapter identifier. + This partial class implements: + 1. Packet reading from the Container + 2. Frame Decoding from packet buffer and Block buffering + 3. Block Rendering from block buffer - + - Gets the start time of the chapter. + Holds the blocks - + - Gets the end time of the chapter. + Gets the packet reading cycle control evenet. - + - Gets the chapter metadata. + Gets the frame decoding cycle control event. - + - Represents a program and its associated streams within a container. + Gets the block rendering cycle control event. - + - Gets the program number. + Gets the seeking done control event. - + - Gets the program identifier. + Gets or sets a value indicating whether the workedrs have been requested + an exit. - + - Gets the program metadata. + Gets or sets a value indicating whether the decoder has moved its byte position + to something other than the normal continuous reads in the last read cycle. - + - Gets the associated program streams. + Holds the block renderers - + - Gets the name of the program. Empty if unavailable. + Holds the last rendered StartTime for each of the media block types - + - Represents the contents of alogging message that was sent to the log manager. + Gets a value indicating whether more packets can be read from the stream. + This does not check if the packet queue is full. - - + - Initializes a new instance of the class. + Gets a value indicating whether room is available in the download cache. - The media element. - Type of the message. - The message. - + - Gets the intance of the MediaElement that generated this message. - When null, it means FFmpeg generated this message. + Gets a value indicating whether more frames can be decoded from the packet queue. 
+ That is, if we have packets in the packet buffer or if we are not at the end of the stream. - + - Gets the timestamp. + Runs the read task which keeps a packet buffer as full as possible. + It reports on DownloadProgress by enqueueing an update to the property + in order to avoid any kind of disruption to this thread caused by the UI thread. - + - Gets the type of the message. + Continually decodes the available packet buffer to have as + many frames as possible in each frame queue and + up to the MaxFrames on each component - + - Gets the contents of the message. + Initializes the media block buffers and + starts packet reader, frame decoder, and block rendering workers. - + - Generic interface for all WaveProviders. + Stops the packet reader, frame decoder, and block renderers - + - Gets the WaveFormat of this WaveProvider. + Sets the clock to a discrete video position if possible + The position. - + - Fill the specified buffer with wave data. + Gets a value indicating whether more frames can be converted into blocks of the given type. - The buffer to fill of wave data. - Offset into buffer - The number of bytes to read + The t. - the number of bytes written to the buffer. + true if this instance [can read more frames of] the specified t; otherwise, false. - + - Windows multimedia error codes from mmsystem.h. + Sends the given block to its corresponding media renderer. + The block. + The clock position. + The number of blocks sent to the renderer - - no error, MMSYSERR_NOERROR - - - unspecified error, MMSYSERR_ERROR - - - device ID out of range, MMSYSERR_BADDEVICEID - - - driver failed enable, MMSYSERR_NOTENABLED - - - device already allocated, MMSYSERR_ALLOCATED - - - device handle is invalid, MMSYSERR_INVALHANDLE - - - no device driver present, MMSYSERR_NODRIVER - - - memory allocation error, MMSYSERR_NOMEM - - - function isn't supported, MMSYSERR_NOTSUPPORTED - - - error value out of range, MMSYSERR_BADERRNUM - - - invalid flag passed, MMSYSERR_INVALFLAG - - - invalid parameter passed, MMSYSERR_INVALPARAM - - - handle being used simultaneously on another thread (eg callback),MMSYSERR_HANDLEBUSY - - - specified alias not found, MMSYSERR_INVALIDALIAS - - - bad registry database, MMSYSERR_BADDB - - - registry key not found, MMSYSERR_KEYNOTFOUND - - - registry read error, MMSYSERR_READERROR - - - registry write error, MMSYSERR_WRITEERROR - - - registry delete error, MMSYSERR_DELETEERROR - - - registry value not found, MMSYSERR_VALNOTFOUND - - - driver does not call DriverCallback, MMSYSERR_NODRIVERCB - - - more data to be returned, MMSYSERR_MOREDATA - - - unsupported wave format, WAVERR_BADFORMAT - - - still something playing, WAVERR_STILLPLAYING - - - header not prepared, WAVERR_UNPREPARED - - - device is synchronous, WAVERR_SYNC - - - Conversion not possible (ACMERR_NOTPOSSIBLE) - - - Busy (ACMERR_BUSY) - - - Header Unprepared (ACMERR_UNPREPARED) - - - Cancelled (ACMERR_CANCELED) - - - invalid line (MIXERR_INVALLINE) - - - invalid control (MIXERR_INVALCONTROL) - - - invalid value (MIXERR_INVALVALUE) - - - - http://msdn.microsoft.com/en-us/library/dd757347(v=VS.85).aspx - - - - - Enumerates the various wave output playback states - - - - - Stopped - - - - - Playing - - - - - Paused - - - - - Supported wave formats for WaveOutCapabilities - - - - - 11.025 kHz, Mono, 8-bit - - - - - 11.025 kHz, Stereo, 8-bit - - - - - 11.025 kHz, Mono, 16-bit - - - - - 11.025 kHz, Stereo, 16-bit - - - - - 22.05 kHz, Mono, 8-bit - - - - - 22.05 kHz, Stereo, 8-bit - - - - - 22.05 kHz, Mono, 16-bit - - - - 
- 22.05 kHz, Stereo, 16-bit - - - - - 44.1 kHz, Mono, 8-bit - - - - - 44.1 kHz, Stereo, 8-bit - - - - - 44.1 kHz, Mono, 16-bit - - - - - 44.1 kHz, Stereo, 16-bit - - - - - 44.1 kHz, Mono, 8-bit - - - - - 44.1 kHz, Stereo, 8-bit - - - - - 44.1 kHz, Mono, 16-bit - - - - - 44.1 kHz, Stereo, 16-bit - - - - - 48 kHz, Mono, 8-bit - - - - - 48 kHz, Stereo, 8-bit - - - - - 48 kHz, Mono, 16-bit - - - - - 48 kHz, Stereo, 16-bit - - - - - 96 kHz, Mono, 8-bit - - - - - 96 kHz, Stereo, 8-bit - - - - - 96 kHz, Mono, 16-bit - - - - - 96 kHz, Stereo, 16-bit - - - - - Represents a Wave file format - - - - The format tag -- always 0x0001 PCM - - - number of channels - - - sample rate - - - for buffer estimation - - - block size of data - - - number of bits per sample of mono data - - - number of following bytes - - - - Initializes a new instance of the class. - PCM 48Khz stereo 16 bit signed, interleaved, 2-channel format - - - - - Initializes a new instance of the class. - - Sample Rate - Number of channels - - - - Initializes a new instance of the class. - - The rate. - The bits. - The channels. - channels - channels - - - - Returns the number of channels (1=mono,2=stereo etc) - - - - - Returns the sample rate (samples per second) - - - - - Returns the average number of bytes used per second - - - - - Returns the block alignment - - - - - Returns the number of bits per sample (usually 16 or 32, sometimes 24 or 8) - Can be 0 for some codecs - - - - - Returns the number of extra bytes used by this waveformat. Often 0, - except for compressed formats which store extra data after the WAVEFORMATEX header - - - - - Gets the size of a wave buffer equivalent to the latency in milliseconds. - - The milliseconds. - The size - - - - Reports this WaveFormat as a string - - String describing the wave format - - - - Compares with another WaveFormat object - - Object to compare to - True if the objects are the same - - - - Provides a Hashcode for this WaveFormat - - A hashcode - - - - WaveHeader interop structure (WAVEHDR) - http://msdn.microsoft.com/en-us/library/dd743837%28VS.85%29.aspx - - - - pointer to locked data buffer (lpData) - - - length of data buffer (dwBufferLength) - - - used for input only (dwBytesRecorded) - - - for client's use (dwUser) - - - assorted flags (dwFlags) - - - loop control counter (dwLoops) - - - PWaveHdr, reserved for driver (lpNext) - - - reserved for driver - - - - Wave Header Flags enumeration - - - - - WHDR_BEGINLOOP - This buffer is the first buffer in a loop. This flag is used only with output buffers. - - - - - WHDR_DONE - Set by the device driver to indicate that it is finished with the buffer and is returning it to the application. - - - - - WHDR_ENDLOOP - This buffer is the last buffer in a loop. This flag is used only with output buffers. - - - - - WHDR_INQUEUE - Set by Windows to indicate that the buffer is queued for playback. - - - - - WHDR_PREPARED - Set by Windows to indicate that the buffer has been prepared with the waveInPrepareHeader or waveOutPrepareHeader function. - - - - - MME Wave function interop - - - - - CALLBACK_NULL - No callback - - - - - CALLBACK_FUNCTION - dwCallback is a FARPROC - - - - - CALLBACK_EVENT - dwCallback is an EVENT handle - - - - - CALLBACK_WINDOW - dwCallback is a HWND - - - - - CALLBACK_THREAD - callback is a thread ID - - - - - WIM_OPEN - - - - - WIM_CLOSE - - - - - WIM_DATA - - - - - WOM_CLOSE - - - - - WOM_DONE - - - - - WOM_OPEN - - - - - A wrapper class for MmException. - - - - - Initializes a new instance of the class. 
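As a quick sanity check on the latency-to-bytes conversion described above: the default PCM format (48 kHz, 16-bit, stereo) averages 48,000 * 2 channels * 2 bytes = 192,000 bytes per second, so a 200 ms latency buffer (the typical figure quoted further down for the audio renderer) comes out to 192,000 * 0.2 = 38,400 bytes, which is already a multiple of the 4-byte block alignment.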
- - The result returned by the Windows API call - The name of the Windows API that failed - - - - Returns the Windows API result - - - - - Helper function to automatically raise an exception on failure - - The result of the API call - The API function name - - - - Creates an error message base don an erro result. - - The result. - The function. - A descriptive rror message - - - - A buffer of Wave samples for streaming to a Wave Output device - - - - - Initializes a new instance of the class. - - WaveOut device to write to - Buffer size in bytes - Stream to provide more data - Lock to protect WaveOut API's from being called on >1 thread - - - - Finalizes an instance of the class. - - - - - Whether the header's in queue flag is set - - - - - The buffer size in bytes - - - - - Releases resources held by this WaveBuffer - - - - - this is called by the Wave callback and should be used to refill the buffer. - This calls the .Read method on the stream - - true when bytes were written. False if no bytes were written. - - - - Releases resources held by this WaveBuffer - - true to release both managed and unmanaged resources; false to release only unmanaged resources. - - - - Writes to wave out. - - waveOutWrite - - - - WaveOutCapabilities structure (based on WAVEOUTCAPS2 from mmsystem.h) - http://msdn.microsoft.com/library/default.asp?url=/library/en-us/multimed/htm/_win32_waveoutcaps_str.asp - - - - - wMid - - - - - wPid - - - - - vDriverVersion - - - - - Product Name (szPname) - - - - - Supported formats (bit flags) dwFormats - - - - - Supported channels (1 for mono 2 for stereo) (wChannels) - Seems to be set to -1 on a lot of devices - - - - - wReserved1 - - - - - Optional functionality supported by the device - - - - - Number of channels supported - - - - - Whether playback rate control is supported - - - - - Whether volume control is supported - - - - - Gets a value indicating whether this device supports independent channel volume control. - - - - - Gets a value indicating whether this device supports pitch control. - - - - - Gets a value indicating whether the device returns sample-accurate position information. - - - - - Gets a value indicating whether the driver is synchronous and will block while playing a buffer. - - - - - The product name - - - - - The device name Guid (if provided) - - - - - The product name Guid (if provided) - - - - - The manufacturer guid (if provided) - - - - - Checks to see if a given SupportedWaveFormat is supported - - The SupportedWaveFormat - true if supported - - - - Flags indicating what features this WaveOut device supports - - - - supports pitch control (WAVECAPS_PITCH) - - - supports playback rate control (WAVECAPS_PLAYBACKRATE) - - - supports volume control (WAVECAPS_VOLUME) - - - supports separate left-right volume control (WAVECAPS_LRVOLUME) - - - (WAVECAPS_SYNC) - - - (WAVECAPS_SAMPLEACCURATE) - - - - A wave player that opens an audio device and continuously feeds it - with audio samples using a wave provider. - - - - - Initializes a new instance of the class. - - The renderer. - - - - Finalizes an instance of the class. - - - - - Gets or sets the desired latency in milliseconds - Should be set before a call to Init - - - - - Gets or sets the number of buffers used - Should be set before a call to Init - - - - - Gets or sets the device number - Should be set before a call to Init - This must be between -1 and DeviceCount - 1. 
- -1 means stick to default device even default device is changed - - - - - Gets a instance indicating the format the hardware is using. - - - - - Playback State - - - - - Gets the capabilities. - - - + - Initializes the specified wave provider. + Adds the blocks of the given media type. - The wave provider. - Can't re-initialize during playback + The t. + The number of blocks that were added - + - Start playing the audio from the WaveStream + Fast, atomioc boolean combining interlocked to write value and volatile to read values + Idea taken from Memory model and .NET operations in article: + http://igoro.com/archive/volatile-keyword-in-c-memory-model-explained/ - + - Pause the audio + Initializes a new instance of the class. - + - Stop and reset the WaveOut device + Initializes a new instance of the class. + if set to true [initial value]. - + - Gets the current position in bytes from the wave output device. - (n.b. this is not the same thing as the position within your reader - stream - it calls directly into waveOutGetPosition) + Gets the latest value written by any of the processors in the machine + Setting - Position in bytes - + - Closes this WaveOut device + Fast, atomioc double combining interlocked to write value and volatile to read values + Idea taken from Memory model and .NET operations in article: + http://igoro.com/archive/volatile-keyword-in-c-memory-model-explained/ - + - Closes the WaveOut device and disposes of buffers + Initializes a new instance of the class. - True if called from Dispose - + - Resume playing after a pause from the same position + Initializes a new instance of the class. + The initial value. - + - Starts the playback thread. + Gets or sets the latest value written by any of the processors in the machine - + - Performs the continuous playback. + Fast, atomioc long combining interlocked to write value and volatile to read values + Idea taken from Memory model and .NET operations in article: + http://igoro.com/archive/volatile-keyword-in-c-memory-model-explained/ - + - Closes the wave device. + Initializes a new instance of the class. - + - Disposes the buffers. + Gets or sets the latest value written by any of the processors in the machine - + - Provides Audio Output capabilities by writing samples to the default audio output device. + A fixed-size buffer that acts as an infinite length one. + This buffer is backed by unmanaged, very fast memory so ensure you call + the dispose method when you are donde using it. - - - + - Initializes a new instance of the class. - - The media element. - - - - Gets the output format of the audio + The locking object to perform synchronization. - + - Gets the parent media element. + To detect redundant calls - + - Gets or sets the volume. - - - The volume. - - - - - Gets or sets the balance (-1.0 to 1.0). + The unmanaged buffer - + - Gets or sets a value indicating whether the wave output is muted. + Initializes a new instance of the class. + + Length of the buffer. + + + + Finalizes an instance of the class. - + - Gets the realtime latency of the audio relative to the internal wall clock. - A negative value means audio is ahead of the wall clock. - A positive value means audio is behind of the wall clock. + Gets the capacity of this buffer. - + - Gets current audio the position. + Gets the current, 0-based read index - + - Gets the desired latency odf the audio device. - Value is always positive and typically 200ms. This means audio gets rendered up to this late behind the wall clock. 
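The "Interlocked write, volatile read" recipe behind the Atomic* helpers above boils down to a few lines. A minimal, self-contained sketch of the boolean flavor follows; the real types also exist for double and long values, and their internals may differ.

    using System.Threading;

    // Volatile.Read observes the latest value written by any core; Interlocked.Exchange
    // provides an atomic, fenced write. Together they form a cheap lock-free boolean flag.
    public sealed class AtomicBooleanSketch
    {
        private int backingValue; // 0 == false, 1 == true

        public AtomicBooleanSketch(bool initialValue = false)
        {
            backingValue = initialValue ? 1 : 0;
        }

        public bool Value
        {
            get { return Volatile.Read(ref backingValue) != 0; }
            set { Interlocked.Exchange(ref backingValue, value ? 1 : 0); }
        }
    }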
+ Gets the maximum rewindable amount of bytes. - + - Gets the speed ratio. + Gets the current, 0-based write index. - + - Renders the specified media block. - - The media block. - The clock position. - - - - Called on every block rendering clock cycle just in case some update operation needs to be performed. - This needs to return immediately so the calling thread is not disturbed. - - The clock position. - - - - Executed when the Play method is called on the parent MediaElement + Gets an the object associated with the last write - + - Executed when the Pause method is called on the parent MediaElement + Gets the available bytes to read. - + - Executed when the Pause method is called on the parent MediaElement + Gets the number of bytes that can be written. - + - Executed when the Close method is called on the parent MediaElement + Gets percentage of used bytes (readbale/available, from 0.0 to 1.0). - + - Executed after a Seek operation is performed on the parent MediaElement + Skips the specified amount requested bytes to be read. + + The requested bytes. + When requested bytes GT readable count + + + + Rewinds the read position by specified requested amount of bytes. + + The requested bytes. + When requested GT rewindable + + + + Reads the specified number of bytes into the target array. + + The requested bytes. + The target. + The target offset. + When requested bytes is greater than readble count + + + + Writes data to the backing buffer using the specified pointer and length. + and associating a write tag for this operation. + + The source. + The length. + The write tag. + if set to true, overwrites the data even if it has not been read. + When read needs to be called more often! + + + + Resets all states as if this buffer had just been created. - - - Waits for the renderer to be ready to render. - - - + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. - - - Called whenever the audio driver requests samples. - Do not call this method directly. - - The render buffer. - The render buffer offset. - The requested bytes. - The number of bytes that were read. - - - - Called when [application exit]. - - The sender. - The instance containing the event data. - - - - Initializes the audio renderer. - Call the Play Method to start reading samples - - - - - Destroys the audio renderer. - Makes it useless. - - - - - Synchronizes audio rendering to the wall clock. - Returns true if additional samples need to be read. - Returns false if silence has been written and no further reading is required. - - The target buffer. - The target buffer offset. - The requested bytes. - True to continue processing. False to write silence. - - - - Reads from the Audio Buffer and stretches the samples to the required requested bytes. - This will make audio samples sound stretched (low pitch). - The result is put to the first requestedBytes count of the ReadBuffer. - requested - - The requested bytes. - - - - Reads from the Audio Buffer and shrinks (averages) the samples to the required requested bytes. - This will make audio samples sound shrunken (high pitch). - The result is put to the first requestedBytes count of the ReadBuffer. - - The requested number of bytes. - if set to true average samples per block. Otherwise, take the first sample per block only - - - - Applies volume and balance to the audio samples storead in RedBuffer and writes them - to the specified target buffer. - - The target buffer. - The target buffer offset. 
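The circular buffer documented above is fixed-size but "acts as infinite" by wrapping its read and write indices. The sketch below shows that wraparound arithmetic with a managed byte array instead of unmanaged memory; member names mirror the documentation but are not the exact API.

using System;

// Conceptual sketch of the fixed-size ring buffer described above.
public sealed class RingBufferSketch
{
    private readonly byte[] buffer;
    private int readIndex;
    private int writeIndex;

    public RingBufferSketch(int capacity) { buffer = new byte[capacity]; }

    public int Capacity => buffer.Length;
    public int ReadableCount { get; private set; }          // bytes available to read
    public int WritableCount => Capacity - ReadableCount;   // bytes available to write

    public void Write(byte[] source, int length)
    {
        if (length > WritableCount)
            throw new InvalidOperationException("Read must be called more often.");

        for (var i = 0; i < length; i++)
        {
            buffer[writeIndex] = source[i];
            writeIndex = (writeIndex + 1) % Capacity;        // wrap around the fixed-size store
        }

        ReadableCount += length;
    }

    public void Read(int requestedBytes, byte[] target, int targetOffset)
    {
        if (requestedBytes > ReadableCount)
            throw new InvalidOperationException("Not enough readable bytes.");

        for (var i = 0; i < requestedBytes; i++)
        {
            target[targetOffset + i] = buffer[readIndex];
            readIndex = (readIndex + 1) % Capacity;
        }

        ReadableCount -= requestedBytes;
    }
}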
- The requested number of bytes. - - + Releases unmanaged and - optionally - managed resources. - - true to release both managed and unmanaged resources; false to release only unmanaged resources. + true to release both managed and unmanaged resources; false to release only unmanaged resources. - + + + Defines a generic interface for synchronized locking mechanisms + + + + + Acquires a writer lock. + The lock is released when the returned locking object is disposed. + + A disposable locking object. + + + + Acquires a reader lock. + The lock is released when the returned locking object is disposed. + + A disposable locking object. + + + + Represents a set of preallocated media blocks of the same media type. + A block buffer contains playback and pool blocks. Pool blocks are blocks that + can be reused. Playback blocks are blocks that have been filled. + This class is thread safe. + + + + + The blocks that are available to be filled. + + + + + The blocks that are available for rendering. + + + + + Controls multiple reads and exclusive writes + + + + + Initializes a new instance of the class. + + The capacity. + Type of the media. + + + + Gets the media type of the block buffer. + + + + + Gets the start time of the first block. + + + + + Gets the end time of the last block. + + + + + Gets the range of time between the first block and the end time of the last block. + + + + + Gets the average duration of the currently available playback blocks. + + + + + Gets a value indicating whether all the durations of the blocks are equal + + + + + Gets the number of available playback blocks. + + + + + Gets the maximum count of this buffer. + + + + + Gets the usage percent from 0.0 to 1.0 + + + + + Gets a value indicating whether the playback blocks are all allocated. + + + + + Holds the duration of all the blocks that have been added in the lifetime of this object. + + + + + Gets the at the specified index. + + + The . + + The index. + The media block + + + + Gets the at the specified timestamp. + + + The . + + At time. + The media block + + + + Gets the percentage of the range for the given time position. + + The position. + The percent of the range + + + + Retrieves the block following the provided current block + + The current block. + The next media block + + + + Determines whether the given render time is within the range of playback blocks. + + The render time. + + true if [is in range] [the specified render time]; otherwise, false. + + + + + Retrieves the index of the playback block corresponding to the specified + render time. This uses very fast binary and linear search commbinations. + If there are no playback blocks it returns -1. + If the render time is greater than the range end time, it returns the last playback block index. + If the render time is less than the range start time, it returns the first playback block index. + + The render time. + The media block's index + + + + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + + + + + Adds a block to the playback blocks by converting the given frame. + If there are no more blocks in the pool, the oldest block is returned to the pool + and reused for the new block. The source frame is automatically disposed. + + The source. + The container. + The filled block. + + + + Clears all the playback blocks returning them to the + block pool. + + + + + Returns a formatted string with information about this buffer + + The formatted string + + + + Block factory method. + + Type of the media. 
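The block buffer documentation above describes an index lookup that clamps out-of-range render times to the first or last block and otherwise binary-searches the ordered playback blocks. A sketch of that lookup, using a stand-in block type rather than the library's own, is below.

using System;
using System.Collections.Generic;

// Stand-in for a media block: ordered by StartTime, covering [StartTime, EndTime).
public sealed class BlockSketch
{
    public TimeSpan StartTime { get; set; }
    public TimeSpan Duration { get; set; }
    public TimeSpan EndTime => StartTime + Duration;
}

public static class BlockSearchSketch
{
    // Returns -1 when empty, clamps to the first/last index when out of range,
    // and otherwise binary-searches for the block containing the render time.
    public static int IndexOf(IReadOnlyList<BlockSketch> blocks, TimeSpan renderTime)
    {
        if (blocks.Count == 0) return -1;
        if (renderTime <= blocks[0].StartTime) return 0;
        if (renderTime >= blocks[blocks.Count - 1].EndTime) return blocks.Count - 1;

        var lo = 0;
        var hi = blocks.Count - 1;
        while (lo <= hi)
        {
            var mid = lo + ((hi - lo) / 2);
            if (renderTime < blocks[mid].StartTime) hi = mid - 1;
            else if (renderTime >= blocks[mid].EndTime) lo = mid + 1;
            else return mid;                                   // StartTime <= t < EndTime
        }

        // Render time fell in a gap between blocks: return the closest index.
        return Math.Max(0, Math.Min(lo, blocks.Count - 1));
    }
}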
+ MediaBlock does not have a valid type + An instance of the block of the specified type + + + + Represents a very simple dictionary for MediaType keys + + The type of the value. + + + + Initializes a new instance of the class. + + + + + Gets or sets the item with the specified key. + return the default value of the value type when the key does not exist. + + The key. + The item + + + + Provides factory methods to create synchronized reader-writer locks + that support a generalized locking and releasing api and syntax. + + + + + Enumerates the locking operations + + + + + Creates a reader-writer lock backed by a standard ReaderWriterLock + + The synchronized locker + + + + Creates a reader-writer lock backed by a ReaderWriterLockSlim + + The synchronized locker + + + + A scaled, preallocated audio frame container. + The buffer is in 16-bit signed, interleaved sample data + + + + + Finalizes an instance of the class. + + + + + Gets a pointer to the first byte of the data buffer. + The format signed 16-bits per sample, channel interleaved + + + + + Gets the length of the buffer in bytes. + + + + + Gets the sample rate. + + + + + Gets the channel count. + + + + + Gets the available samples per channel. + + + + + Gets the media type of the data + + + + + The picture buffer length of the last allocated buffer + + + + + Holds a reference to the last allocated buffer + + + + + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + + + + + Releases unmanaged and - optionally - managed resources. + + true to release both managed and unmanaged resources; false to release only unmanaged resources. + + + + Defaults and constants of the Media Engine + + + + + Gets the assembly location. + + + + + Defines Controller Value Defaults + + + + + The default speed ratio + + + + + The default balance + + + + + The default volume + + + + + The minimum speed ratio + + + + + The maximum speed ratio + + + + + The minimum balance + + + + + The maximum balance + + + + + The maximum volume + + + + + The minimum volume + + + + + Defines decoder output constants for audio streams + + + + + The audio buffer padding + + + + + The audio bits per sample (1 channel only) + + + + + The audio sample format + + + + + The audio channel count + + + + + The audio sample rate (per channel) + + + + + Defines decoder output constants for audio streams + + + + + The video bits per component + + + + + The video bits per pixel + + + + + The video bytes per pixel + + + + + The video pixel format. BGRX, 32bit + + + + + Defines timespans of different priority intervals + + + + + The timer high priority interval for stuff like rendering + + + + + The timer medium priority interval for stuff like property updates + + + + + The timer low priority interval for stuff like logging + + + + + Provides various helpers and extension methods. + + + + + Returns a formatted timestamp string in Seconds + + The ts. + The formatted string + + + + Returns a formatted string with elapsed milliseconds between now and + the specified date. + + The dt. + The formatted string + + + + Returns a fromatted string, dividing by the specified + factor. Useful for debugging longs with byte positions or sizes. + + The ts. + The divide by. + The formatted string + + + + Converts the given value to a value that is of the given multiple. + + The value. + The multiple. + The value + + + + Gets a timespan given a timestamp and a timebase. + + The PTS. + The time base. 
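The synchronized-locking entries above describe reader/writer locks whose acquire methods return a disposable so the lock releases at the end of a using block. A minimal sketch of that pattern over ReaderWriterLockSlim follows; the member names mirror the documentation but are not the library's exact API.

using System;
using System.Threading;

// Disposable reader/writer locking sketch, backed by ReaderWriterLockSlim.
public sealed class SyncLockerSketch : IDisposable
{
    private readonly ReaderWriterLockSlim locker = new ReaderWriterLockSlim();

    public IDisposable AcquireReaderLock()
    {
        locker.EnterReadLock();
        return new Releaser(locker.ExitReadLock);
    }

    public IDisposable AcquireWriterLock()
    {
        locker.EnterWriteLock();
        return new Releaser(locker.ExitWriteLock);
    }

    public void Dispose() => locker.Dispose();

    private sealed class Releaser : IDisposable
    {
        private readonly Action release;
        public Releaser(Action release) { this.release = release; }
        public void Dispose() => release();
    }
}

// Usage: the lock is held only for the lifetime of the using block.
// using (locker.AcquireReaderLock()) { /* read the shared playback blocks */ }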
+ The TimeSpan + + + + Gets a timespan given a timestamp and a timebase. + + The PTS. + The time base. + The TimeSpan + + + + Gets a timespan given a timestamp and a timebase. + + The PTS in seconds. + The time base. + The TimeSpan + + + + Gets a timespan given a timestamp and a timebase. + + The PTS. + The time base. + The TimeSpan + + + + Gets a timespan given a timestamp (in AV_TIME_BASE units) + + The PTS. + The TimeSpan + + + + Gets a timespan given a timestamp (in AV_TIME_BASE units) + + The PTS. + The TimeSpan + + + + Converts a fraction to a double + + The rational. + The value + + + + Determines whether the event is in its set state. + + The event. + + true if the specified m is set; otherwise, false. + + + + + Gets the fundamental (audio or video only) auxiliary media types. + + All. + The main. + The non-main audio or video media types + + + + Excludes the type of the media. + + All. + The main. + An array without the media type + + + + Joins the media types. + + The main. + The with. + An array of the media types + + + + Determines whether the array contains the media type + + All. + The t. + True if it exists in the array + + + + Deep-copies the array + + All. + The copy of the array + + + + Verifies all fundamental (audio and video) components are greater than zero + + All. + The value. + + True if all components are greater than the value + + + + + Gets the sum of all the values in the keyed dictionary. + + All. + The sum of all values. + + + + The load mode of FFmpeg Libraries + + + + + The full features. Tries to load everything + + + + + Loads everything except for AVDevice and AVFilter + + + + + Loads the minimum set for Audio-only programs + + + + + Loads the minimum set for Video-only programs + + + + + Connects handlers between the Media Engine and a platfrom-secific implementation + + + + + Called when [media opening]. + + The sender. + The media options. + The media information. + + + + Called when [media opened]. + + The sender. + + + + Called when [media closed]. + + The sender. + + + + Called when [media failed]. + + The sender. + The e. + + + + Called when [media ended]. + + The sender. + + + + Called when [buffering started]. + + The sender. + + + + Called when [buffering ended]. + + The sender. + + + + Called when [seeking started]. + + The sender. + + + + Called when [seeking ended]. + + The sender. + + + + Called when [message logged]. + + The sender. + The instance containing the event data. + + + + Called when [position changed]. + + The sender. + The position. + + + + Called when an underlying media engine property is changed. + This is used to handle property change notifications + + The sender. + Name of the property. + + + + A very simple and standard interface for message logging + + + + + Logs the specified message of the given type. + + Type of the message. + The message. + + Provides a unified API for media rendering classes - + - Gets the parent media element. + Gets the parent media engine. - + Waits for the renderer to be ready to render. + This is called only once before all Render calls are made - + Executed when the Play method is called on the parent MediaElement - + Executed when the Pause method is called on the parent MediaElement - + Executed when the Pause method is called on the parent MediaElement - + Executed when the Close method is called on the parent MediaElement - + Executed after a Seek operation is performed on the parent MediaElement - + Called when a media block is due rendering. 
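The extension methods above convert FFmpeg presentation timestamps into TimeSpan values using either a rational time base (num/den seconds per unit) or AV_TIME_BASE (microsecond) units. The arithmetic is small enough to show directly; the class below is an illustrative sketch, not the library's helper.

using System;

public static class TimingMathSketch
{
    private const double AvTimeBase = 1000000d; // AV_TIME_BASE: microseconds per second

    // pts * (num / den) seconds, returned as a TimeSpan.
    public static TimeSpan ToTimeSpan(long pts, int timeBaseNum, int timeBaseDen)
    {
        var seconds = pts * ((double)timeBaseNum / timeBaseDen);
        return TimeSpan.FromTicks((long)(seconds * TimeSpan.TicksPerSecond));
    }

    // PTS expressed directly in AV_TIME_BASE (microsecond) units.
    public static TimeSpan FromAvTimeBase(long pts)
        => TimeSpan.FromTicks((long)(pts / AvTimeBase * TimeSpan.TicksPerSecond));
}

// Example: a PTS of 90000 with a 1/90000 time base is exactly one second:
// TimingMathSketch.ToTimeSpan(90000, 1, 90000) == TimeSpan.FromSeconds(1)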
This needs to return immediately so the calling thread is not disturbed. @@ -6338,373 +5000,162 @@ The media block. The clock position. - + Called on every block rendering clock cycle just in case some update operation needs to be performed. This needs to return immediately so the calling thread is not disturbed. The clock position. - + - Subtitle Renderer - Does nothing at this point. - - - - - - The synchronize lock + Defines platform-specific methods - + - Holds the text to be rendered when the Update method is called. + Sets the DLL directory in which external dependencies can be located. + + The path. + True for success. False for failure + + + + Fast pointer memory block copy function + + The target address. + The source address. + Length of the copy. + + + + Fills the memory with the specified value repeated. + + The start address. + The length. + The value. + + + + Contains factory methods and properties containing platfrom-specific implementations + of the functionality that is required by an instance of the Media Engine - + - Holds the text that was last rendered when Update was called. + Retrieves the platform-specific Native methods - + - Initializes a new instance of the class. - - The media element. - - - - Gets the parent media element. + Gets a value indicating whether this instance is in debug mode. - + - Executed when the Close method is called on the parent MediaElement + Gets a value indicating whether this instance is in design time. - + - Executed when the Pause method is called on the parent MediaElement + Creates a renderer of the specified media type. + + Type of the media. + The media engine. + The renderer + + + + Handles global FFmpeg library messages + + The message. + + + + A base class for blocks of the deifferent MediaTypes. + Blocks are the result of decoding and scaling a frame. + Blocks have preallocated buffers wich makes them memory and CPU efficient. + Reuse blocks as much as possible. Once you create a block from a frame, + you don't need the frame anymore so make sure you dispose the frame. - + - Executed when the Play method is called on the parent MediaElement + Gets the media type of the data - + - Executed when the Pause method is called on the parent MediaElement + Gets a value indicating whether the start time was guessed from siblings + or the source frame PTS comes from a NO PTS value - + - Executed after a Seek operation is performed on the parent MediaElement + Gets the time at which this data should be presented (PTS) - + - Waits for the renderer to be ready to render. + Gets the amount of time this data has to be presented - + - Renders the specified media block. - - The media block. - The clock position. - - - - Called when a media block must stop being rendered. - This needs to return immediately so the calling thread is not disturbed. - - The clock position. - - - - Gets or creates the tex blocks that make up the subtitle text and outline. - - The text blocks including the fill and outline (5 total) - - - - Sets the text to be rendered on the text blocks. - Returns immediately because it enqueues the action on the UI thread. - - The text. - - - - Provides Video Image Rendering via a WPF Writable Bitmap - - - - - - The bitmap that is presented to the user. + Gets the end time. - + - Set when a bitmap is being written to the target bitmap + Gets the index of the stream. - + - Initializes a new instance of the class. - - The media element. - - - - Gets the parent media element. + Gets a safe timestamp the the block can be displayed. 
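The platform-specific native methods documented above (set the DLL search directory, fast pointer copy, fill memory) map onto well-known kernel32 entry points. The declarations below are a sketch of how such an interface is commonly backed by P/Invoke; they mirror the documented surface but are not the library's exact implementation.

using System;
using System.Runtime.InteropServices;

internal static class NativeMethodsSketch
{
    // Adds a directory to the DLL search path so the FFmpeg binaries can be located.
    [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
    [return: MarshalAs(UnmanagedType.Bool)]
    internal static extern bool SetDllDirectory(string lpPathName);

    // CopyMemory is exported as RtlMoveMemory; copies length bytes between pointers.
    [DllImport("kernel32.dll", EntryPoint = "RtlMoveMemory")]
    internal static extern void CopyMemory(IntPtr destination, IntPtr source, uint length);

    // FillMemory is exported as RtlFillMemory; writes the same byte value length times.
    [DllImport("kernel32.dll", EntryPoint = "RtlFillMemory")]
    internal static extern void FillMemory(IntPtr destination, uint length, byte fill);
}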
+ Returns StartTime if the duration is Zero or negative. - + - Executed when the Play method is called on the parent MediaElement + Determines whether this media block holds the specified position. + Returns false if it does not have a valid duration. + + The position. + + true if [contains] [the specified position]; otherwise, false. + + + + + Compares the current instance with another object of the same type and returns an integer that indicates whether the current instance precedes, follows, or occurs in the same position in the sort order as the other object. + + An object to compare with this instance. + + A value that indicates the relative order of the objects being compared. The return value has these meanings: Value Meaning Less than zero This instance precedes in the sort order. Zero This instance occurs in the same position in the sort order as . Greater than zero This instance follows in the sort order. + + + + + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. - - - Executed when the Pause method is called on the parent MediaElement - - - - - Executed when the Pause method is called on the parent MediaElement - - - - - Executed when the Close method is called on the parent MediaElement - - - - - Executed after a Seek operation is performed on the parent MediaElement - - - - - Waits for the renderer to be ready to render. - - - - - Renders the specified media block. - This needs to return immediately so the calling thread is not disturbed. - - The media block. - The clock position. - - - - Called on every block rendering clock cycle just in case some update operation needs to be performed. - This needs to return immediately so the calling thread is not disturbed. - - The clock position. - - - - Initializes the target bitmap. Pass a null block to initialize with the default video properties. - - The block. - - - - Applies the scale transform according to the block's aspect ratio. - - The b. - - - - Defines the different log message types received by the log handler - - - - - The none messge type - - - - - The information messge type - - - - - The debug messge type - - - - - The trace messge type - - - - - The error messge type - - - - - The warning messge type - - - - - A Media Container Exception - - - - - - Initializes a new instance of the class. - - The message that describes the error. - - - - Represents a set of options that are used to initialize a media container. - - - - - Initializes a new instance of the class. - - - - - Gets or sets the forced input format. If let null or empty, - the input format will be selected automatically. - - - - - Gets or sets a value indicating whether [enable low resource]. - In theroy this should be 0,1,2,3 for 1, 1/2, 1,4 and 1/8 resolutions. - TODO: We are for now just supporting 1/2 rest (true value) - Port of lowres. - - - - - Gets or sets a value indicating whether [enable fast decoding]. - Port of fast - - - - - A dictionary of Format options. - Supported format options are specified in https://www.ffmpeg.org/ffmpeg-formats.html#Format-Options - - - - - Gets the codec options. - Codec options are documented here: https://www.ffmpeg.org/ffmpeg-codecs.html#Codec-Options - Port of codec_opts - - - - - Gets or sets a value indicating whether experimental hardware acceleration is enabled. - Defaults to false. This feature is experimental. - - - - - Gets or sets a value indicating whether PTS are generated automatically and not read - from the packets themselves. Defaults to false. 
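The media block documentation above defines two small behaviors worth seeing concretely: a block "contains" a position only when it has a valid (positive) duration, and blocks order themselves by presentation time. A stand-in sketch:

using System;

// Stand-in for the documented media block semantics; not the library class itself.
public sealed class MediaBlockSketch : IComparable<MediaBlockSketch>
{
    public TimeSpan StartTime { get; set; }
    public TimeSpan Duration { get; set; }
    public TimeSpan EndTime => StartTime + Duration;

    // A block only holds a position when its duration is positive.
    public bool Contains(TimeSpan position)
        => Duration > TimeSpan.Zero && position >= StartTime && position < EndTime;

    // Blocks sort by presentation (start) time.
    public int CompareTo(MediaBlockSketch other) => StartTime.CompareTo(other.StartTime);
}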
- Port of genpts - - - - - Gets or sets the maximum duration to be analyzed before ifentifying stream information. - In realtime streams this can be reduced to reduce latency (i.e. TimeSpan.Zero) - - - - - Gets or sets the amount of bytes to probe before getting the stram info - In realtime streams probesize can be reduced to reduce latency. - Minimum value is 32. - - - - - Gets or sets the amount of time to wait for a an open or read operation to complete. - - - - - Prevent reading from audio stream components. - Port of audio_disable - - - - - Prevent reading from video stream components. - Port of video_disable - - - - - Prevent reading from subtitle stream components. - Port of subtitle_disable - Subtitles are not yet first-class citizens in FFmpeg and - this is why they are disabled by default. - - - - - Allows for a custom video filter string. - Please see: https://ffmpeg.org/ffmpeg-filters.html#Video-Filters - - - - - Initially contains the best suitable video stream. - Can be changed to a different stream reference. - - - - - Allows for a custom audio filter string. - Please see: https://ffmpeg.org/ffmpeg-filters.html#Audio-Filters - - - - - Initially contains the best suitable audio stream. - Can be changed to a different stream reference. - - - - - Initially contains the best suitable subititle stream. - Can be changed to a different stream reference. - - - + Represents a set of codec options associated with a stream specifier. - + Holds the internal list of option items - + - Initializes a new instance of the class. + Initializes a new instance of the class. - + Adds an option @@ -6712,7 +5163,7 @@ The value. Type of the stream. - + Adds an option @@ -6720,7 +5171,7 @@ The value. Index of the stream. - + Adds an option @@ -6729,7 +5180,7 @@ Type of the stream. Index of the stream. - + Retrieves a dictionary with the options for the specified codec. Port of filter_codec_opts @@ -6740,7 +5191,7 @@ The codec. The filtered options - + Retrieves an array of dictionaries, one for each stream index https://ffmpeg.org/ffplay.html#toc-Options @@ -6749,59 +5200,867 @@ The format. The options per stream - + Converts a character to a media type. The c. The media type - + - Represents the event arguments of the MediaOpening routed event. - - - - - - Initializes a new instance of the class. - - The routed event. - The source. - The options. - The input information. - - - - Set or change the options before the media is opened. + Well-known codec option names - + - Provides internal details of the media, inclusing its component streams. - Typically, options are set based on what this information contains. + The threads - + - A strongly-typed resource class, for looking up localized strings, etc. + The reference counted frames - + - Returns the cached ResourceManager instance used by this class. + The low resource - + - Overrides the current thread's CurrentUICulture property for all - resource lookups using this strongly typed resource class. + A Media Container Exception + + + + + + Initializes a new instance of the class. + + The message that describes the error. + + + + Media States compatible with MediaState enumeration - + - Looks up a localized resource of type System.Drawing.Bitmap. 
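The codec options described above are keyed by stream specifiers: an option can apply to every stream, to one media type ('v', 'a', 's'), or to a single stream index, and a filter step (the port of filter_codec_opts) picks the entries relevant to one stream. The sketch below illustrates only that concept; the data structures and names are assumptions, not the library's own.

using System.Collections.Generic;
using System.Linq;

public sealed class CodecOptionSketch
{
    public string Key { get; set; }
    public string Value { get; set; }
    public string StreamSpecifier { get; set; } // "" = all streams, "v"/"a"/"s" = type, "2" = index

    public bool AppliesTo(char mediaTypeChar, int streamIndex)
        => string.IsNullOrEmpty(StreamSpecifier)
           || StreamSpecifier == mediaTypeChar.ToString()
           || StreamSpecifier == streamIndex.ToString();
}

public static class CodecOptionFilterSketch
{
    // filter_codec_opts-style lookup: collect the options that apply to a single stream.
    public static Dictionary<string, string> ForStream(
        IEnumerable<CodecOptionSketch> options, char mediaTypeChar, int streamIndex)
        => options.Where(o => o.AppliesTo(mediaTypeChar, streamIndex))
                  .ToDictionary(o => o.Key, o => o.Value);
}

// Example: { Key = "threads", Value = "auto", StreamSpecifier = "v" } applies to video streams only.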
+ The manual state + + + The play state + + + + + The close state + + + + + The pause state + + + + + The stop state + + + + + Contains options for the format context as documented: + https://ffmpeg.org/ffmpeg-formats.html#Format-Options + TODO: There are still quite a bit of options that have not been implemented. + + + + + Port of avioflags direct + + + + + Set probing size in bytes, i.e. the size of the data to analyze to get stream information. + A higher value will enable detecting more information in case it is dispersed into the stream, + but will increase latency. Must be an integer not lesser than 32. It is 5000000 by default. + + + + + Set packet size. + + + + + Ignore index. + Port of ffflags + + + + + Enable fast, but inaccurate seeks for some formats. + Port of ffflags + + + + + Generate PTS. + Port of genpts + + + + + Do not fill in missing values that can be exactly calculated. + Port of ffflags + + + + + Ignore DTS. + Port of ffflags + + + + + Discard corrupted frames. + Port of ffflags + + + + + Try to interleave output packets by DTS. + Port of ffflags + + + + + Do not merge side data. + Port of ffflags + + + + + Enable RTP MP4A-LATM payload. + Port of ffflags + + + + + Reduce the latency introduced by optional buffering + Port of ffflags + + + + + Stop muxing at the end of the shortest stream. + It may be needed to increase max_interleave_delta to avoid flushing the longer streams before EOF. + Port of ffflags + + + + + Allow seeking to non-keyframes on demuxer level when supported if set to 1. Default is 0. + + + + + Gets or sets the maximum duration to be analyzed before ifentifying stream information. + In realtime streams this can be reduced to reduce latency (i.e. TimeSpan.Zero) + + + + + Set decryption key. + + + + + Holds media information about the input, its chapters, programs and individual stream components + + + + + Initializes a new instance of the class. + + The container. + + + + Gets the input URL string used to access and create the media container + + + + + Gets the name of the container format. + + + + + Gets the metadata for the input. This may include stuff like title, creation date, company name, etc. + Individual stream components, chapters and programs may contain additional metadata. + + + + + Gets the duration of the input as reported by the container format. + Individual stream components may have different values + + + + + Gets the start timestamp of the input as reported by the container format. + Individual stream components may have different values + + + + + If available, returns a non-zero value as reported by the container format. + + + + + Gets a list of chapters + + + + + Gets a list of programs with their associated streams. + + + + + Gets the dictionary of stream information components by stream index. + + + + + Provides access to the best streams of each media type found in the container. + This uses some internal FFmpeg heuristics. + + + + + Extracts the stream infos from the input. + + The ic. + The list of stream infos + + + + Finds the best streams for audio video, and subtitles. + + The ic. + The streams. + The star infos + + + + Extracts the chapters from the input. + + The ic. + The chapters + + + + Extracts the programs from the input and creates associations between programs and streams. + + The ic. + The streams. + The program information + + + + Represents media stream information + + + + + Gets the stream identifier. This is different from the stream index. + Typically this value is not very useful. 
+ + + + + Gets the index of the stream. + + + + + Gets the type of the codec. + + + + + Gets the name of the codec type. Audio, Video, Subtitle, Data, etc. + + + + + Gets the codec identifier. + + + + + Gets the name of the codec. + + + + + Gets the codec profile. Only valid for H.264 or + video codecs that use profiles. Otherwise empty. + + + + + Gets the codec tag. Not very useful except for fixing bugs with + some demuxer scenarios. + + + + + Gets a value indicating whether this stream has closed captions. + Typically this is set for video streams. + + + + + Gets a value indicating whether this stream contains lossless compressed data. + + + + + Gets the pixel format. Only valid for Vide streams. + + + + + Gets the width of the video frames. + + + + + Gets the height of the video frames. + + + + + Gets the field order. This is useful to determine + if the video needs deinterlacing + + + + + Gets the video color range. + + + + + Gets the audio sample rate. + + + + + Gets the audio sample format. + + + + + Gets the stream time base unit in seconds. + + + + + Gets the sample aspect ratio. + + + + + Gets the display aspect ratio. + + + + + Gets the reported bit rate. 9 for unavalable. + + + + + Gets the maximum bit rate for variable bitrate streams. 0 if unavailable. + + + + + Gets the number of frames that were read to obtain the stream's information. + + + + + Gets the number of reference frames. + + + + + Gets the average FPS reported by the stream. + + + + + Gets the real (base) framerate of the stream + + + + + Gets the fundamental unit of time in 1/seconds used to represent timestamps in the stream, according to the stream data + + + + + Gets the fundamental unit of time in 1/seconds used to represent timestamps in the stream ,accoring to the codec + + + + + Gets the disposition flags. + Please see ffmpeg.AV_DISPOSITION_* fields. + + + + + Gets the start time. + + + + + Gets the duration. + + + + + Gets the stream's metadata. + + + + + Gets the language string from the stream's metadata. + + + + + Represents a chapter within a container + + + + + Gets the chapter index. + + + + + Gets the chapter identifier. + + + + + Gets the start time of the chapter. + + + + + Gets the end time of the chapter. + + + + + Gets the chapter metadata. + + + + + Represents a program and its associated streams within a container. + + + + + Gets the program number. + + + + + Gets the program identifier. + + + + + Gets the program metadata. + + + + + Gets the associated program streams. + + + + + Gets the name of the program. Empty if unavailable. + + + + + A dictionary containing generic input options for both: + Global Codec Options: https://www.ffmpeg.org/ffmpeg-all.html#Codec-Options + Demuxer-Private options: https://ffmpeg.org/ffmpeg-all.html#Demuxers + + + + + Initializes a new instance of the class. + + + + + A collection of well-known demuxer-specific, non-global format options + TODO: Implement some of the more common names maybe? + + + + + mpegts + + + + + Represents the contents of a logging message that was sent to the log manager. + + + + + Initializes a new instance of the class. + + The media element. + Type of the message. + The message. + + + + Gets the intance of the MediaElement that generated this message. + When null, it means FFmpeg generated this message. + + + + + Gets the timestamp. + + + + + Gets the type of the message. + + + + + Gets the contents of the message. 
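The stream information above exposes both a sample aspect ratio and a display aspect ratio. The relationship is simple arithmetic: DAR = (width x SAR.num) : (height x SAR.den), reduced to lowest terms. A small sketch:

using System;

public static class AspectRatioMathSketch
{
    public static Tuple<int, int> DisplayAspectRatio(int width, int height, int sarNum, int sarDen)
    {
        var num = width * Math.Max(sarNum, 1);   // treat an unset SAR as 1:1
        var den = height * Math.Max(sarDen, 1);
        var gcd = Gcd(num, den);
        return Tuple.Create(num / gcd, den / gcd);
    }

    private static int Gcd(int a, int b)
    {
        while (b != 0) { var t = b; b = a % b; a = t; }
        return a;
    }
}

// Example: 1440x1080 video with a 4:3 sample aspect ratio displays as 16:9.
// AspectRatioMathSketch.DisplayAspectRatio(1440, 1080, 4, 3) == (16, 9)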
+ + + + + Defines the different log message types received by the log handler + + + + + The none messge type + + + + + The information messge type + + + + + The debug messge type + + + + + The trace messge type + + + + + The error messge type + + + + + The warning messge type + + + + + Represents a set of options that are used to initialize a media container. + + + + + Initializes a new instance of the class. + + + + + Gets or sets the forced input format. If let null or empty, + the input format will be selected automatically. + + + + + Gets or sets a value indicating whether [enable low resource]. + In theroy this should be 0,1,2,3 for 1, 1/2, 1,4 and 1/8 resolutions. + TODO: We are for now just supporting 1/2 rest (true value) + Port of lowres. + + + + + Gets or sets a value indicating whether [enable fast decoding]. + Port of fast + + + + + A dictionary containing generic input options for both: + Global Codec Options: https://www.ffmpeg.org/ffmpeg-all.html#Codec-Options + Demuxer-Private Options: https://ffmpeg.org/ffmpeg-all.html#Demuxers + + + + + Gets the codec options. + Codec options are documented here: https://www.ffmpeg.org/ffmpeg-codecs.html#Codec-Options + Port of codec_opts + + + + + Contains options for the format context as documented: + https://ffmpeg.org/ffmpeg-formats.html#Format-Options + + + + + Gets or sets a value indicating whether experimental hardware acceleration is enabled. + Defaults to false. This feature is experimental. + + + + + Gets or sets the amount of time to wait for a an open or read operation to complete. + + + + + Prevent reading from audio stream components. + Port of audio_disable + + + + + Prevent reading from video stream components. + Port of video_disable + + + + + Prevent reading from subtitle stream components. + Port of subtitle_disable + Subtitles are not yet first-class citizens in FFmpeg and + this is why they are disabled by default. + + + + + Allows for a custom video filter string. + Please see: https://ffmpeg.org/ffmpeg-filters.html#Video-Filters + + + + + Initially contains the best suitable video stream. + Can be changed to a different stream reference. + + + + + Allows for a custom audio filter string. + Please see: https://ffmpeg.org/ffmpeg-filters.html#Audio-Filters + + + + + Initially contains the best suitable audio stream. + Can be changed to a different stream reference. + + + + + Initially contains the best suitable subititle stream. + Can be changed to a different stream reference. + + + + + Enumerates the different Media Types compatible with AVMEDIATYPE_* constants + defined by FFmpeg + + + + + Represents an unexisting media type (-1) + + + + + The video media type (0) + + + + + The audio media type (1) + + + + + The subtitle media type (3) + + + + + A subtitle frame container. Simply contains text lines. + + + + + Gets the media type of the data + + + + + Gets the lines of text for this subtitle frame with all formatting stripped out. + + + + + Gets the original text in SRT or ASS fromat. + + + + + Gets the type of the original text. + Returns None when it's a bitmap or when it's None + + + + + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + + + + + A pre-allocated, scaled video block. The buffer is in BGR, 24-bit format + + + + + Finalizes an instance of the class. + + + + + Gets the media type of the data + + + + + Gets a pointer to the first byte of the data buffer. + The format is 32-bit BGRA + + + + + Gets the length of the buffer in bytes. 
+ + + + + The picture buffer stride. + Pixel Width * 32-bit color (4 byes) + alignment (typically 0 for modern hw). + + + + + Gets the number of horizontal pixels in the image. + + + + + Gets the number of vertical pixels in the image. + + + + + Gets the width of the aspect ratio. + + + + + Gets the height of the aspect ratio. + + + + + Gets the SMTPE time code. + + + + + Gets the display picture number (frame number). + If not set by the decoder, this attempts to obtain it by dividing the start time by the + frame duration + + + + + Gets the coded picture number set by the decoder. + + + + + Gets the closed caption packets for this video block. + + + + + The picture buffer length of the last allocated buffer + + + + + Holds a reference to the last allocated buffer + + + + + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + + + + + Releases unmanaged and - optionally - managed resources. + + true to release both managed and unmanaged resources; false to release only unmanaged resources. + diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/ffme.win.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/ffme.win.dll new file mode 100644 index 0000000..197ef17 Binary files /dev/null and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/ffme.win.dll differ diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/ffme.win.xml b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/ffme.win.xml new file mode 100644 index 0000000..f06bb3d --- /dev/null +++ b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffme/ffme.win.xml @@ -0,0 +1,3065 @@ + + + + ffme.win + + + + + Contains metadata about a raw bitmap back-buffer + + + + + Initializes a new instance of the class. + + The scan0. + The stride. + The bytes per pixel. + Width of the pixel. + Height of the pixel. + The dpi x. + The dpi y. + + + + Gets a pointer to the raw pixel data + + + + + Gets the byte width of each row of pixels + + + + + Gets the bits per pixel. + + + + + Gets the bytes per pixel. + + + + + Gets width of the bitmap + + + + + Gets height of the bitmap + + + + + Gets the DPI on the X axis + + + + + Gets the DPI on the Y axis + + + + + Gets the update rect. + + + + + Creates a buffer reference from the bitmap. Please Lock the bitmap before calling this method. + + The w. + The corresponding buffer + + + + Provides the audio samples rendering payload as event arguments. + + + + + + Initializes a new instance of the class. + + The buffer. + The length. + The stream. + The start time. + The duration. + The clock. + + + + Gets a pointer to the samples buffer. + Samples are provided in PCM 16-bit signed, interleaved stereo. + + + + + Gets the length in bytes of the samples buffer. + + + + + Gets the number of samples in 1 second. + + + + + Gets the number of channels. + + + + + Gets the number of bits per sample. + + + + + Gets the number of samples in the buffer for all channels. + + + + + Gets the number of samples in the buffer per channel. + + + + + A base class to represent media block + rendering event arguments. + + + + + + Initializes a new instance of the class. + + The stream. + The position. + The duration. + The clock. + + + + Provides Stream Information coming from the media container. + + + + + Gets the clock position at which the media + was called for rendering + + + + + Gets the starting time at which this media + has to be presented. + + + + + Gets how long this media has to be presented. 
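The audio rendering payload documented above is a pointer to PCM 16-bit signed, interleaved samples plus a length in bytes. Consuming it means walking the buffer two bytes at a time; the sketch below computes a simple peak level that way. Only the buffer layout comes from the documentation; the helper itself is illustrative.

using System;
using System.Runtime.InteropServices;

public static class AudioLevelSketch
{
    // Returns the peak amplitude (0.0 to 1.0) across all channels in the buffer.
    public static double Peak(IntPtr buffer, int bufferLength)
    {
        var peak = 0;
        for (var offset = 0; offset + 1 < bufferLength; offset += 2)
        {
            var sample = Math.Abs((int)Marshal.ReadInt16(buffer, offset)); // one 16-bit sample
            if (sample > peak) peak = sample;
        }

        return peak / 32768d; // short.MaxValue + 1
    }
}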
+ + + + + Provides the subtitles rendering payload as event arguments. + + + + + + Initializes a new instance of the class. + + The text. + The original text. + The format. + The stream. + The start time. + The duration. + The clock. + + + + Gets the text stripped out of ASS or SRT formatting. + This is what the default subtitle renderer will display + on the screen. + + + + + Gets the text as originally decoded including + all markup and formatting. + + + + + Gets the type of subtitle format the original + subtitle text is in. + + + + + When set to true, clears the current subtitle and + prevents the subtitle block from being rendered. + + + + + The video rendering event arguments + + + + + + Initializes a new instance of the class. + + The bitmap. + The stream. + The closed captions. + The smtpe timecode. + The picture number. + The start time. + The duration. + The clock. + + + + Gets the writable bitmap filled with the video frame pixels. + Feel free to capture or change this buffer. + + + + + Gets the closed caption decoded packets. + + + + + Gets the display picture number (frame number). + If not set by the decoder, this attempts to obtain it by dividing the start time by the + frame duration + + + + + Gets the SMTPE time code. + + + + + Contains the Message Logged Event Arguments + + + + + + Initializes a new instance of the class. + + The message. + + + + Gets the intance of the MediaElement that generated this message. + When null, it means FFmpeg generated this message. + + + + + Gets the timestamp. + + + + + Gets the type of the message. + + + + + Gets the contents of the message. + + + + + Contains the position changed routed event args + + + + + + Initializes a new instance of the class. + + The routed event. + The source. + The position. + + + + Gets the position. + + + + + Represents the event arguments of the MediaOpening routed event. + + + + + + Initializes a new instance of the class. + + The routed event. + The source. + The options. + The input information. + + + + Set or change the options before the media is opened. + + + + + Provides internal details of the media, inclusing its component streams. + Typically, options are set based on what this information contains. + + + + + Defines a generic graphical context (compatibility between WPF and WinForms apps) + + + + + Gets a value indicating whetherthe context is in design time + + + + + Returns true if the graphical context is valid. + + + + + Enqueues a UI call + + The priority. + The callback. + The arguments. + + + + Synchronously invokes the call on the UI thread + + The priority. + The action. + + + + The Media engine connector + + + + + + Initializes a new instance of the class. + + The control. + + + + Called when [buffering ended]. + + The sender. + + + + Called when [buffering started]. + + The sender. + + + + Called when [media closed]. + + The sender. + + + + Called when [media ended]. + + The sender. + + + + Called when [media failed]. + + The sender. + The e. + + + + Called when [media opened]. + + The sender. + + + + Called when [media opening]. + + The sender. + The media options. + The media information. + + + + Called when [message logged]. + + The sender. + The instance containing the event data. + + + + Called when [position changed]. + + The sender. + The position. + + + + Called when an underlying media engine property is changed. + This is used to handle property change notifications + + The sender. + Name of the property. + + + + Called when [seeking ended]. + + The sender. 
+ + + + Called when [seeking started]. + + The sender. + + + + Windows-specific native methods + + + + + Initializes static members of the class. + + + + + Prevents a default instance of the class from being created. + + + + + Gets the instance. + + + The instance. + + + + + Fast pointer memory block copy function + + The target address. + The source address. + Length of the copy. + + + + Fills the memory with the specified value repeated. + + The start address. + The length. + The value. + + + + Sets the DLL directory in which external dependencies can be located. + + The path. + + True for success. False for failure + + + + + Contains Interop native methods + + + + + Sets the DLL directory in which external dependencies can be located. + + the full path. + True if set, false if not set + + + + Fast pointer memory block copy function + + The destination. + The source. + The length. + + + + Fills the memory. + + The destination. + The length. + The fill. + + + + Root for platform-specific implementations + + + + + + Initializes static members of the class. + + + + + Prevents a default instance of the class from being created. + + Unable to get a valid GUI context. + + + + Gets the instance. + + + The instance. + + + + + Retrieves the platform-specific Native methods + + + + + Gets the GUI contaxt implementation. + + + + + Gets a value indicating whether this instance is in debug mode. + + + + + Gets a value indicating whether this instance is in design time. + + + + + Creates a renderer of the specified media type. + + Type of the media. + The media engine. + + The renderer + + When the media type is not supported + + + + Handles global FFmpeg library messages + + The message. + + + + The Windows forms graphical context + + + + + + The application synchronization context + + + + + Initializes static members of the class. + + + + + Prevents a default instance of the class from being created. + + + + + Gets the current. + + + + + Gets a value indicating whetherthe context is in design time + + + + + Returns true if this context is valid. + + + + + Enqueues a UI call + + The priority. + The callback. + The arguments. + + + + Synchronously invokes the call on the UI thread + + The priority. + The action. + + + + The WPF graphical context + + + + + + The WPF dispatcher + + + + + Initializes static members of the class. + + + + + Prevents a default instance of the class from being created. + + + + + Gets the current instance. + + + + + Gets a value indicating whetherthe context is in design time + + + + + Returns true if this context is valid. + + + + + Enqueues a UI call + + The priority. + The callback. + The arguments. + + + + Synchronously invokes the call on the UI thread + + The priority. + The action. + + + + SoundTouch audio processing library wrapper (SoundTouch.cs) + + Original code by + Copyright (c) Olli Parviainen + http://www.surina.net/soundtouch + LGPL License + + Modified Code by: + Mario Di Vece + + Changes: + Set-prefixed methods to proety setters + Native wrappers to NativeMethods class name + Adding enum with settings as defined in the header file + Setttings getters and setters as indexers + Implemented Dispose pattern correctly. + + + + + Initializes a new instance of the class. + + + + + Finalizes an instance of the class. + + + + + Settings as defined in SoundTouch.h + + + + + Enable/disable anti-alias filter in pitch transposer (0 = disable) + + + + + Pitch transposer anti-alias filter length (8 .. 
128 taps, default = 32) + + + + + Enable/disable quick seeking algorithm in tempo changer routine + (enabling quick seeking lowers CPU utilization but causes a minor sound + quality compromising) + + + + + Time-stretch algorithm single processing sequence length in milliseconds. This determines + to how long sequences the original sound is chopped in the time-stretch algorithm. + See "STTypes.h" or README for more information. + + + + + Time-stretch algorithm seeking window length in milliseconds for algorithm that finds the + best possible overlapping location. This determines from how wide window the algorithm + may look for an optimal joining location when mixing the sound sequences back together. + See "STTypes.h" or README for more information. + + + + + Time-stretch algorithm overlap length in milliseconds. When the chopped sound sequences + are mixed back together, to form a continuous sound stream, this parameter defines over + how long period the two consecutive sequences are let to overlap each other. + See "STTypes.h" or README for more information. + + + + + Call "getSetting" with this ID to query processing sequence size in samples. + This value gives approximate value of how many input samples you'll need to + feed into SoundTouch after initial buffering to get out a new batch of + output samples. + + This value does not include initial buffering at beginning of a new processing + stream, use SETTING_INITIAL_LATENCY to get the initial buffering size. + + Notices: + - This is read-only parameter, i.e. setSetting ignores this parameter + - This parameter value is not constant but change depending on + tempo/pitch/rate/samplerate settings. + + + + + Call "getSetting" with this ID to query nominal average processing output + size in samples. This value tells approcimate value how many output samples + SoundTouch outputs once it does DSP processing run for a batch of input samples. + + Notices: + - This is read-only parameter, i.e. setSetting ignores this parameter + - This parameter value is not constant but change depending on + tempo/pitch/rate/samplerate settings. + + + + + Call "getSetting" with this ID to query initial processing latency, i.e. + approx. how many samples you'll need to enter to SoundTouch pipeline before + you can expect to get first batch of ready output samples out. + + After the first output batch, you can then expect to get approx. + SETTING_NOMINAL_OUTPUT_SEQUENCE ready samples out for every + SETTING_NOMINAL_INPUT_SEQUENCE samples that you enter into SoundTouch. + + Example: + processing with parameter -tempo=5 + => initial latency = 5509 samples + input sequence = 4167 samples + output sequence = 3969 samples + + Accordingly, you can expect to feed in approx. 5509 samples at beginning of + the stream, and then you'll get out the first 3969 samples. After that, for + every approx. 4167 samples that you'll put in, you'll receive again approx. + 3969 samples out. + + This also means that average latency during stream processing is + INITIAL_LATENCY-OUTPUT_SEQUENCE/2, in the above example case 5509-3969/2 + = 3524 samples + + Notices: + - This is read-only parameter, i.e. setSetting ignores this parameter + - This parameter value is not constant but change depending on + tempo/pitch/rate/samplerate settings. + + + + + Get SoundTouch version string + + + + + Gets a value indicating whether the SoundTouch Library (dll) is available + + + + + Returns number of processed samples currently available in SoundTouch for immediate output. 
+ + + + + Returns number of samples currently unprocessed in SoundTouch internal buffer + + Number of sample frames + + + + Check if there aren't any samples available for outputting. + + nonzero if there aren't any samples available for outputting + + + + Sets the number of channels + Value: 1 = mono, 2 = stereo, n = multichannel + + + + + Sets sample rate. + Value: Sample rate, e.g. 44100 + + + + + Sets new tempo control value. + Value: Tempo setting. Normal tempo = 1.0, smaller values + represent slower tempo, larger faster tempo. + + + + + Sets new tempo control value as a difference in percents compared + to the original tempo (-50 .. +100 %); + + + + + Sets new rate control value. + Rate setting. Normal rate = 1.0, smaller values + represent slower rate, larger faster rate. + + + + + Sets new rate control value as a difference in percents compared + to the original rate (-50 .. +100 %); + Value: Rate setting is in % + + + + + Sets new pitch control value. + Value: Pitch setting. Original pitch = 1.0, smaller values + represent lower pitches, larger values higher pitch. + + + + + Sets pitch change in octaves compared to the original pitch + (-1.00 .. +1.00 for +- one octave); + Value: Pitch setting in octaves + + + + + Sets pitch change in semi-tones compared to the original pitch + (-12 .. +12 for +- one octave); + Value: Pitch setting in semitones + + + + + Changes or gets a setting controlling the processing system behaviour. See the + 'SETTING_...' defines for available setting ID's. + + + The . + + The setting identifier. + The value of the setting + + + + Flushes the last samples from the processing pipeline to the output. + Clears also the internal processing buffers. + Note: This function is meant for extracting the last samples of a sound + stream. This function may introduce additional blank samples in the end + of the sound stream, and thus it's not recommended to call this function + in the middle of a sound stream. + + + + + Clears all the samples in the object's output and internal processing + buffers. + + + + + Adds 'numSamples' pcs of samples from the 'samples' memory position into + the input of the object. Notice that sample rate _has_to_ be set before + calling this function, otherwise throws a runtime_error exception. + + Sample buffer to input + Number of sample frames in buffer. Notice + that in case of multi-channel sound a single sample frame contains + data for all channels + + + + int16 version of putSamples(): This accept int16 (short) sample data + and internally converts it to float format before processing + + Sample input buffer. + Number of sample frames in buffer. Notice + that in case of multi-channel sound a single + sample frame contains data for all channels. + + + + Receive processed samples from the processor. + + Buffer where to copy output samples + Max number of sample frames to receive + The number of samples received + + + + int16 version of receiveSamples(): This converts internal float samples + into int16 (short) return data type + + Buffer where to copy output samples. + How many samples to receive at max. + Number of received sample frames + + + + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + + + + + Releases unmanaged and - optionally - managed resources. + + true to release both managed and unmanaged resources; false to release only unmanaged resources. + + + + Provides direct access to mapped DLL methods + + + + + Represents a control that contains audio and/or video. 
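The SoundTouch wrapper documented above follows a feed/drain model: configure channels, sample rate and tempo, put interleaved 16-bit sample frames in, then receive however many processed frames are available. The sketch below shows that loop against a hypothetical interface declared here only so the example is self-contained; the real wrapper's member names and signatures may differ.

using System;

// Hypothetical surface matching the documented wrapper; declared only for this sketch.
public interface ISoundTouchProcessorSketch
{
    uint Channels { set; }      // 1 = mono, 2 = stereo
    uint SampleRate { set; }    // e.g. 44100
    double Tempo { set; }       // 1.0 = normal speed
    void PutSamples(short[] samples, uint sampleFrames);
    uint ReceiveSamples(short[] buffer, uint maxFrames);
}

public static class TimeStretchSketch
{
    public static short[] Stretch(ISoundTouchProcessorSketch soundTouch, short[] input, int channels, int sampleRate)
    {
        soundTouch.Channels = (uint)channels;
        soundTouch.SampleRate = (uint)sampleRate;
        soundTouch.Tempo = 1.05; // 5% faster without changing pitch

        soundTouch.PutSamples(input, (uint)(input.Length / channels));

        var output = new short[input.Length];
        var framesOut = soundTouch.ReceiveSamples(output, (uint)(output.Length / channels));

        Array.Resize(ref output, (int)framesOut * channels); // keep only the frames produced
        return output;
    }
}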
+ In contrast with System.Windows.Controls.MediaElement, this version uses + the FFmpeg library to perform reading and decoding of media streams. + + + + + + + + + Occurs right before the video is presented on the screen. + You can update the pizels on the bitmap before it is rendered on the screen. + Or you could take a screenshot. + Ensure you handle this very quickly as it runs on the UI thread. + + + + + Occurs right before the audio is added to the audio buffer. + You can update the bytes before they are enqueued. + Ensure you handle this quickly before you get choppy audio. + + + + + Occurs right before the subtitles are rendered. + You can update the text. + Ensure you handle this quickly before you get choppy subtitles. + + + + + Raises the rendering video event. + + The block. + The bitmap. + The clock. + + + + Raises the rendering audio event. + + The audio block. + The clock. + + + + Raises the rendering subtitles event. + + The block. + The clock. + True if the rendering should be prevented + + + + IUriContext BaseUri backing + + + + + Holds the Media Engine + + + + + Initializes static members of the class. + + + + + Initializes a new instance of the class. + + + + + Occurs when a logging message from the FFmpeg library has been received. + This is shared across all instances of Media Elements + + + + + Occurs when a logging message has been logged. + This does not include FFmpeg messages. + + + + + Multicast event for property change notifications. + + + + + Gets or sets the FFmpeg path from which to load the FFmpeg binaries. + You must set this path before setting the Source property for the first time on any instance of this control. + Settng this property when FFmpeg binaries have been registered will throw an exception. + + + + + Specifies the bitwise flags that correspond to FFmpeg library identifiers. + Please use the class for valid combinations. + If FFmpeg is already loaded, the value cannot be changed. + + + + + Provides access to the underlying media engine driving this control. + This property is intender for advance usages only. + + + + + Gets or sets the base URI of the current application context. + + + + + This is the image that holds video bitmaps + + + + + A viewbox holding the subtitle text blocks + + + + + Gets the grid control holding the rest of the controls. + + + + + When position is being set from within this control, this field will + be set to true. This is useful to detect if the user is setting the position + or if the Position property is being driven from within + + + + + Begins or resumes playback of the currently loaded media. + + The awaitable command + + + + Pauses playback of the currently loaded media. + + The awaitable command + + + + Pauses and rewinds the currently loaded media. + + The awaitable command + + + + Closes the currently loaded media. + + The awaitable command + + + + Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + + + + + Invoked whenever the effective value of any dependency property on this has been updated. The specific dependency property that changed is reported in the arguments parameter. Overrides . + + The event data that describes the property that changed, as well as old and new values. + + + + DependencyProperty for FFmpegMediaElement Source property. + + + + + DependencyProperty for Stretch property. + + + + + DependencyProperty for StretchDirection property. + + + + + The DependencyProperty for the MediaElement.Balance property. 
Dependency properties and media state
- Dependency properties: Source (the Uri of the media to be played), Stretch and StretchDirection
  (how large the video is drawn and whether content may scale below or above its native size),
  Balance, IsMuted, SpeedRatio, Volume (valid range 0 to 1), ScrubbingEnabled (whether frames are
  updated for seek operations while paused), UnloadedBehavior (currently a TODO with no effect),
  LoadedBehavior (manual control by default; playing once a Source is set, unless a loaded behavior
  is also set, in which case that takes control), the behavior when media ends (Close by default),
  and Position.
- Media information, valid after MediaOpened fires: the metadata key/value pairs (null before a
  load), the media format, the duration of a single frame step (the frame length when a video
  component with a frame rate exists, otherwise a tenth of a second), whether the media has audio
  and video, the video codec, bitrate, natural width and height, frame rate and frame duration, and
  the name of the hardware decoder in use (enabling hardware acceleration does not guarantee
  decoding in hardware; an empty string is returned when software decoding is used).
- Audio and duration: codec, bitrate, channel count, sample rate and bits per sample, the media's
  natural duration, whether the media can be paused (computed from whether the stream is detected
  as live), whether it is live/realtime, seekable, playing, ended, buffering, or seeking.
- Progress and state: the current video SMPTE timecode (empty when unavailable; a formatting sketch
  follows this list), buffering and download progress (0 to 1), the wait packet buffer length
  (1 second when bitrate information is available, otherwise 512 KB), the maximum packet buffer
  length (30 times the buffer cache length for realtime streams, otherwise 4 times), whether media
  is opening or open, and the current playback state.
- Routed events and their CLR counterparts: BufferingStarted/BufferingEnded,
  SeekingStarted/SeekingEnded, MediaFailed (the media failed to load or a fatal error prevents
  playback), MediaOpened, MediaClosed, MediaOpening (raised before the input stream is opened so
  the input options can be modified), PositionChanged and MediaEnded.
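The SMPTE timecode property above is just a formatted view of the playback position. A minimal
sketch of one plausible non-drop-frame formatting, not necessarily what the library does
internally:

    using System;

    internal static class Timecode
    {
        // Illustrative only: derive an HH:MM:SS:FF string from a position and a frame rate
        // (non-drop-frame; the library's own implementation may differ).
        public static string ToSmpte(TimeSpan position, double frameRate)
        {
            var fps = (int)Math.Round(frameRate);
            var totalFrames = (long)Math.Round(position.TotalSeconds * frameRate);
            var frames = (int)(totalFrames % fps);
            var totalSeconds = totalFrames / fps;
            return string.Format("{0:00}:{1:00}:{2:00}:{3:00}",
                totalSeconds / 3600, (totalSeconds / 60) % 60, totalSeconds % 60, frames);
        }
    }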
Event plumbing, subtitle control and wave format
- Helpers create exception routed event arguments (the framework constructor is not public), raise
  the FFmpeg and general message-logged events, the media failed/opened/closed/opening events,
  position changed, buffering and seeking started/ended and media ended, notify property changes
  (must be called from a UI thread), and log the start and end of an event.
- A control suitable for displaying subtitles, laid out as UserControl : Viewbox : Grid :
  TextBlocks, with dependency properties for the text, foreground, foreground effect, outline and
  outline width; it holds the outlined text blocks, their container, a layout transform used to
  condense the text, and a margin computation per block type.
- A generic wave-provider interface exposes the WaveFormat and a Read(buffer, offset, count) call
  that fills the buffer with wave data and returns the number of bytes written.
- The Windows multimedia error codes from mmsystem.h (MMSYSERR_*, WAVERR_*, ACMERR_*, MIXERR_*),
  the wave output playback states (Stopped, Playing, Paused) and the wave formats reported by
  WaveOutCapabilities (11.025, 22.05, 44.1, 48 and 96 kHz in mono/stereo, 8- and 16-bit).
- A WaveFormat class for PCM: format tag (always 0x0001 PCM), channel count, sample rate, average
  bytes per second, block alignment, bits per sample (0 for some codecs) and extra byte count;
  constructors for the default 48 kHz stereo 16-bit signed interleaved format, a rate/channels
  overload and a rate/bits/channels overload; ToString, Equals and GetHashCode; and a helper that
  converts a latency in milliseconds into a buffer size (sketched below).
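The latency-to-buffer-size helper is, in essence, a bytes-per-second calculation rounded up to
whole blocks. A sketch of the usual computation, assumed behaviour rather than the shipped code:

    internal static class WaveFormatMath
    {
        // Convert a desired latency in milliseconds into a byte count that is a whole number of
        // blocks: bytes-per-second * ms / 1000, rounded up to the block alignment.
        public static int LatencyToByteSize(int averageBytesPerSecond, int blockAlign, int milliseconds)
        {
            var bytes = (int)((long)averageBytesPerSecond * milliseconds / 1000);
            var remainder = bytes % blockAlign;
            return remainder == 0 ? bytes : bytes + blockAlign - remainder;
        }
    }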
Low-level wave output interop
- The WAVEHDR interop structure (data pointer, buffer length, bytes recorded, user data, flags,
  loop counter, next pointer, driver-reserved field) and its flags (BEGINLOOP, DONE, ENDLOOP,
  INQUEUE, PREPARED), the MME callback flags (CALLBACK_NULL/FUNCTION/EVENT/WINDOW/THREAD) and the
  WIM_/WOM_ messages.
- An MmException wrapper that carries the MmResult of a failed Windows API call, a helper that
  raises it automatically on failure, and an error-message builder based on the result and the
  function name.
- A WaveOutBuffer that streams samples to a wave output device: it tracks whether its header is
  queued, exposes its size in bytes, refills itself from the provider's Read call (reporting
  whether any bytes were written), writes itself with waveOutWrite, and releases its resources on
  dispose.
- WaveOutCapabilities (manufacturer and product ids, driver version, product name, supported
  formats, channel count (often reported as -1), and optional features such as pitch control,
  playback-rate control, volume and left/right volume control, sample-accurate position and
  synchronous playback), together with the WaveOutSupport flags and a check for whether a given
  format is supported.
- A wave player that opens an audio device and continuously feeds it with samples from a wave
  provider. Desired latency, number of buffers and device number must be set before Init (the
  device number ranges from -1 to DeviceCount - 1; -1 sticks to the default device even if the
  default changes). It exposes the hardware wave format, playback state and capabilities,
  Play/Pause/Resume/Stop, a position in bytes obtained directly from waveOutGetPosition, and the
  playback thread that performs the continuous refill loop, closes the device and disposes the
  buffers.

Audio renderer
- Writes samples to the default audio output device. It exposes the output wave format, the parent
  media element and the core engine, Volume, Balance (-1.0 to 1.0), IsMuted, the realtime latency
  relative to the internal wall clock (negative when audio runs ahead, positive when it lags), the
  current audio position, the desired device latency (always positive, typically 200 ms) and the
  speed ratio.
- It renders media blocks on the clock cycle, reacts to Play/Pause/Stop/Close and seeks, waits for
  a ready state, serves the driver's sample requests, and synchronizes rendering to the wall clock,
  writing silence when no further reading is required. Speed changes are handled by stretching
  samples (lower pitch), shrinking/averaging them (higher pitch) or, experimentally, by the
  SoundTouch processor for tempo adjustment; volume and balance are then applied to the read buffer
  before it is written to the target buffer (sketched below).
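A hedged sketch of what applying volume and balance to interleaved 16-bit stereo PCM amounts to.
The renderer's actual scaling rules are not spelled out here, so the gain formula below is an
assumption:

    using System;

    internal static class AudioGain
    {
        // Apply volume (0..1) and balance (-1..1) in place to interleaved 16-bit stereo PCM.
        // Assumed convention: positive balance attenuates the left channel, negative the right.
        public static void ApplyVolumeAndBalance(byte[] buffer, int offset, int count, double volume, double balance)
        {
            var leftGain = volume * (balance > 0 ? 1.0 - balance : 1.0);
            var rightGain = volume * (balance < 0 ? 1.0 + balance : 1.0);

            for (var i = offset; i + 3 < offset + count; i += 4) // 4 bytes per stereo frame
            {
                var left = (short)(buffer[i] | (buffer[i + 1] << 8));
                var right = (short)(buffer[i + 2] | (buffer[i + 3] << 8));

                var l = (short)Math.Max(short.MinValue, Math.Min(short.MaxValue, left * leftGain));
                var r = (short)Math.Max(short.MinValue, Math.Min(short.MaxValue, right * rightGain));

                buffer[i] = (byte)(l & 0xFF);
                buffer[i + 1] = (byte)((l >> 8) & 0xFF);
                buffer[i + 2] = (byte)(r & 0xFF);
                buffer[i + 3] = (byte)((r >> 8) & 0xFF);
            }
        }
    }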
Subtitle and video renderers
- The subtitle renderer does nothing at this point beyond a synchronization lock, the text to be
  rendered and the text last rendered; SetText enqueues the update on the UI thread and returns
  immediately, and the usual Play/Pause/Close/Seek and wait-for-ready hooks are present.
- The video renderer presents frames through a WPF WriteableBitmap. It keeps an equivalence lookup
  between FFmpeg and WPF pixel formats, the render action enqueued on the UI thread, the bitmap
  presented to the user, a flag set while the bitmap is being written, the DPI along both axes, and
  the raise-video-event callback. On each render it initializes the target bitmap if needed, locks
  it to load the back buffer with the block data (a null back-buffer pointer signals an invalid
  lock), and applies a scale transform matching the block's aspect ratio. A minimal sketch of that
  bitmap write follows.
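A minimal sketch of pushing a decoded BGRA frame into a WriteableBitmap. It uses WritePixels, which
wraps the same lock / copy-to-back-buffer / unlock cycle the renderer performs manually; the frame
layout parameters are assumptions:

    using System;
    using System.Windows;
    using System.Windows.Media;
    using System.Windows.Media.Imaging;

    internal static class FrameBlitter
    {
        // Copies one BGRA32 frame (native pointer plus dimensions and stride) into the
        // WriteableBitmap shown by the video renderer's Image control.
        public static void Write(WriteableBitmap target, IntPtr pixels, int width, int height, int stride)
        {
            var rect = new Int32Rect(0, 0, width, height);
            target.WritePixels(rect, pixels, stride * height, stride);
        }

        // Creates a bitmap matching the frame, e.g. when the first block arrives or the size changes.
        public static WriteableBitmap CreateTarget(int width, int height, double dpiX, double dpiY)
        {
            return new WriteableBitmap(width, height, dpiX, dpiY, PixelFormats.Bgra32, null);
        }
    }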
The documentation closes with a strongly-typed resource class that returns the cached
ResourceManager instance, lets the current thread's CurrentUICulture be overridden for its resource
lookups, and looks up a localized System.Drawing.Bitmap resource.

diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avcodec-57.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avcodec-57.dll
index 5f72dae..7e1415f 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avcodec-57.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avcodec-57.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avdevice-57.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avdevice-57.dll
index f76137a..b35bfe0 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avdevice-57.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avdevice-57.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avfilter-6.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avfilter-6.dll
index d40006a..418853a 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avfilter-6.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avfilter-6.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avformat-57.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avformat-57.dll
index e70639b..d58a585 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avformat-57.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avformat-57.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avutil-55.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avutil-55.dll
index d255480..18ee8c6 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avutil-55.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/avutil-55.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/ffmpeg.exe b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/ffmpeg.exe
index 0d12bd4..847ed25 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/ffmpeg.exe and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/ffmpeg.exe differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/ffprobe.exe b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/ffprobe.exe
index 9f2bd3e..0583992 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/ffprobe.exe and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/ffprobe.exe differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/postproc-54.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/postproc-54.dll
new file mode 100644
index 0000000..e766950
Binary files /dev/null and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/postproc-54.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/swresample-2.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/swresample-2.dll
index 60ec6f9..b27fc5a 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/swresample-2.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/swresample-2.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/swscale-4.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/swscale-4.dll
index 4cc2012..a7838cc 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/swscale-4.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x64/swscale-4.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avcodec-57.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avcodec-57.dll
index 163bbe3..809c834 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avcodec-57.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avcodec-57.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avdevice-57.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avdevice-57.dll
index f53036c..dc86c28 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avdevice-57.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avdevice-57.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avfilter-6.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avfilter-6.dll
index 5063444..5e526d6 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avfilter-6.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avfilter-6.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avformat-57.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avformat-57.dll
index e02a101..a646c9c 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avformat-57.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avformat-57.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avutil-55.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avutil-55.dll
index fecc175..0a8a5ce 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avutil-55.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/avutil-55.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/ffmpeg.exe b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/ffmpeg.exe
index 0454cce..2aeff8c 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/ffmpeg.exe and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/ffmpeg.exe differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/ffprobe.exe b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/ffprobe.exe
index 43ad293..758149f 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/ffprobe.exe and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/ffprobe.exe differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/postproc-54.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/postproc-54.dll
new file mode 100644
index 0000000..b303309
Binary files /dev/null and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/postproc-54.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/swresample-2.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/swresample-2.dll
index 4d8c797..412e994 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/swresample-2.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/swresample-2.dll differ
diff --git a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/swscale-4.dll b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/swscale-4.dll
index 345c91e..d2d1caa 100644
Binary files a/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/swscale-4.dll and b/QuickLook.Plugin/QuickLook.Plugin.VideoViewer/ffmpeg/x86/swscale-4.dll differ