diff --git a/.gitignore b/.gitignore index 6b43162d..e4a35a10 100644 --- a/.gitignore +++ b/.gitignore @@ -106,4 +106,8 @@ UpgradeLog*.XML *.stackdump # Local -/FFmpeg.AutoGen.Example/frame.*.jpg \ No newline at end of file +/FFmpeg.AutoGen.Example/frames + +# JetBrains Rider +.idea/ +*.sln.iml \ No newline at end of file diff --git a/Directory.Build.props b/Directory.Build.props index be59e6ae..3d77312c 100644 --- a/Directory.Build.props +++ b/Directory.Build.props @@ -1,11 +1,11 @@ $(AssemblyName) - 5.1.1 + 5.1.1.1 Ruslan Balanukhin - Rational Core + Rationale One FFmpeg.AutoGen - Copyright © Ruslan Balanukhin 2021 All rights reserved. + Copyright © Ruslan Balanukhin 2022 All rights reserved. https://github.com/Ruslan-B/FFmpeg.AutoGen Git $(Version) @@ -16,7 +16,8 @@ - true + + true true false $(MSBuildThisFileDirectory)FFmpeg.AutoGen.snk diff --git a/FFmpeg.AutoGen.Abstractions/ConstCharPtrMarshaler.cs b/FFmpeg.AutoGen.Abstractions/ConstCharPtrMarshaler.cs new file mode 100644 index 00000000..47bd8e71 --- /dev/null +++ b/FFmpeg.AutoGen.Abstractions/ConstCharPtrMarshaler.cs @@ -0,0 +1,24 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Abstractions; + +public class ConstCharPtrMarshaler : ICustomMarshaler +{ + private static readonly ConstCharPtrMarshaler Instance = new(); + public object MarshalNativeToManaged(IntPtr pNativeData) => Marshal.PtrToStringAnsi(pNativeData); + + public IntPtr MarshalManagedToNative(object managedObj) => IntPtr.Zero; + + public void CleanUpNativeData(IntPtr pNativeData) + { + } + + public void CleanUpManagedData(object managedObj) + { + } + + public int GetNativeDataSize() => IntPtr.Size; + + public static ICustomMarshaler GetInstance(string cookie) => Instance; +} diff --git a/FFmpeg.AutoGen.Abstractions/FFmpeg.AutoGen.Abstractions.csproj b/FFmpeg.AutoGen.Abstractions/FFmpeg.AutoGen.Abstractions.csproj new file mode 100644 index 00000000..236decfa --- /dev/null +++ 
b/FFmpeg.AutoGen.Abstractions/FFmpeg.AutoGen.Abstractions.csproj @@ -0,0 +1,31 @@ + + + + netstandard2.1;netstandard2.0;net45 + FFmpeg auto generated unsafe bindings for C#/.NET and Mono. Abstractions todo + true + + + + True + 108;169;612;618;1573;1591;1701;1702;1705 + false + + bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml + + + + + true + snupkg + + + + + + + + + + + diff --git a/FFmpeg.AutoGen.Abstractions/FFmpeg.cs b/FFmpeg.AutoGen.Abstractions/FFmpeg.cs new file mode 100644 index 00000000..9017ee17 --- /dev/null +++ b/FFmpeg.AutoGen.Abstractions/FFmpeg.cs @@ -0,0 +1,49 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Abstractions; + +public static partial class ffmpeg +{ + public static readonly int EAGAIN; + + public static readonly int ENOMEM = 12; + + public static readonly int EINVAL = 22; + + public static readonly int EPIPE = 32; + + static ffmpeg() + { +#if NET + +#elif NETSTANDARD2_0_OR_GREATER + EAGAIN = RuntimeInformation.IsOSPlatform(OSPlatform.OSX) ? 35 : 11; +#else + EAGAIN = Environment.OSVersion.Platform == PlatformID.MacOSX ? 
35 : 11; +#endif + + } + + public static ulong UINT64_C(T a) + => Convert.ToUInt64(a); + + public static int AVERROR(T1 a) + => -Convert.ToInt32(a); + + public static int MKTAG(T1 a, T2 b, T3 c, T4 d) + => (int)(Convert.ToUInt32(a) | (Convert.ToUInt32(b) << 8) | (Convert.ToUInt32(c) << 16) | + (Convert.ToUInt32(d) << 24)); + + public static int FFERRTAG(T1 a, T2 b, T3 c, T4 d) + => -MKTAG(a, b, c, d); + + public static int AV_VERSION_INT(T1 a, T2 b, T3 c) => + (Convert.ToInt32(a) << 16) | (Convert.ToInt32(b) << 8) | Convert.ToInt32(c); + + public static string AV_VERSION_DOT(T1 a, T2 b, T3 c) + => $"{a}.{b}.{c}"; + + public static string AV_VERSION(T1 a, T2 b, T3 c) + => AV_VERSION_DOT(a, b, c); +} diff --git a/FFmpeg.AutoGen.Abstractions/IFixedArray.cs b/FFmpeg.AutoGen.Abstractions/IFixedArray.cs new file mode 100644 index 00000000..8604aaf3 --- /dev/null +++ b/FFmpeg.AutoGen.Abstractions/IFixedArray.cs @@ -0,0 +1,13 @@ +namespace FFmpeg.AutoGen.Abstractions; + +public interface IFixedArray +{ + int Length { get; } +} + +internal interface IFixedArray : IFixedArray +{ + T this[uint index] { get; set; } + T[] ToArray(); + void UpdateFrom(T[] array); +} diff --git a/FFmpeg.AutoGen.Abstractions/UTF8Marshaler.cs b/FFmpeg.AutoGen.Abstractions/UTF8Marshaler.cs new file mode 100644 index 00000000..6b3ebbaf --- /dev/null +++ b/FFmpeg.AutoGen.Abstractions/UTF8Marshaler.cs @@ -0,0 +1,82 @@ +using System; +using System.Runtime.InteropServices; +using System.Text; + +namespace FFmpeg.AutoGen.Abstractions; + +public class UTF8Marshaler : ICustomMarshaler +{ + private static readonly UTF8Marshaler Instance = new(); + + public virtual object MarshalNativeToManaged(IntPtr pNativeData) => FromNative(Encoding.UTF8, pNativeData); + + public virtual IntPtr MarshalManagedToNative(object managedObj) + { + if (managedObj == null) + return IntPtr.Zero; + + if (managedObj is not string str) + throw new MarshalDirectiveException($"{GetType().Name} must be used on a string."); + + return 
FromManaged(Encoding.UTF8, str); + } + + public virtual void CleanUpNativeData(IntPtr pNativeData) + { + //Free anything allocated by MarshalManagedToNative + //This is called after the native function call completes + + if (pNativeData != IntPtr.Zero) + Marshal.FreeHGlobal(pNativeData); + } + + public void CleanUpManagedData(object managedObj) + { + //Free anything allocated by MarshalNativeToManaged + //This is called after the native function call completes + } + + public int GetNativeDataSize() => -1; // Not a value type + + public static ICustomMarshaler GetInstance(string cookie) => Instance; + + public static unsafe string FromNative(Encoding encoding, IntPtr pNativeData) => FromNative(encoding, (byte*)pNativeData); + + public static unsafe string FromNative(Encoding encoding, byte* pNativeData) + { + if (pNativeData == null) + return null; + + var start = pNativeData; + var walk = start; + + // Find the end of the string + while (*walk != 0) walk++; + + if (walk == start) + return string.Empty; + + return new string((sbyte*)pNativeData, 0, (int)(walk - start), encoding); + } + + public static unsafe IntPtr FromManaged(Encoding encoding, string value) + { + if (encoding == null || value == null) + return IntPtr.Zero; + + var length = encoding.GetByteCount(value); + var buffer = (byte*)Marshal.AllocHGlobal(length + 1).ToPointer(); + + if (length > 0) + { + fixed (char* pValue = value) + { + encoding.GetBytes(pValue, value.Length, buffer, length); + } + } + + buffer[length] = 0; + + return new IntPtr(buffer); + } +} diff --git a/FFmpeg.AutoGen.Abstractions/generated/Arrays.g.cs b/FFmpeg.AutoGen.Abstractions/generated/Arrays.g.cs new file mode 100644 index 00000000..31cd0227 --- /dev/null +++ b/FFmpeg.AutoGen.Abstractions/generated/Arrays.g.cs @@ -0,0 +1,576 @@ +using System; + +namespace FFmpeg.AutoGen.Abstractions; + +public unsafe struct AVRational2 : IFixedArray +{ + public static readonly int ArrayLength = 2; + public int Length => 2; + AVRational _0; 
AVRational _1; + + public AVRational this[uint i] + { + get { if (i >= 2) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 2) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { *(p0 + i) = value; } } + } + public AVRational[] ToArray() + { + fixed (AVRational* p0 = &_0) { var a = new AVRational[2]; for (uint i = 0; i < 2; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVRational[] array) + { + fixed (AVRational* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 2) return; } } + } + public static implicit operator AVRational[](AVRational2 @struct) => @struct.ToArray(); +} + +public unsafe struct short2 : IFixedArray +{ + public static readonly int ArrayLength = 2; + public int Length => 2; + fixed short _[2]; + + public short this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public short[] ToArray() + { + var a = new short[2]; for (uint i = 0; i < 2; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(short[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 2) return; } + } + public static implicit operator short[](short2 @struct) => @struct.ToArray(); +} + +public unsafe struct void_ptr2 : IFixedArray +{ + public static readonly int ArrayLength = 2; + public int Length => 2; + void* _0; void* _1; + + public void* this[uint i] + { + get { if (i >= 2) throw new ArgumentOutOfRangeException(); fixed (void** p0 = &_0) { return *(p0 + i); } } + set { if (i >= 2) throw new ArgumentOutOfRangeException(); fixed (void** p0 = &_0) { *(p0 + i) = value; } } + } + public void*[] ToArray() + { + fixed (void** p0 = &_0) { var a = new void*[2]; for (uint i = 0; i < 2; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(void*[] array) + { + fixed (void** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 2) return; } } + } + public static implicit operator 
void*[](void_ptr2 @struct) => @struct.ToArray(); +} + +public unsafe struct AVHDRPlusColorTransformParams3 : IFixedArray +{ + public static readonly int ArrayLength = 3; + public int Length => 3; + AVHDRPlusColorTransformParams _0; AVHDRPlusColorTransformParams _1; AVHDRPlusColorTransformParams _2; + + public AVHDRPlusColorTransformParams this[uint i] + { + get { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (AVHDRPlusColorTransformParams* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (AVHDRPlusColorTransformParams* p0 = &_0) { *(p0 + i) = value; } } + } + public AVHDRPlusColorTransformParams[] ToArray() + { + fixed (AVHDRPlusColorTransformParams* p0 = &_0) { var a = new AVHDRPlusColorTransformParams[3]; for (uint i = 0; i < 3; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVHDRPlusColorTransformParams[] array) + { + fixed (AVHDRPlusColorTransformParams* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 3) return; } } + } + public static implicit operator AVHDRPlusColorTransformParams[](AVHDRPlusColorTransformParams3 @struct) => @struct.ToArray(); +} + +public unsafe struct AVRational3 : IFixedArray +{ + public static readonly int ArrayLength = 3; + public int Length => 3; + AVRational _0; AVRational _1; AVRational _2; + + public AVRational this[uint i] + { + get { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { *(p0 + i) = value; } } + } + public AVRational[] ToArray() + { + fixed (AVRational* p0 = &_0) { var a = new AVRational[3]; for (uint i = 0; i < 3; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVRational[] array) + { + fixed (AVRational* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 3) return; } } + } + public static implicit operator 
AVRational[](AVRational3 @struct) => @struct.ToArray(); +} + +public unsafe struct AVRational3x2 : IFixedArray +{ + public static readonly int ArrayLength = 3; + public int Length => 3; + AVRational2 _0; AVRational2 _1; AVRational2 _2; + + public AVRational2 this[uint i] + { + get { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (AVRational2* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (AVRational2* p0 = &_0) { *(p0 + i) = value; } } + } + public AVRational2[] ToArray() + { + fixed (AVRational2* p0 = &_0) { var a = new AVRational2[3]; for (uint i = 0; i < 3; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVRational2[] array) + { + fixed (AVRational2* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 3) return; } } + } + public static implicit operator AVRational2[](AVRational3x2 @struct) => @struct.ToArray(); +} + +public unsafe struct byte_ptr3 : IFixedArray +{ + public static readonly int ArrayLength = 3; + public int Length => 3; + byte* _0; byte* _1; byte* _2; + + public byte* this[uint i] + { + get { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { return *(p0 + i); } } + set { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { *(p0 + i) = value; } } + } + public byte*[] ToArray() + { + fixed (byte** p0 = &_0) { var a = new byte*[3]; for (uint i = 0; i < 3; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(byte*[] array) + { + fixed (byte** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 3) return; } } + } + public static implicit operator byte*[](byte_ptr3 @struct) => @struct.ToArray(); +} + +public unsafe struct int3 : IFixedArray +{ + public static readonly int ArrayLength = 3; + public int Length => 3; + fixed int _[3]; + + public int this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public int[] ToArray() + { + var a = 
new int[3]; for (uint i = 0; i < 3; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(int[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 3) return; } + } + public static implicit operator int[](int3 @struct) => @struct.ToArray(); +} + +public unsafe struct short3x2 : IFixedArray +{ + public static readonly int ArrayLength = 3; + public int Length => 3; + short2 _0; short2 _1; short2 _2; + + public short2 this[uint i] + { + get { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (short2* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (short2* p0 = &_0) { *(p0 + i) = value; } } + } + public short2[] ToArray() + { + fixed (short2* p0 = &_0) { var a = new short2[3]; for (uint i = 0; i < 3; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(short2[] array) + { + fixed (short2* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 3) return; } } + } + public static implicit operator short2[](short3x2 @struct) => @struct.ToArray(); +} + +public unsafe struct AVComponentDescriptor4 : IFixedArray +{ + public static readonly int ArrayLength = 4; + public int Length => 4; + AVComponentDescriptor _0; AVComponentDescriptor _1; AVComponentDescriptor _2; AVComponentDescriptor _3; + + public AVComponentDescriptor this[uint i] + { + get { if (i >= 4) throw new ArgumentOutOfRangeException(); fixed (AVComponentDescriptor* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 4) throw new ArgumentOutOfRangeException(); fixed (AVComponentDescriptor* p0 = &_0) { *(p0 + i) = value; } } + } + public AVComponentDescriptor[] ToArray() + { + fixed (AVComponentDescriptor* p0 = &_0) { var a = new AVComponentDescriptor[4]; for (uint i = 0; i < 4; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVComponentDescriptor[] array) + { + fixed (AVComponentDescriptor* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if 
(i >= 4) return; } } + } + public static implicit operator AVComponentDescriptor[](AVComponentDescriptor4 @struct) => @struct.ToArray(); +} + +public unsafe struct byte_ptr4 : IFixedArray +{ + public static readonly int ArrayLength = 4; + public int Length => 4; + byte* _0; byte* _1; byte* _2; byte* _3; + + public byte* this[uint i] + { + get { if (i >= 4) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { return *(p0 + i); } } + set { if (i >= 4) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { *(p0 + i) = value; } } + } + public byte*[] ToArray() + { + fixed (byte** p0 = &_0) { var a = new byte*[4]; for (uint i = 0; i < 4; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(byte*[] array) + { + fixed (byte** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 4) return; } } + } + public static implicit operator byte*[](byte_ptr4 @struct) => @struct.ToArray(); +} + +public unsafe struct int4 : IFixedArray +{ + public static readonly int ArrayLength = 4; + public int Length => 4; + fixed int _[4]; + + public int this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public int[] ToArray() + { + var a = new int[4]; for (uint i = 0; i < 4; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(int[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 4) return; } + } + public static implicit operator int[](int4 @struct) => @struct.ToArray(); +} + +public unsafe struct long4 : IFixedArray +{ + public static readonly int ArrayLength = 4; + public int Length => 4; + fixed long _[4]; + + public long this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public long[] ToArray() + { + var a = new long[4]; for (uint i = 0; i < 4; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(long[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 4) return; } + } + public static implicit operator long[](long4 @struct) => 
@struct.ToArray(); +} + +public unsafe struct ulong4 : IFixedArray +{ + public static readonly int ArrayLength = 4; + public int Length => 4; + fixed ulong _[4]; + + public ulong this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public ulong[] ToArray() + { + var a = new ulong[4]; for (uint i = 0; i < 4; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(ulong[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 4) return; } + } + public static implicit operator ulong[](ulong4 @struct) => @struct.ToArray(); +} + +public unsafe struct int7 : IFixedArray +{ + public static readonly int ArrayLength = 7; + public int Length => 7; + fixed int _[7]; + + public int this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public int[] ToArray() + { + var a = new int[7]; for (uint i = 0; i < 7; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(int[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 7) return; } + } + public static implicit operator int[](int7 @struct) => @struct.ToArray(); +} + +public unsafe struct AVBufferRef_ptr8 : IFixedArray +{ + public static readonly int ArrayLength = 8; + public int Length => 8; + AVBufferRef* _0; AVBufferRef* _1; AVBufferRef* _2; AVBufferRef* _3; AVBufferRef* _4; AVBufferRef* _5; AVBufferRef* _6; AVBufferRef* _7; + + public AVBufferRef* this[uint i] + { + get { if (i >= 8) throw new ArgumentOutOfRangeException(); fixed (AVBufferRef** p0 = &_0) { return *(p0 + i); } } + set { if (i >= 8) throw new ArgumentOutOfRangeException(); fixed (AVBufferRef** p0 = &_0) { *(p0 + i) = value; } } + } + public AVBufferRef*[] ToArray() + { + fixed (AVBufferRef** p0 = &_0) { var a = new AVBufferRef*[8]; for (uint i = 0; i < 8; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVBufferRef*[] array) + { + fixed (AVBufferRef** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 8) return; } } + } + public static implicit 
operator AVBufferRef*[](AVBufferRef_ptr8 @struct) => @struct.ToArray(); +} + +public unsafe struct byte_ptr8 : IFixedArray +{ + public static readonly int ArrayLength = 8; + public int Length => 8; + byte* _0; byte* _1; byte* _2; byte* _3; byte* _4; byte* _5; byte* _6; byte* _7; + + public byte* this[uint i] + { + get { if (i >= 8) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { return *(p0 + i); } } + set { if (i >= 8) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { *(p0 + i) = value; } } + } + public byte*[] ToArray() + { + fixed (byte** p0 = &_0) { var a = new byte*[8]; for (uint i = 0; i < 8; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(byte*[] array) + { + fixed (byte** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 8) return; } } + } + public static implicit operator byte*[](byte_ptr8 @struct) => @struct.ToArray(); +} + +public unsafe struct byte8 : IFixedArray +{ + public static readonly int ArrayLength = 8; + public int Length => 8; + fixed byte _[8]; + + public byte this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public byte[] ToArray() + { + var a = new byte[8]; for (uint i = 0; i < 8; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(byte[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 8) return; } + } + public static implicit operator byte[](byte8 @struct) => @struct.ToArray(); +} + +public unsafe struct int8 : IFixedArray +{ + public static readonly int ArrayLength = 8; + public int Length => 8; + fixed int _[8]; + + public int this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public int[] ToArray() + { + var a = new int[8]; for (uint i = 0; i < 8; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(int[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 8) return; } + } + public static implicit operator int[](int8 @struct) => @struct.ToArray(); +} + +public 
unsafe struct ulong8 : IFixedArray +{ + public static readonly int ArrayLength = 8; + public int Length => 8; + fixed ulong _[8]; + + public ulong this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public ulong[] ToArray() + { + var a = new ulong[8]; for (uint i = 0; i < 8; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(ulong[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 8) return; } + } + public static implicit operator ulong[](ulong8 @struct) => @struct.ToArray(); +} + +public unsafe struct AVHDRPlusPercentile15 : IFixedArray +{ + public static readonly int ArrayLength = 15; + public int Length => 15; + AVHDRPlusPercentile _0; AVHDRPlusPercentile _1; AVHDRPlusPercentile _2; AVHDRPlusPercentile _3; AVHDRPlusPercentile _4; AVHDRPlusPercentile _5; AVHDRPlusPercentile _6; AVHDRPlusPercentile _7; AVHDRPlusPercentile _8; AVHDRPlusPercentile _9; AVHDRPlusPercentile _10; AVHDRPlusPercentile _11; AVHDRPlusPercentile _12; AVHDRPlusPercentile _13; AVHDRPlusPercentile _14; + + public AVHDRPlusPercentile this[uint i] + { + get { if (i >= 15) throw new ArgumentOutOfRangeException(); fixed (AVHDRPlusPercentile* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 15) throw new ArgumentOutOfRangeException(); fixed (AVHDRPlusPercentile* p0 = &_0) { *(p0 + i) = value; } } + } + public AVHDRPlusPercentile[] ToArray() + { + fixed (AVHDRPlusPercentile* p0 = &_0) { var a = new AVHDRPlusPercentile[15]; for (uint i = 0; i < 15; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVHDRPlusPercentile[] array) + { + fixed (AVHDRPlusPercentile* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 15) return; } } + } + public static implicit operator AVHDRPlusPercentile[](AVHDRPlusPercentile15 @struct) => @struct.ToArray(); +} + +public unsafe struct AVRational15 : IFixedArray +{ + public static readonly int ArrayLength = 15; + public int Length => 15; + AVRational _0; AVRational _1; AVRational 
_2; AVRational _3; AVRational _4; AVRational _5; AVRational _6; AVRational _7; AVRational _8; AVRational _9; AVRational _10; AVRational _11; AVRational _12; AVRational _13; AVRational _14; + + public AVRational this[uint i] + { + get { if (i >= 15) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 15) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { *(p0 + i) = value; } } + } + public AVRational[] ToArray() + { + fixed (AVRational* p0 = &_0) { var a = new AVRational[15]; for (uint i = 0; i < 15; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVRational[] array) + { + fixed (AVRational* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 15) return; } } + } + public static implicit operator AVRational[](AVRational15 @struct) => @struct.ToArray(); +} + +public unsafe struct byte16 : IFixedArray +{ + public static readonly int ArrayLength = 16; + public int Length => 16; + fixed byte _[16]; + + public byte this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public byte[] ToArray() + { + var a = new byte[16]; for (uint i = 0; i < 16; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(byte[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 16) return; } + } + public static implicit operator byte[](byte16 @struct) => @struct.ToArray(); +} + +public unsafe struct AVRational25 : IFixedArray +{ + public static readonly int ArrayLength = 25; + public int Length => 25; + AVRational _0; AVRational _1; AVRational _2; AVRational _3; AVRational _4; AVRational _5; AVRational _6; AVRational _7; AVRational _8; AVRational _9; AVRational _10; AVRational _11; AVRational _12; AVRational _13; AVRational _14; AVRational _15; AVRational _16; AVRational _17; AVRational _18; AVRational _19; AVRational _20; AVRational _21; AVRational _22; AVRational _23; AVRational _24; + + public AVRational this[uint i] + { + get { 
if (i >= 25) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 25) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { *(p0 + i) = value; } } + } + public AVRational[] ToArray() + { + fixed (AVRational* p0 = &_0) { var a = new AVRational[25]; for (uint i = 0; i < 25; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVRational[] array) + { + fixed (AVRational* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 25) return; } } + } + public static implicit operator AVRational[](AVRational25 @struct) => @struct.ToArray(); +} + +public unsafe struct AVRational25x25 : IFixedArray +{ + public static readonly int ArrayLength = 25; + public int Length => 25; + AVRational25 _0; AVRational25 _1; AVRational25 _2; AVRational25 _3; AVRational25 _4; AVRational25 _5; AVRational25 _6; AVRational25 _7; AVRational25 _8; AVRational25 _9; AVRational25 _10; AVRational25 _11; AVRational25 _12; AVRational25 _13; AVRational25 _14; AVRational25 _15; AVRational25 _16; AVRational25 _17; AVRational25 _18; AVRational25 _19; AVRational25 _20; AVRational25 _21; AVRational25 _22; AVRational25 _23; AVRational25 _24; + + public AVRational25 this[uint i] + { + get { if (i >= 25) throw new ArgumentOutOfRangeException(); fixed (AVRational25* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 25) throw new ArgumentOutOfRangeException(); fixed (AVRational25* p0 = &_0) { *(p0 + i) = value; } } + } + public AVRational25[] ToArray() + { + fixed (AVRational25* p0 = &_0) { var a = new AVRational25[25]; for (uint i = 0; i < 25; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVRational25[] array) + { + fixed (AVRational25* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 25) return; } } + } + public static implicit operator AVRational25[](AVRational25x25 @struct) => @struct.ToArray(); +} + +public unsafe struct byte61440 : 
IFixedArray +{ + public static readonly int ArrayLength = 61440; + public int Length => 61440; + fixed byte _[61440]; + + public byte this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public byte[] ToArray() + { + var a = new byte[61440]; for (uint i = 0; i < 61440; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(byte[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 61440) return; } + } + public static implicit operator byte[](byte61440 @struct) => @struct.ToArray(); +} + diff --git a/FFmpeg.AutoGen.Abstractions/generated/Delegates.g.cs b/FFmpeg.AutoGen.Abstractions/generated/Delegates.g.cs new file mode 100644 index 00000000..1e63bb6b --- /dev/null +++ b/FFmpeg.AutoGen.Abstractions/generated/Delegates.g.cs @@ -0,0 +1,707 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Abstractions; + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int _query_func (AVFilterContext* @p0); +public unsafe struct _query_func_func +{ + public IntPtr Pointer; + public static implicit operator _query_func_func(_query_func func) => new _query_func_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void av_buffer_create_free (void* @opaque, byte* @data); +public unsafe struct av_buffer_create_free_func +{ + public IntPtr Pointer; + public static implicit operator av_buffer_create_free_func(av_buffer_create_free func) => new av_buffer_create_free_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate AVBufferRef* av_buffer_pool_init_alloc (ulong @size); +public unsafe struct av_buffer_pool_init_alloc_func +{ + public IntPtr Pointer; + public static implicit operator av_buffer_pool_init_alloc_func(av_buffer_pool_init_alloc func) => new av_buffer_pool_init_alloc_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate AVBufferRef* av_buffer_pool_init2_alloc (void* @opaque, ulong @size); +public unsafe struct av_buffer_pool_init2_alloc_func +{ + public IntPtr Pointer; + public static implicit operator av_buffer_pool_init2_alloc_func(av_buffer_pool_init2_alloc func) => new av_buffer_pool_init2_alloc_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void av_buffer_pool_init2_pool_free (void* @opaque); +public unsafe struct av_buffer_pool_init2_pool_free_func +{ + public IntPtr Pointer; + public static implicit operator av_buffer_pool_init2_pool_free_func(av_buffer_pool_init2_pool_free func) => new av_buffer_pool_init2_pool_free_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void av_log_set_callback_callback (void* @p0, int @p1, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @p2, byte* @p3); +public unsafe struct av_log_set_callback_callback_func +{ + public IntPtr Pointer; + public static implicit operator av_log_set_callback_callback_func(av_log_set_callback_callback func) => new av_log_set_callback_callback_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int av_tree_enumerate_cmp (void* @opaque, void* @elem); +public unsafe struct av_tree_enumerate_cmp_func +{ + public IntPtr Pointer; + public static implicit operator av_tree_enumerate_cmp_func(av_tree_enumerate_cmp func) => new av_tree_enumerate_cmp_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int av_tree_enumerate_enu (void* @opaque, void* @elem); +public unsafe struct av_tree_enumerate_enu_func +{ + public IntPtr Pointer; + public static implicit operator av_tree_enumerate_enu_func(av_tree_enumerate_enu func) => new av_tree_enumerate_enu_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int av_tree_find_cmp (void* @key, void* @b); +public unsafe struct av_tree_find_cmp_func +{ + public IntPtr Pointer; + public static implicit operator av_tree_find_cmp_func(av_tree_find_cmp func) => new av_tree_find_cmp_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int av_tree_insert_cmp (void* @key, void* @b); +public unsafe struct av_tree_insert_cmp_func +{ + public IntPtr Pointer; + public static implicit operator av_tree_insert_cmp_func(av_tree_insert_cmp func) => new av_tree_insert_cmp_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate AVClass* AVClass_child_class_iterate (void** @iter); +public unsafe struct AVClass_child_class_iterate_func +{ + public IntPtr Pointer; + public static implicit operator AVClass_child_class_iterate_func(AVClass_child_class_iterate func) => new AVClass_child_class_iterate_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void* AVClass_child_next (void* @obj, void* @prev); +public unsafe struct AVClass_child_next_func +{ + public IntPtr Pointer; + public static implicit operator AVClass_child_next_func(AVClass_child_next func) => new AVClass_child_next_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate AVClassCategory AVClass_get_category (void* @ctx); +public unsafe struct AVClass_get_category_func +{ + public IntPtr Pointer; + public static implicit operator AVClass_get_category_func(AVClass_get_category func) => new AVClass_get_category_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate string AVClass_item_name (void* @ctx); +public unsafe struct AVClass_item_name_func +{ + public IntPtr Pointer; + public static implicit operator AVClass_item_name_func(AVClass_item_name func) => new AVClass_item_name_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVClass_query_ranges (AVOptionRanges** @p0, void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, int @flags); +public unsafe struct AVClass_query_ranges_func +{ + public IntPtr Pointer; + public static implicit operator AVClass_query_ranges_func(AVClass_query_ranges func) => new AVClass_query_ranges_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int avcodec_default_execute_func (AVCodecContext* @c2, void* @arg2); +public unsafe struct avcodec_default_execute_func_func +{ + public IntPtr Pointer; + public static implicit operator avcodec_default_execute_func_func(avcodec_default_execute_func func) => new avcodec_default_execute_func_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int avcodec_default_execute2_func (AVCodecContext* @c2, void* @arg2, int @p2, int @p3); +public unsafe struct avcodec_default_execute2_func_func +{ + public IntPtr Pointer; + public static implicit operator avcodec_default_execute2_func_func(avcodec_default_execute2_func func) => new avcodec_default_execute2_func_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVCodecContext_draw_horiz_band (AVCodecContext* @s, AVFrame* @src, ref int8 @offset, int @y, int @type, int @height); +public unsafe struct AVCodecContext_draw_horiz_band_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecContext_draw_horiz_band_func(AVCodecContext_draw_horiz_band func) => new AVCodecContext_draw_horiz_band_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecContext_execute (AVCodecContext* @c, func_func @func, void* @arg2, int* @ret, int @count, int @size); +public unsafe struct AVCodecContext_execute_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecContext_execute_func(AVCodecContext_execute func) => new AVCodecContext_execute_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecContext_execute2 (AVCodecContext* @c, func_func @func, void* @arg2, int* @ret, int @count); +public unsafe struct AVCodecContext_execute2_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecContext_execute2_func(AVCodecContext_execute2 func) => new AVCodecContext_execute2_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecContext_get_buffer2 (AVCodecContext* @s, AVFrame* @frame, int @flags); +public unsafe struct AVCodecContext_get_buffer2_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecContext_get_buffer2_func(AVCodecContext_get_buffer2 func) => new AVCodecContext_get_buffer2_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecContext_get_encode_buffer (AVCodecContext* @s, AVPacket* @pkt, int @flags); +public unsafe struct AVCodecContext_get_encode_buffer_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecContext_get_encode_buffer_func(AVCodecContext_get_encode_buffer func) => new AVCodecContext_get_encode_buffer_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate AVPixelFormat AVCodecContext_get_format (AVCodecContext* @s, AVPixelFormat* @fmt); +public unsafe struct AVCodecContext_get_format_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecContext_get_format_func(AVCodecContext_get_format func) => new AVCodecContext_get_format_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVCodecParser_parser_close (AVCodecParserContext* @s); +public unsafe struct AVCodecParser_parser_close_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecParser_parser_close_func(AVCodecParser_parser_close func) => new AVCodecParser_parser_close_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecParser_parser_init (AVCodecParserContext* @s); +public unsafe struct AVCodecParser_parser_init_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecParser_parser_init_func(AVCodecParser_parser_init func) => new AVCodecParser_parser_init_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecParser_parser_parse (AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size); +public unsafe struct AVCodecParser_parser_parse_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecParser_parser_parse_func(AVCodecParser_parser_parse func) => new AVCodecParser_parser_parse_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecParser_split (AVCodecContext* @avctx, byte* @buf, int @buf_size); +public unsafe struct AVCodecParser_split_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecParser_split_func(AVCodecParser_split func) => new AVCodecParser_split_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVD3D11VADeviceContext_lock (void* @lock_ctx); +public unsafe struct AVD3D11VADeviceContext_lock_func +{ + public IntPtr Pointer; + public static implicit operator AVD3D11VADeviceContext_lock_func(AVD3D11VADeviceContext_lock func) => new AVD3D11VADeviceContext_lock_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVD3D11VADeviceContext_unlock (void* @lock_ctx); +public unsafe struct AVD3D11VADeviceContext_unlock_func +{ + public IntPtr Pointer; + public static implicit operator AVD3D11VADeviceContext_unlock_func(AVD3D11VADeviceContext_unlock func) => new AVD3D11VADeviceContext_unlock_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFilter_activate (AVFilterContext* @ctx); +public unsafe struct AVFilter_activate_func +{ + public IntPtr Pointer; + public static implicit operator AVFilter_activate_func(AVFilter_activate func) => new AVFilter_activate_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFilter_init_dict (AVFilterContext* @ctx, AVDictionary** @options); +public unsafe struct AVFilter_init_dict_func +{ + public IntPtr Pointer; + public static implicit operator AVFilter_init_dict_func(AVFilter_init_dict func) => new AVFilter_init_dict_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFilter_init (AVFilterContext* @ctx); +public unsafe struct AVFilter_init_func +{ + public IntPtr Pointer; + public static implicit operator AVFilter_init_func(AVFilter_init func) => new AVFilter_init_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFilter_preinit (AVFilterContext* @ctx); +public unsafe struct AVFilter_preinit_func +{ + public IntPtr Pointer; + public static implicit operator AVFilter_preinit_func(AVFilter_preinit func) => new AVFilter_preinit_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFilter_process_command (AVFilterContext* @p0, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, byte* @res, int @res_len, int @flags); +public unsafe struct AVFilter_process_command_func +{ + public IntPtr Pointer; + public static implicit operator AVFilter_process_command_func(AVFilter_process_command func) => new AVFilter_process_command_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVFilter_uninit (AVFilterContext* @ctx); +public unsafe struct AVFilter_uninit_func +{ + public IntPtr Pointer; + public static implicit operator AVFilter_uninit_func(AVFilter_uninit func) => new AVFilter_uninit_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFilterGraph_execute (AVFilterContext* @ctx, func_func @func, void* @arg, int* @ret, int @nb_jobs); +public unsafe struct AVFilterGraph_execute_func +{ + public IntPtr Pointer; + public static implicit operator AVFilterGraph_execute_func(AVFilterGraph_execute func) => new AVFilterGraph_execute_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFormatContext_control_message_cb (AVFormatContext* @s, int @type, void* @data, ulong @data_size); +public unsafe struct AVFormatContext_control_message_cb_func +{ + public IntPtr Pointer; + public static implicit operator AVFormatContext_control_message_cb_func(AVFormatContext_control_message_cb func) => new AVFormatContext_control_message_cb_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVFormatContext_io_close (AVFormatContext* @s, AVIOContext* @pb); +public unsafe struct AVFormatContext_io_close_func +{ + public IntPtr Pointer; + public static implicit operator AVFormatContext_io_close_func(AVFormatContext_io_close func) => new AVFormatContext_io_close_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFormatContext_io_close2 (AVFormatContext* @s, AVIOContext* @pb); +public unsafe struct AVFormatContext_io_close2_func +{ + public IntPtr Pointer; + public static implicit operator AVFormatContext_io_close2_func(AVFormatContext_io_close2 func) => new AVFormatContext_io_close2_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFormatContext_io_open (AVFormatContext* @s, AVIOContext** @pb, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @flags, AVDictionary** @options); +public unsafe struct AVFormatContext_io_open_func +{ + public IntPtr Pointer; + public static implicit operator AVFormatContext_io_open_func(AVFormatContext_io_open func) => new AVFormatContext_io_open_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_alloc_frame (AVCodecContext* @avctx, AVFrame* @frame); +public unsafe struct AVHWAccel_alloc_frame_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_alloc_frame_func(AVHWAccel_alloc_frame func) => new AVHWAccel_alloc_frame_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_decode_params (AVCodecContext* @avctx, int @type, byte* @buf, uint @buf_size); +public unsafe struct AVHWAccel_decode_params_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_decode_params_func(AVHWAccel_decode_params func) => new AVHWAccel_decode_params_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_decode_slice (AVCodecContext* @avctx, byte* @buf, uint @buf_size); +public unsafe struct AVHWAccel_decode_slice_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_decode_slice_func(AVHWAccel_decode_slice func) => new AVHWAccel_decode_slice_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_end_frame (AVCodecContext* @avctx); +public unsafe struct AVHWAccel_end_frame_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_end_frame_func(AVHWAccel_end_frame func) => new AVHWAccel_end_frame_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_frame_params (AVCodecContext* @avctx, AVBufferRef* @hw_frames_ctx); +public unsafe struct AVHWAccel_frame_params_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_frame_params_func(AVHWAccel_frame_params func) => new AVHWAccel_frame_params_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_init (AVCodecContext* @avctx); +public unsafe struct AVHWAccel_init_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_init_func(AVHWAccel_init func) => new AVHWAccel_init_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_start_frame (AVCodecContext* @avctx, byte* @buf, uint @buf_size); +public unsafe struct AVHWAccel_start_frame_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_start_frame_func(AVHWAccel_start_frame func) => new AVHWAccel_start_frame_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_uninit (AVCodecContext* @avctx); +public unsafe struct AVHWAccel_uninit_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_uninit_func(AVHWAccel_uninit func) => new AVHWAccel_uninit_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVHWDeviceContext_free (AVHWDeviceContext* @ctx); +public unsafe struct AVHWDeviceContext_free_func +{ + public IntPtr Pointer; + public static implicit operator AVHWDeviceContext_free_func(AVHWDeviceContext_free func) => new AVHWDeviceContext_free_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVHWFramesContext_free (AVHWFramesContext* @ctx); +public unsafe struct AVHWFramesContext_free_func +{ + public IntPtr Pointer; + public static implicit operator AVHWFramesContext_free_func(AVHWFramesContext_free func) => new AVHWFramesContext_free_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_get_device_list (AVFormatContext* @s, AVDeviceInfoList* @device_list); +public unsafe struct AVInputFormat_get_device_list_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_get_device_list_func(AVInputFormat_get_device_list func) => new AVInputFormat_get_device_list_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_close (AVFormatContext* @p0); +public unsafe struct AVInputFormat_read_close_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_close_func(AVInputFormat_read_close func) => new AVInputFormat_read_close_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_header (AVFormatContext* @p0); +public unsafe struct AVInputFormat_read_header_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_header_func(AVInputFormat_read_header func) => new AVInputFormat_read_header_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_packet (AVFormatContext* @p0, AVPacket* @pkt); +public unsafe struct AVInputFormat_read_packet_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_packet_func(AVInputFormat_read_packet func) => new AVInputFormat_read_packet_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_pause (AVFormatContext* @p0); +public unsafe struct AVInputFormat_read_pause_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_pause_func(AVInputFormat_read_pause func) => new AVInputFormat_read_pause_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_play (AVFormatContext* @p0); +public unsafe struct AVInputFormat_read_play_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_play_func(AVInputFormat_read_play func) => new AVInputFormat_read_play_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_probe (AVProbeData* @p0); +public unsafe struct AVInputFormat_read_probe_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_probe_func(AVInputFormat_read_probe func) => new AVInputFormat_read_probe_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_seek (AVFormatContext* @p0, int @stream_index, long @timestamp, int @flags); +public unsafe struct AVInputFormat_read_seek_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_seek_func(AVInputFormat_read_seek func) => new AVInputFormat_read_seek_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_seek2 (AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags); +public unsafe struct AVInputFormat_read_seek2_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_seek2_func(AVInputFormat_read_seek2 func) => new AVInputFormat_read_seek2_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate long AVInputFormat_read_timestamp (AVFormatContext* @s, int @stream_index, long* @pos, long @pos_limit); +public unsafe struct AVInputFormat_read_timestamp_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_timestamp_func(AVInputFormat_read_timestamp func) => new AVInputFormat_read_timestamp_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int avio_alloc_context_read_packet (void* @opaque, byte* @buf, int @buf_size); +public unsafe struct avio_alloc_context_read_packet_func +{ + public IntPtr Pointer; + public static implicit operator avio_alloc_context_read_packet_func(avio_alloc_context_read_packet func) => new avio_alloc_context_read_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate long avio_alloc_context_seek (void* @opaque, long @offset, int @whence); +public unsafe struct avio_alloc_context_seek_func +{ + public IntPtr Pointer; + public static implicit operator avio_alloc_context_seek_func(avio_alloc_context_seek func) => new avio_alloc_context_seek_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int avio_alloc_context_write_packet (void* @opaque, byte* @buf, int @buf_size); +public unsafe struct avio_alloc_context_write_packet_func +{ + public IntPtr Pointer; + public static implicit operator avio_alloc_context_write_packet_func(avio_alloc_context_write_packet func) => new avio_alloc_context_write_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVIOContext_read_packet (void* @opaque, byte* @buf, int @buf_size); +public unsafe struct AVIOContext_read_packet_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_read_packet_func(AVIOContext_read_packet func) => new AVIOContext_read_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVIOContext_read_pause (void* @opaque, int @pause); +public unsafe struct AVIOContext_read_pause_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_read_pause_func(AVIOContext_read_pause func) => new AVIOContext_read_pause_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate long AVIOContext_read_seek (void* @opaque, int @stream_index, long @timestamp, int @flags); +public unsafe struct AVIOContext_read_seek_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_read_seek_func(AVIOContext_read_seek func) => new AVIOContext_read_seek_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate long AVIOContext_seek (void* @opaque, long @offset, int @whence); +public unsafe struct AVIOContext_seek_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_seek_func(AVIOContext_seek func) => new AVIOContext_seek_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate ulong AVIOContext_update_checksum (ulong @checksum, byte* @buf, uint @size); +public unsafe struct AVIOContext_update_checksum_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_update_checksum_func(AVIOContext_update_checksum func) => new AVIOContext_update_checksum_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVIOContext_write_data_type (void* @opaque, byte* @buf, int @buf_size, AVIODataMarkerType @type, long @time); +public unsafe struct AVIOContext_write_data_type_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_write_data_type_func(AVIOContext_write_data_type func) => new AVIOContext_write_data_type_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVIOContext_write_packet (void* @opaque, byte* @buf, int @buf_size); +public unsafe struct AVIOContext_write_packet_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_write_packet_func(AVIOContext_write_packet func) => new AVIOContext_write_packet_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVIOInterruptCB_callback (void* @p0); +public unsafe struct AVIOInterruptCB_callback_func +{ + public IntPtr Pointer; + public static implicit operator AVIOInterruptCB_callback_func(AVIOInterruptCB_callback func) => new AVIOInterruptCB_callback_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_check_bitstream (AVFormatContext* @s, AVStream* @st, AVPacket* @pkt); +public unsafe struct AVOutputFormat_check_bitstream_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_check_bitstream_func(AVOutputFormat_check_bitstream func) => new AVOutputFormat_check_bitstream_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_control_message (AVFormatContext* @s, int @type, void* @data, ulong @data_size); +public unsafe struct AVOutputFormat_control_message_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_control_message_func(AVOutputFormat_control_message func) => new AVOutputFormat_control_message_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVOutputFormat_deinit (AVFormatContext* @p0); +public unsafe struct AVOutputFormat_deinit_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_deinit_func(AVOutputFormat_deinit func) => new AVOutputFormat_deinit_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_get_device_list (AVFormatContext* @s, AVDeviceInfoList* @device_list); +public unsafe struct AVOutputFormat_get_device_list_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_get_device_list_func(AVOutputFormat_get_device_list func) => new AVOutputFormat_get_device_list_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVOutputFormat_get_output_timestamp (AVFormatContext* @s, int @stream, long* @dts, long* @wall); +public unsafe struct AVOutputFormat_get_output_timestamp_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_get_output_timestamp_func(AVOutputFormat_get_output_timestamp func) => new AVOutputFormat_get_output_timestamp_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_init (AVFormatContext* @p0); +public unsafe struct AVOutputFormat_init_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_init_func(AVOutputFormat_init func) => new AVOutputFormat_init_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_interleave_packet (AVFormatContext* @s, AVPacket* @pkt, int @flush, int @has_packet); +public unsafe struct AVOutputFormat_interleave_packet_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_interleave_packet_func(AVOutputFormat_interleave_packet func) => new AVOutputFormat_interleave_packet_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_query_codec (AVCodecID @id, int @std_compliance); +public unsafe struct AVOutputFormat_query_codec_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_query_codec_func(AVOutputFormat_query_codec func) => new AVOutputFormat_query_codec_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_write_header (AVFormatContext* @p0); +public unsafe struct AVOutputFormat_write_header_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_write_header_func(AVOutputFormat_write_header func) => new AVOutputFormat_write_header_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_write_packet (AVFormatContext* @p0, AVPacket* @pkt); +public unsafe struct AVOutputFormat_write_packet_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_write_packet_func(AVOutputFormat_write_packet func) => new AVOutputFormat_write_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_write_trailer (AVFormatContext* @p0); +public unsafe struct AVOutputFormat_write_trailer_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_write_trailer_func(AVOutputFormat_write_trailer func) => new AVOutputFormat_write_trailer_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_write_uncoded_frame (AVFormatContext* @p0, int @stream_index, AVFrame** @frame, uint @flags); +public unsafe struct AVOutputFormat_write_uncoded_frame_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_write_uncoded_frame_func(AVOutputFormat_write_uncoded_frame func) => new AVOutputFormat_write_uncoded_frame_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int func (AVFilterContext* @ctx, void* @arg, int @jobnr, int @nb_jobs); +public unsafe struct func_func +{ + public IntPtr Pointer; + public static implicit operator func_func(func func) => new func_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + diff --git a/FFmpeg.AutoGen.Abstractions/generated/Enums.g.cs b/FFmpeg.AutoGen.Abstractions/generated/Enums.g.cs new file mode 100644 index 00000000..ea3a39f1 --- /dev/null +++ b/FFmpeg.AutoGen.Abstractions/generated/Enums.g.cs @@ -0,0 +1,1736 @@ +namespace FFmpeg.AutoGen.Abstractions; + +public enum AVActiveFormatDescription : int +{ + @AV_AFD_SAME = 8, + @AV_AFD_4_3 = 9, + @AV_AFD_16_9 = 10, + @AV_AFD_14_9 = 11, + @AV_AFD_4_3_SP_14_9 = 13, + @AV_AFD_16_9_SP_14_9 = 14, + @AV_AFD_SP_4_3 = 15, +} + +/// Message types used by avdevice_app_to_dev_control_message(). +public enum AVAppToDevMessageType : int +{ + /// Dummy message. + @AV_APP_TO_DEV_NONE = 1313820229, + /// Window size change message. + @AV_APP_TO_DEV_WINDOW_SIZE = 1195724621, + /// Repaint request message. + @AV_APP_TO_DEV_WINDOW_REPAINT = 1380274241, + /// Request pause/play. + @AV_APP_TO_DEV_PAUSE = 1346458912, + /// Request pause/play. + @AV_APP_TO_DEV_PLAY = 1347174745, + /// Request pause/play. 
+ @AV_APP_TO_DEV_TOGGLE_PAUSE = 1346458964, + /// Volume control message. + @AV_APP_TO_DEV_SET_VOLUME = 1398165324, + /// Mute control messages. + @AV_APP_TO_DEV_MUTE = 541939028, + /// Mute control messages. + @AV_APP_TO_DEV_UNMUTE = 1431131476, + /// Mute control messages. + @AV_APP_TO_DEV_TOGGLE_MUTE = 1414354260, + /// Get volume/mute messages. + @AV_APP_TO_DEV_GET_VOLUME = 1196838732, + /// Get volume/mute messages. + @AV_APP_TO_DEV_GET_MUTE = 1196250452, +} + +public enum AVAudioServiceType : int +{ + @AV_AUDIO_SERVICE_TYPE_MAIN = 0, + @AV_AUDIO_SERVICE_TYPE_EFFECTS = 1, + @AV_AUDIO_SERVICE_TYPE_VISUALLY_IMPAIRED = 2, + @AV_AUDIO_SERVICE_TYPE_HEARING_IMPAIRED = 3, + @AV_AUDIO_SERVICE_TYPE_DIALOGUE = 4, + @AV_AUDIO_SERVICE_TYPE_COMMENTARY = 5, + @AV_AUDIO_SERVICE_TYPE_EMERGENCY = 6, + @AV_AUDIO_SERVICE_TYPE_VOICE_OVER = 7, + @AV_AUDIO_SERVICE_TYPE_KARAOKE = 8, + /// Not part of ABI + @AV_AUDIO_SERVICE_TYPE_NB = 9, +} + +/// @{ +public enum AVChannel : int +{ + @AV_CHAN_NONE = -1, + @AV_CHAN_FRONT_LEFT = 0, + @AV_CHAN_FRONT_RIGHT = 1, + @AV_CHAN_FRONT_CENTER = 2, + @AV_CHAN_LOW_FREQUENCY = 3, + @AV_CHAN_BACK_LEFT = 4, + @AV_CHAN_BACK_RIGHT = 5, + @AV_CHAN_FRONT_LEFT_OF_CENTER = 6, + @AV_CHAN_FRONT_RIGHT_OF_CENTER = 7, + @AV_CHAN_BACK_CENTER = 8, + @AV_CHAN_SIDE_LEFT = 9, + @AV_CHAN_SIDE_RIGHT = 10, + @AV_CHAN_TOP_CENTER = 11, + @AV_CHAN_TOP_FRONT_LEFT = 12, + @AV_CHAN_TOP_FRONT_CENTER = 13, + @AV_CHAN_TOP_FRONT_RIGHT = 14, + @AV_CHAN_TOP_BACK_LEFT = 15, + @AV_CHAN_TOP_BACK_CENTER = 16, + @AV_CHAN_TOP_BACK_RIGHT = 17, + /// Stereo downmix. + @AV_CHAN_STEREO_LEFT = 29, + /// See above. + @AV_CHAN_STEREO_RIGHT = 30, + /// See above. + @AV_CHAN_WIDE_LEFT = 31, + /// See above. + @AV_CHAN_WIDE_RIGHT = 32, + /// See above. + @AV_CHAN_SURROUND_DIRECT_LEFT = 33, + /// See above. + @AV_CHAN_SURROUND_DIRECT_RIGHT = 34, + /// See above. + @AV_CHAN_LOW_FREQUENCY_2 = 35, + /// See above. + @AV_CHAN_TOP_SIDE_LEFT = 36, + /// See above. 
+ @AV_CHAN_TOP_SIDE_RIGHT = 37, + /// See above. + @AV_CHAN_BOTTOM_FRONT_CENTER = 38, + /// See above. + @AV_CHAN_BOTTOM_FRONT_LEFT = 39, + /// See above. + @AV_CHAN_BOTTOM_FRONT_RIGHT = 40, + /// Channel is empty can be safely skipped. + @AV_CHAN_UNUSED = 512, + /// Channel contains data, but its position is unknown. + @AV_CHAN_UNKNOWN = 768, + /// Range of channels between AV_CHAN_AMBISONIC_BASE and AV_CHAN_AMBISONIC_END represent Ambisonic components using the ACN system. + @AV_CHAN_AMBISONIC_BASE = 1024, + /// Range of channels between AV_CHAN_AMBISONIC_BASE and AV_CHAN_AMBISONIC_END represent Ambisonic components using the ACN system. + @AV_CHAN_AMBISONIC_END = 2047, +} + +public enum AVChannelOrder : int +{ + /// Only the channel count is specified, without any further information about the channel order. + @AV_CHANNEL_ORDER_UNSPEC = 0, + /// The native channel order, i.e. the channels are in the same order in which they are defined in the AVChannel enum. This supports up to 63 different channels. + @AV_CHANNEL_ORDER_NATIVE = 1, + /// The channel order does not correspond to any other predefined order and is stored as an explicit map. For example, this could be used to support layouts with 64 or more channels, or with empty/skipped (AV_CHAN_SILENCE) channels at arbitrary positions. + @AV_CHANNEL_ORDER_CUSTOM = 2, + /// The audio is represented as the decomposition of the sound field into spherical harmonics. Each channel corresponds to a single expansion component. Channels are ordered according to ACN (Ambisonic Channel Number). + @AV_CHANNEL_ORDER_AMBISONIC = 3, +} + +/// Location of chroma samples. 
+public enum AVChromaLocation : int +{ + @AVCHROMA_LOC_UNSPECIFIED = 0, + /// MPEG-2/4 4:2:0, H.264 default for 4:2:0 + @AVCHROMA_LOC_LEFT = 1, + /// MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0 + @AVCHROMA_LOC_CENTER = 2, + /// ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2 + @AVCHROMA_LOC_TOPLEFT = 3, + @AVCHROMA_LOC_TOP = 4, + @AVCHROMA_LOC_BOTTOMLEFT = 5, + @AVCHROMA_LOC_BOTTOM = 6, + /// Not part of ABI + @AVCHROMA_LOC_NB = 7, +} + +public enum AVClassCategory : int +{ + @AV_CLASS_CATEGORY_NA = 0, + @AV_CLASS_CATEGORY_INPUT = 1, + @AV_CLASS_CATEGORY_OUTPUT = 2, + @AV_CLASS_CATEGORY_MUXER = 3, + @AV_CLASS_CATEGORY_DEMUXER = 4, + @AV_CLASS_CATEGORY_ENCODER = 5, + @AV_CLASS_CATEGORY_DECODER = 6, + @AV_CLASS_CATEGORY_FILTER = 7, + @AV_CLASS_CATEGORY_BITSTREAM_FILTER = 8, + @AV_CLASS_CATEGORY_SWSCALER = 9, + @AV_CLASS_CATEGORY_SWRESAMPLER = 10, + @AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT = 40, + @AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT = 41, + @AV_CLASS_CATEGORY_DEVICE_AUDIO_OUTPUT = 42, + @AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT = 43, + @AV_CLASS_CATEGORY_DEVICE_OUTPUT = 44, + @AV_CLASS_CATEGORY_DEVICE_INPUT = 45, + /// not part of ABI/API + @AV_CLASS_CATEGORY_NB = 46, +} + +/// Identify the syntax and semantics of the bitstream. The principle is roughly: Two decoders with the same ID can decode the same streams. Two encoders with the same ID can encode compatible streams. There may be slight deviations from the principle due to implementation details. 
+public enum AVCodecID : int +{ + @AV_CODEC_ID_NONE = 0, + @AV_CODEC_ID_MPEG1VIDEO = 1, + /// preferred ID for MPEG-1/2 video decoding + @AV_CODEC_ID_MPEG2VIDEO = 2, + @AV_CODEC_ID_H261 = 3, + @AV_CODEC_ID_H263 = 4, + @AV_CODEC_ID_RV10 = 5, + @AV_CODEC_ID_RV20 = 6, + @AV_CODEC_ID_MJPEG = 7, + @AV_CODEC_ID_MJPEGB = 8, + @AV_CODEC_ID_LJPEG = 9, + @AV_CODEC_ID_SP5X = 10, + @AV_CODEC_ID_JPEGLS = 11, + @AV_CODEC_ID_MPEG4 = 12, + @AV_CODEC_ID_RAWVIDEO = 13, + @AV_CODEC_ID_MSMPEG4V1 = 14, + @AV_CODEC_ID_MSMPEG4V2 = 15, + @AV_CODEC_ID_MSMPEG4V3 = 16, + @AV_CODEC_ID_WMV1 = 17, + @AV_CODEC_ID_WMV2 = 18, + @AV_CODEC_ID_H263P = 19, + @AV_CODEC_ID_H263I = 20, + @AV_CODEC_ID_FLV1 = 21, + @AV_CODEC_ID_SVQ1 = 22, + @AV_CODEC_ID_SVQ3 = 23, + @AV_CODEC_ID_DVVIDEO = 24, + @AV_CODEC_ID_HUFFYUV = 25, + @AV_CODEC_ID_CYUV = 26, + @AV_CODEC_ID_H264 = 27, + @AV_CODEC_ID_INDEO3 = 28, + @AV_CODEC_ID_VP3 = 29, + @AV_CODEC_ID_THEORA = 30, + @AV_CODEC_ID_ASV1 = 31, + @AV_CODEC_ID_ASV2 = 32, + @AV_CODEC_ID_FFV1 = 33, + @AV_CODEC_ID_4XM = 34, + @AV_CODEC_ID_VCR1 = 35, + @AV_CODEC_ID_CLJR = 36, + @AV_CODEC_ID_MDEC = 37, + @AV_CODEC_ID_ROQ = 38, + @AV_CODEC_ID_INTERPLAY_VIDEO = 39, + @AV_CODEC_ID_XAN_WC3 = 40, + @AV_CODEC_ID_XAN_WC4 = 41, + @AV_CODEC_ID_RPZA = 42, + @AV_CODEC_ID_CINEPAK = 43, + @AV_CODEC_ID_WS_VQA = 44, + @AV_CODEC_ID_MSRLE = 45, + @AV_CODEC_ID_MSVIDEO1 = 46, + @AV_CODEC_ID_IDCIN = 47, + @AV_CODEC_ID_8BPS = 48, + @AV_CODEC_ID_SMC = 49, + @AV_CODEC_ID_FLIC = 50, + @AV_CODEC_ID_TRUEMOTION1 = 51, + @AV_CODEC_ID_VMDVIDEO = 52, + @AV_CODEC_ID_MSZH = 53, + @AV_CODEC_ID_ZLIB = 54, + @AV_CODEC_ID_QTRLE = 55, + @AV_CODEC_ID_TSCC = 56, + @AV_CODEC_ID_ULTI = 57, + @AV_CODEC_ID_QDRAW = 58, + @AV_CODEC_ID_VIXL = 59, + @AV_CODEC_ID_QPEG = 60, + @AV_CODEC_ID_PNG = 61, + @AV_CODEC_ID_PPM = 62, + @AV_CODEC_ID_PBM = 63, + @AV_CODEC_ID_PGM = 64, + @AV_CODEC_ID_PGMYUV = 65, + @AV_CODEC_ID_PAM = 66, + @AV_CODEC_ID_FFVHUFF = 67, + @AV_CODEC_ID_RV30 = 68, + @AV_CODEC_ID_RV40 = 69, + @AV_CODEC_ID_VC1 = 
70, + @AV_CODEC_ID_WMV3 = 71, + @AV_CODEC_ID_LOCO = 72, + @AV_CODEC_ID_WNV1 = 73, + @AV_CODEC_ID_AASC = 74, + @AV_CODEC_ID_INDEO2 = 75, + @AV_CODEC_ID_FRAPS = 76, + @AV_CODEC_ID_TRUEMOTION2 = 77, + @AV_CODEC_ID_BMP = 78, + @AV_CODEC_ID_CSCD = 79, + @AV_CODEC_ID_MMVIDEO = 80, + @AV_CODEC_ID_ZMBV = 81, + @AV_CODEC_ID_AVS = 82, + @AV_CODEC_ID_SMACKVIDEO = 83, + @AV_CODEC_ID_NUV = 84, + @AV_CODEC_ID_KMVC = 85, + @AV_CODEC_ID_FLASHSV = 86, + @AV_CODEC_ID_CAVS = 87, + @AV_CODEC_ID_JPEG2000 = 88, + @AV_CODEC_ID_VMNC = 89, + @AV_CODEC_ID_VP5 = 90, + @AV_CODEC_ID_VP6 = 91, + @AV_CODEC_ID_VP6F = 92, + @AV_CODEC_ID_TARGA = 93, + @AV_CODEC_ID_DSICINVIDEO = 94, + @AV_CODEC_ID_TIERTEXSEQVIDEO = 95, + @AV_CODEC_ID_TIFF = 96, + @AV_CODEC_ID_GIF = 97, + @AV_CODEC_ID_DXA = 98, + @AV_CODEC_ID_DNXHD = 99, + @AV_CODEC_ID_THP = 100, + @AV_CODEC_ID_SGI = 101, + @AV_CODEC_ID_C93 = 102, + @AV_CODEC_ID_BETHSOFTVID = 103, + @AV_CODEC_ID_PTX = 104, + @AV_CODEC_ID_TXD = 105, + @AV_CODEC_ID_VP6A = 106, + @AV_CODEC_ID_AMV = 107, + @AV_CODEC_ID_VB = 108, + @AV_CODEC_ID_PCX = 109, + @AV_CODEC_ID_SUNRAST = 110, + @AV_CODEC_ID_INDEO4 = 111, + @AV_CODEC_ID_INDEO5 = 112, + @AV_CODEC_ID_MIMIC = 113, + @AV_CODEC_ID_RL2 = 114, + @AV_CODEC_ID_ESCAPE124 = 115, + @AV_CODEC_ID_DIRAC = 116, + @AV_CODEC_ID_BFI = 117, + @AV_CODEC_ID_CMV = 118, + @AV_CODEC_ID_MOTIONPIXELS = 119, + @AV_CODEC_ID_TGV = 120, + @AV_CODEC_ID_TGQ = 121, + @AV_CODEC_ID_TQI = 122, + @AV_CODEC_ID_AURA = 123, + @AV_CODEC_ID_AURA2 = 124, + @AV_CODEC_ID_V210X = 125, + @AV_CODEC_ID_TMV = 126, + @AV_CODEC_ID_V210 = 127, + @AV_CODEC_ID_DPX = 128, + @AV_CODEC_ID_MAD = 129, + @AV_CODEC_ID_FRWU = 130, + @AV_CODEC_ID_FLASHSV2 = 131, + @AV_CODEC_ID_CDGRAPHICS = 132, + @AV_CODEC_ID_R210 = 133, + @AV_CODEC_ID_ANM = 134, + @AV_CODEC_ID_BINKVIDEO = 135, + @AV_CODEC_ID_IFF_ILBM = 136, + @AV_CODEC_ID_KGV1 = 137, + @AV_CODEC_ID_YOP = 138, + @AV_CODEC_ID_VP8 = 139, + @AV_CODEC_ID_PICTOR = 140, + @AV_CODEC_ID_ANSI = 141, + @AV_CODEC_ID_A64_MULTI = 142, + 
@AV_CODEC_ID_A64_MULTI5 = 143, + @AV_CODEC_ID_R10K = 144, + @AV_CODEC_ID_MXPEG = 145, + @AV_CODEC_ID_LAGARITH = 146, + @AV_CODEC_ID_PRORES = 147, + @AV_CODEC_ID_JV = 148, + @AV_CODEC_ID_DFA = 149, + @AV_CODEC_ID_WMV3IMAGE = 150, + @AV_CODEC_ID_VC1IMAGE = 151, + @AV_CODEC_ID_UTVIDEO = 152, + @AV_CODEC_ID_BMV_VIDEO = 153, + @AV_CODEC_ID_VBLE = 154, + @AV_CODEC_ID_DXTORY = 155, + @AV_CODEC_ID_V410 = 156, + @AV_CODEC_ID_XWD = 157, + @AV_CODEC_ID_CDXL = 158, + @AV_CODEC_ID_XBM = 159, + @AV_CODEC_ID_ZEROCODEC = 160, + @AV_CODEC_ID_MSS1 = 161, + @AV_CODEC_ID_MSA1 = 162, + @AV_CODEC_ID_TSCC2 = 163, + @AV_CODEC_ID_MTS2 = 164, + @AV_CODEC_ID_CLLC = 165, + @AV_CODEC_ID_MSS2 = 166, + @AV_CODEC_ID_VP9 = 167, + @AV_CODEC_ID_AIC = 168, + @AV_CODEC_ID_ESCAPE130 = 169, + @AV_CODEC_ID_G2M = 170, + @AV_CODEC_ID_WEBP = 171, + @AV_CODEC_ID_HNM4_VIDEO = 172, + @AV_CODEC_ID_HEVC = 173, + @AV_CODEC_ID_FIC = 174, + @AV_CODEC_ID_ALIAS_PIX = 175, + @AV_CODEC_ID_BRENDER_PIX = 176, + @AV_CODEC_ID_PAF_VIDEO = 177, + @AV_CODEC_ID_EXR = 178, + @AV_CODEC_ID_VP7 = 179, + @AV_CODEC_ID_SANM = 180, + @AV_CODEC_ID_SGIRLE = 181, + @AV_CODEC_ID_MVC1 = 182, + @AV_CODEC_ID_MVC2 = 183, + @AV_CODEC_ID_HQX = 184, + @AV_CODEC_ID_TDSC = 185, + @AV_CODEC_ID_HQ_HQA = 186, + @AV_CODEC_ID_HAP = 187, + @AV_CODEC_ID_DDS = 188, + @AV_CODEC_ID_DXV = 189, + @AV_CODEC_ID_SCREENPRESSO = 190, + @AV_CODEC_ID_RSCC = 191, + @AV_CODEC_ID_AVS2 = 192, + @AV_CODEC_ID_PGX = 193, + @AV_CODEC_ID_AVS3 = 194, + @AV_CODEC_ID_MSP2 = 195, + @AV_CODEC_ID_VVC = 196, + @AV_CODEC_ID_Y41P = 197, + @AV_CODEC_ID_AVRP = 198, + @AV_CODEC_ID_012V = 199, + @AV_CODEC_ID_AVUI = 200, + @AV_CODEC_ID_AYUV = 201, + @AV_CODEC_ID_TARGA_Y216 = 202, + @AV_CODEC_ID_V308 = 203, + @AV_CODEC_ID_V408 = 204, + @AV_CODEC_ID_YUV4 = 205, + @AV_CODEC_ID_AVRN = 206, + @AV_CODEC_ID_CPIA = 207, + @AV_CODEC_ID_XFACE = 208, + @AV_CODEC_ID_SNOW = 209, + @AV_CODEC_ID_SMVJPEG = 210, + @AV_CODEC_ID_APNG = 211, + @AV_CODEC_ID_DAALA = 212, + @AV_CODEC_ID_CFHD = 213, + 
@AV_CODEC_ID_TRUEMOTION2RT = 214, + @AV_CODEC_ID_M101 = 215, + @AV_CODEC_ID_MAGICYUV = 216, + @AV_CODEC_ID_SHEERVIDEO = 217, + @AV_CODEC_ID_YLC = 218, + @AV_CODEC_ID_PSD = 219, + @AV_CODEC_ID_PIXLET = 220, + @AV_CODEC_ID_SPEEDHQ = 221, + @AV_CODEC_ID_FMVC = 222, + @AV_CODEC_ID_SCPR = 223, + @AV_CODEC_ID_CLEARVIDEO = 224, + @AV_CODEC_ID_XPM = 225, + @AV_CODEC_ID_AV1 = 226, + @AV_CODEC_ID_BITPACKED = 227, + @AV_CODEC_ID_MSCC = 228, + @AV_CODEC_ID_SRGC = 229, + @AV_CODEC_ID_SVG = 230, + @AV_CODEC_ID_GDV = 231, + @AV_CODEC_ID_FITS = 232, + @AV_CODEC_ID_IMM4 = 233, + @AV_CODEC_ID_PROSUMER = 234, + @AV_CODEC_ID_MWSC = 235, + @AV_CODEC_ID_WCMV = 236, + @AV_CODEC_ID_RASC = 237, + @AV_CODEC_ID_HYMT = 238, + @AV_CODEC_ID_ARBC = 239, + @AV_CODEC_ID_AGM = 240, + @AV_CODEC_ID_LSCR = 241, + @AV_CODEC_ID_VP4 = 242, + @AV_CODEC_ID_IMM5 = 243, + @AV_CODEC_ID_MVDV = 244, + @AV_CODEC_ID_MVHA = 245, + @AV_CODEC_ID_CDTOONS = 246, + @AV_CODEC_ID_MV30 = 247, + @AV_CODEC_ID_NOTCHLC = 248, + @AV_CODEC_ID_PFM = 249, + @AV_CODEC_ID_MOBICLIP = 250, + @AV_CODEC_ID_PHOTOCD = 251, + @AV_CODEC_ID_IPU = 252, + @AV_CODEC_ID_ARGO = 253, + @AV_CODEC_ID_CRI = 254, + @AV_CODEC_ID_SIMBIOSIS_IMX = 255, + @AV_CODEC_ID_SGA_VIDEO = 256, + @AV_CODEC_ID_GEM = 257, + @AV_CODEC_ID_VBN = 258, + @AV_CODEC_ID_JPEGXL = 259, + @AV_CODEC_ID_QOI = 260, + @AV_CODEC_ID_PHM = 261, + /// A dummy id pointing at the start of audio codecs + @AV_CODEC_ID_FIRST_AUDIO = 65536, + @AV_CODEC_ID_PCM_S16LE = 65536, + @AV_CODEC_ID_PCM_S16BE = 65537, + @AV_CODEC_ID_PCM_U16LE = 65538, + @AV_CODEC_ID_PCM_U16BE = 65539, + @AV_CODEC_ID_PCM_S8 = 65540, + @AV_CODEC_ID_PCM_U8 = 65541, + @AV_CODEC_ID_PCM_MULAW = 65542, + @AV_CODEC_ID_PCM_ALAW = 65543, + @AV_CODEC_ID_PCM_S32LE = 65544, + @AV_CODEC_ID_PCM_S32BE = 65545, + @AV_CODEC_ID_PCM_U32LE = 65546, + @AV_CODEC_ID_PCM_U32BE = 65547, + @AV_CODEC_ID_PCM_S24LE = 65548, + @AV_CODEC_ID_PCM_S24BE = 65549, + @AV_CODEC_ID_PCM_U24LE = 65550, + @AV_CODEC_ID_PCM_U24BE = 65551, + 
@AV_CODEC_ID_PCM_S24DAUD = 65552, + @AV_CODEC_ID_PCM_ZORK = 65553, + @AV_CODEC_ID_PCM_S16LE_PLANAR = 65554, + @AV_CODEC_ID_PCM_DVD = 65555, + @AV_CODEC_ID_PCM_F32BE = 65556, + @AV_CODEC_ID_PCM_F32LE = 65557, + @AV_CODEC_ID_PCM_F64BE = 65558, + @AV_CODEC_ID_PCM_F64LE = 65559, + @AV_CODEC_ID_PCM_BLURAY = 65560, + @AV_CODEC_ID_PCM_LXF = 65561, + @AV_CODEC_ID_S302M = 65562, + @AV_CODEC_ID_PCM_S8_PLANAR = 65563, + @AV_CODEC_ID_PCM_S24LE_PLANAR = 65564, + @AV_CODEC_ID_PCM_S32LE_PLANAR = 65565, + @AV_CODEC_ID_PCM_S16BE_PLANAR = 65566, + @AV_CODEC_ID_PCM_S64LE = 65567, + @AV_CODEC_ID_PCM_S64BE = 65568, + @AV_CODEC_ID_PCM_F16LE = 65569, + @AV_CODEC_ID_PCM_F24LE = 65570, + @AV_CODEC_ID_PCM_VIDC = 65571, + @AV_CODEC_ID_PCM_SGA = 65572, + @AV_CODEC_ID_ADPCM_IMA_QT = 69632, + @AV_CODEC_ID_ADPCM_IMA_WAV = 69633, + @AV_CODEC_ID_ADPCM_IMA_DK3 = 69634, + @AV_CODEC_ID_ADPCM_IMA_DK4 = 69635, + @AV_CODEC_ID_ADPCM_IMA_WS = 69636, + @AV_CODEC_ID_ADPCM_IMA_SMJPEG = 69637, + @AV_CODEC_ID_ADPCM_MS = 69638, + @AV_CODEC_ID_ADPCM_4XM = 69639, + @AV_CODEC_ID_ADPCM_XA = 69640, + @AV_CODEC_ID_ADPCM_ADX = 69641, + @AV_CODEC_ID_ADPCM_EA = 69642, + @AV_CODEC_ID_ADPCM_G726 = 69643, + @AV_CODEC_ID_ADPCM_CT = 69644, + @AV_CODEC_ID_ADPCM_SWF = 69645, + @AV_CODEC_ID_ADPCM_YAMAHA = 69646, + @AV_CODEC_ID_ADPCM_SBPRO_4 = 69647, + @AV_CODEC_ID_ADPCM_SBPRO_3 = 69648, + @AV_CODEC_ID_ADPCM_SBPRO_2 = 69649, + @AV_CODEC_ID_ADPCM_THP = 69650, + @AV_CODEC_ID_ADPCM_IMA_AMV = 69651, + @AV_CODEC_ID_ADPCM_EA_R1 = 69652, + @AV_CODEC_ID_ADPCM_EA_R3 = 69653, + @AV_CODEC_ID_ADPCM_EA_R2 = 69654, + @AV_CODEC_ID_ADPCM_IMA_EA_SEAD = 69655, + @AV_CODEC_ID_ADPCM_IMA_EA_EACS = 69656, + @AV_CODEC_ID_ADPCM_EA_XAS = 69657, + @AV_CODEC_ID_ADPCM_EA_MAXIS_XA = 69658, + @AV_CODEC_ID_ADPCM_IMA_ISS = 69659, + @AV_CODEC_ID_ADPCM_G722 = 69660, + @AV_CODEC_ID_ADPCM_IMA_APC = 69661, + @AV_CODEC_ID_ADPCM_VIMA = 69662, + @AV_CODEC_ID_ADPCM_AFC = 69663, + @AV_CODEC_ID_ADPCM_IMA_OKI = 69664, + @AV_CODEC_ID_ADPCM_DTK = 69665, + 
@AV_CODEC_ID_ADPCM_IMA_RAD = 69666, + @AV_CODEC_ID_ADPCM_G726LE = 69667, + @AV_CODEC_ID_ADPCM_THP_LE = 69668, + @AV_CODEC_ID_ADPCM_PSX = 69669, + @AV_CODEC_ID_ADPCM_AICA = 69670, + @AV_CODEC_ID_ADPCM_IMA_DAT4 = 69671, + @AV_CODEC_ID_ADPCM_MTAF = 69672, + @AV_CODEC_ID_ADPCM_AGM = 69673, + @AV_CODEC_ID_ADPCM_ARGO = 69674, + @AV_CODEC_ID_ADPCM_IMA_SSI = 69675, + @AV_CODEC_ID_ADPCM_ZORK = 69676, + @AV_CODEC_ID_ADPCM_IMA_APM = 69677, + @AV_CODEC_ID_ADPCM_IMA_ALP = 69678, + @AV_CODEC_ID_ADPCM_IMA_MTF = 69679, + @AV_CODEC_ID_ADPCM_IMA_CUNNING = 69680, + @AV_CODEC_ID_ADPCM_IMA_MOFLEX = 69681, + @AV_CODEC_ID_ADPCM_IMA_ACORN = 69682, + @AV_CODEC_ID_AMR_NB = 73728, + @AV_CODEC_ID_AMR_WB = 73729, + @AV_CODEC_ID_RA_144 = 77824, + @AV_CODEC_ID_RA_288 = 77825, + @AV_CODEC_ID_ROQ_DPCM = 81920, + @AV_CODEC_ID_INTERPLAY_DPCM = 81921, + @AV_CODEC_ID_XAN_DPCM = 81922, + @AV_CODEC_ID_SOL_DPCM = 81923, + @AV_CODEC_ID_SDX2_DPCM = 81924, + @AV_CODEC_ID_GREMLIN_DPCM = 81925, + @AV_CODEC_ID_DERF_DPCM = 81926, + @AV_CODEC_ID_MP2 = 86016, + /// preferred ID for decoding MPEG audio layer 1, 2 or 3 + @AV_CODEC_ID_MP3 = 86017, + @AV_CODEC_ID_AAC = 86018, + @AV_CODEC_ID_AC3 = 86019, + @AV_CODEC_ID_DTS = 86020, + @AV_CODEC_ID_VORBIS = 86021, + @AV_CODEC_ID_DVAUDIO = 86022, + @AV_CODEC_ID_WMAV1 = 86023, + @AV_CODEC_ID_WMAV2 = 86024, + @AV_CODEC_ID_MACE3 = 86025, + @AV_CODEC_ID_MACE6 = 86026, + @AV_CODEC_ID_VMDAUDIO = 86027, + @AV_CODEC_ID_FLAC = 86028, + @AV_CODEC_ID_MP3ADU = 86029, + @AV_CODEC_ID_MP3ON4 = 86030, + @AV_CODEC_ID_SHORTEN = 86031, + @AV_CODEC_ID_ALAC = 86032, + @AV_CODEC_ID_WESTWOOD_SND1 = 86033, + /// as in Berlin toast format + @AV_CODEC_ID_GSM = 86034, + @AV_CODEC_ID_QDM2 = 86035, + @AV_CODEC_ID_COOK = 86036, + @AV_CODEC_ID_TRUESPEECH = 86037, + @AV_CODEC_ID_TTA = 86038, + @AV_CODEC_ID_SMACKAUDIO = 86039, + @AV_CODEC_ID_QCELP = 86040, + @AV_CODEC_ID_WAVPACK = 86041, + @AV_CODEC_ID_DSICINAUDIO = 86042, + @AV_CODEC_ID_IMC = 86043, + @AV_CODEC_ID_MUSEPACK7 = 86044, + @AV_CODEC_ID_MLP 
= 86045, + @AV_CODEC_ID_GSM_MS = 86046, + @AV_CODEC_ID_ATRAC3 = 86047, + @AV_CODEC_ID_APE = 86048, + @AV_CODEC_ID_NELLYMOSER = 86049, + @AV_CODEC_ID_MUSEPACK8 = 86050, + @AV_CODEC_ID_SPEEX = 86051, + @AV_CODEC_ID_WMAVOICE = 86052, + @AV_CODEC_ID_WMAPRO = 86053, + @AV_CODEC_ID_WMALOSSLESS = 86054, + @AV_CODEC_ID_ATRAC3P = 86055, + @AV_CODEC_ID_EAC3 = 86056, + @AV_CODEC_ID_SIPR = 86057, + @AV_CODEC_ID_MP1 = 86058, + @AV_CODEC_ID_TWINVQ = 86059, + @AV_CODEC_ID_TRUEHD = 86060, + @AV_CODEC_ID_MP4ALS = 86061, + @AV_CODEC_ID_ATRAC1 = 86062, + @AV_CODEC_ID_BINKAUDIO_RDFT = 86063, + @AV_CODEC_ID_BINKAUDIO_DCT = 86064, + @AV_CODEC_ID_AAC_LATM = 86065, + @AV_CODEC_ID_QDMC = 86066, + @AV_CODEC_ID_CELT = 86067, + @AV_CODEC_ID_G723_1 = 86068, + @AV_CODEC_ID_G729 = 86069, + @AV_CODEC_ID_8SVX_EXP = 86070, + @AV_CODEC_ID_8SVX_FIB = 86071, + @AV_CODEC_ID_BMV_AUDIO = 86072, + @AV_CODEC_ID_RALF = 86073, + @AV_CODEC_ID_IAC = 86074, + @AV_CODEC_ID_ILBC = 86075, + @AV_CODEC_ID_OPUS = 86076, + @AV_CODEC_ID_COMFORT_NOISE = 86077, + @AV_CODEC_ID_TAK = 86078, + @AV_CODEC_ID_METASOUND = 86079, + @AV_CODEC_ID_PAF_AUDIO = 86080, + @AV_CODEC_ID_ON2AVC = 86081, + @AV_CODEC_ID_DSS_SP = 86082, + @AV_CODEC_ID_CODEC2 = 86083, + @AV_CODEC_ID_FFWAVESYNTH = 86084, + @AV_CODEC_ID_SONIC = 86085, + @AV_CODEC_ID_SONIC_LS = 86086, + @AV_CODEC_ID_EVRC = 86087, + @AV_CODEC_ID_SMV = 86088, + @AV_CODEC_ID_DSD_LSBF = 86089, + @AV_CODEC_ID_DSD_MSBF = 86090, + @AV_CODEC_ID_DSD_LSBF_PLANAR = 86091, + @AV_CODEC_ID_DSD_MSBF_PLANAR = 86092, + @AV_CODEC_ID_4GV = 86093, + @AV_CODEC_ID_INTERPLAY_ACM = 86094, + @AV_CODEC_ID_XMA1 = 86095, + @AV_CODEC_ID_XMA2 = 86096, + @AV_CODEC_ID_DST = 86097, + @AV_CODEC_ID_ATRAC3AL = 86098, + @AV_CODEC_ID_ATRAC3PAL = 86099, + @AV_CODEC_ID_DOLBY_E = 86100, + @AV_CODEC_ID_APTX = 86101, + @AV_CODEC_ID_APTX_HD = 86102, + @AV_CODEC_ID_SBC = 86103, + @AV_CODEC_ID_ATRAC9 = 86104, + @AV_CODEC_ID_HCOM = 86105, + @AV_CODEC_ID_ACELP_KELVIN = 86106, + @AV_CODEC_ID_MPEGH_3D_AUDIO = 86107, + 
@AV_CODEC_ID_SIREN = 86108, + @AV_CODEC_ID_HCA = 86109, + @AV_CODEC_ID_FASTAUDIO = 86110, + @AV_CODEC_ID_MSNSIREN = 86111, + @AV_CODEC_ID_DFPWM = 86112, + /// A dummy ID pointing at the start of subtitle codecs. + @AV_CODEC_ID_FIRST_SUBTITLE = 94208, + @AV_CODEC_ID_DVD_SUBTITLE = 94208, + @AV_CODEC_ID_DVB_SUBTITLE = 94209, + /// raw UTF-8 text + @AV_CODEC_ID_TEXT = 94210, + @AV_CODEC_ID_XSUB = 94211, + @AV_CODEC_ID_SSA = 94212, + @AV_CODEC_ID_MOV_TEXT = 94213, + @AV_CODEC_ID_HDMV_PGS_SUBTITLE = 94214, + @AV_CODEC_ID_DVB_TELETEXT = 94215, + @AV_CODEC_ID_SRT = 94216, + @AV_CODEC_ID_MICRODVD = 94217, + @AV_CODEC_ID_EIA_608 = 94218, + @AV_CODEC_ID_JACOSUB = 94219, + @AV_CODEC_ID_SAMI = 94220, + @AV_CODEC_ID_REALTEXT = 94221, + @AV_CODEC_ID_STL = 94222, + @AV_CODEC_ID_SUBVIEWER1 = 94223, + @AV_CODEC_ID_SUBVIEWER = 94224, + @AV_CODEC_ID_SUBRIP = 94225, + @AV_CODEC_ID_WEBVTT = 94226, + @AV_CODEC_ID_MPL2 = 94227, + @AV_CODEC_ID_VPLAYER = 94228, + @AV_CODEC_ID_PJS = 94229, + @AV_CODEC_ID_ASS = 94230, + @AV_CODEC_ID_HDMV_TEXT_SUBTITLE = 94231, + @AV_CODEC_ID_TTML = 94232, + @AV_CODEC_ID_ARIB_CAPTION = 94233, + /// A dummy ID pointing at the start of various fake codecs. + @AV_CODEC_ID_FIRST_UNKNOWN = 98304, + @AV_CODEC_ID_TTF = 98304, + /// Contain timestamp estimated through PCR of program stream. 
+ @AV_CODEC_ID_SCTE_35 = 98305, + @AV_CODEC_ID_EPG = 98306, + @AV_CODEC_ID_BINTEXT = 98307, + @AV_CODEC_ID_XBIN = 98308, + @AV_CODEC_ID_IDF = 98309, + @AV_CODEC_ID_OTF = 98310, + @AV_CODEC_ID_SMPTE_KLV = 98311, + @AV_CODEC_ID_DVD_NAV = 98312, + @AV_CODEC_ID_TIMED_ID3 = 98313, + @AV_CODEC_ID_BIN_DATA = 98314, + /// codec_id is not known (like AV_CODEC_ID_NONE) but lavf should attempt to identify it + @AV_CODEC_ID_PROBE = 102400, + /// _FAKE_ codec to indicate a raw MPEG-2 TS stream (only used by libavformat) + @AV_CODEC_ID_MPEG2TS = 131072, + /// _FAKE_ codec to indicate a MPEG-4 Systems stream (only used by libavformat) + @AV_CODEC_ID_MPEG4SYSTEMS = 131073, + /// Dummy codec for streams containing only metadata information. + @AV_CODEC_ID_FFMETADATA = 135168, + /// Passthrough codec, AVFrames wrapped in AVPacket + @AV_CODEC_ID_WRAPPED_AVFRAME = 135169, +} + +/// Chromaticity coordinates of the source primaries. These values match the ones defined by ISO/IEC 23091-2_2019 subclause 8.1 and ITU-T H.273. +public enum AVColorPrimaries : int +{ + @AVCOL_PRI_RESERVED0 = 0, + /// also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B + @AVCOL_PRI_BT709 = 1, + @AVCOL_PRI_UNSPECIFIED = 2, + @AVCOL_PRI_RESERVED = 3, + /// also FCC Title 47 Code of Federal Regulations 73.682 (a)(20) + @AVCOL_PRI_BT470M = 4, + /// also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM + @AVCOL_PRI_BT470BG = 5, + /// also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC + @AVCOL_PRI_SMPTE170M = 6, + /// identical to above, also called "SMPTE C" even though it uses D65 + @AVCOL_PRI_SMPTE240M = 7, + /// colour filters using Illuminant C + @AVCOL_PRI_FILM = 8, + /// ITU-R BT2020 + @AVCOL_PRI_BT2020 = 9, + /// SMPTE ST 428-1 (CIE 1931 XYZ) + @AVCOL_PRI_SMPTE428 = 10, + @AVCOL_PRI_SMPTEST428_1 = 10, + /// SMPTE ST 431-2 (2011) / DCI P3 + @AVCOL_PRI_SMPTE431 = 11, + /// SMPTE ST 432-1 (2010) / P3 D65 / Display P3 + @AVCOL_PRI_SMPTE432 = 12, + /// EBU Tech. 
3213-E (nothing there) / one of JEDEC P22 group phosphors + @AVCOL_PRI_EBU3213 = 22, + @AVCOL_PRI_JEDEC_P22 = 22, + /// Not part of ABI + @AVCOL_PRI_NB = 23, +} + +/// Visual content value range. +public enum AVColorRange : int +{ + @AVCOL_RANGE_UNSPECIFIED = 0, + /// Narrow or limited range content. + @AVCOL_RANGE_MPEG = 1, + /// Full range content. + @AVCOL_RANGE_JPEG = 2, + /// Not part of ABI + @AVCOL_RANGE_NB = 3, +} + +/// YUV colorspace type. These values match the ones defined by ISO/IEC 23091-2_2019 subclause 8.3. +public enum AVColorSpace : int +{ + /// order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1 + @AVCOL_SPC_RGB = 0, + /// also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B + @AVCOL_SPC_BT709 = 1, + @AVCOL_SPC_UNSPECIFIED = 2, + /// reserved for future use by ITU-T and ISO/IEC just like 15-255 are + @AVCOL_SPC_RESERVED = 3, + /// FCC Title 47 Code of Federal Regulations 73.682 (a)(20) + @AVCOL_SPC_FCC = 4, + /// also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601 + @AVCOL_SPC_BT470BG = 5, + /// also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above + @AVCOL_SPC_SMPTE170M = 6, + /// derived from 170M primaries and D65 white point, 170M is derived from BT470 System M's primaries + @AVCOL_SPC_SMPTE240M = 7, + /// used by Dirac / VC-2 and H.264 FRext, see ITU-T SG16 + @AVCOL_SPC_YCGCO = 8, + @AVCOL_SPC_YCOCG = 8, + /// ITU-R BT2020 non-constant luminance system + @AVCOL_SPC_BT2020_NCL = 9, + /// ITU-R BT2020 constant luminance system + @AVCOL_SPC_BT2020_CL = 10, + /// SMPTE 2085, Y'D'zD'x + @AVCOL_SPC_SMPTE2085 = 11, + /// Chromaticity-derived non-constant luminance system + @AVCOL_SPC_CHROMA_DERIVED_NCL = 12, + /// Chromaticity-derived constant luminance system + @AVCOL_SPC_CHROMA_DERIVED_CL = 13, + /// ITU-R BT.2100-0, ICtCp + @AVCOL_SPC_ICTCP = 14, + /// Not part of ABI + @AVCOL_SPC_NB = 15, +} + +/// Color 
Transfer Characteristic. These values match the ones defined by ISO/IEC 23091-2_2019 subclause 8.2. +public enum AVColorTransferCharacteristic : int +{ + @AVCOL_TRC_RESERVED0 = 0, + /// also ITU-R BT1361 + @AVCOL_TRC_BT709 = 1, + @AVCOL_TRC_UNSPECIFIED = 2, + @AVCOL_TRC_RESERVED = 3, + /// also ITU-R BT470M / ITU-R BT1700 625 PAL & SECAM + @AVCOL_TRC_GAMMA22 = 4, + /// also ITU-R BT470BG + @AVCOL_TRC_GAMMA28 = 5, + /// also ITU-R BT601-6 525 or 625 / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC + @AVCOL_TRC_SMPTE170M = 6, + @AVCOL_TRC_SMPTE240M = 7, + /// "Linear transfer characteristics" + @AVCOL_TRC_LINEAR = 8, + /// "Logarithmic transfer characteristic (100:1 range)" + @AVCOL_TRC_LOG = 9, + /// "Logarithmic transfer characteristic (100 * Sqrt(10) : 1 range)" + @AVCOL_TRC_LOG_SQRT = 10, + /// IEC 61966-2-4 + @AVCOL_TRC_IEC61966_2_4 = 11, + /// ITU-R BT1361 Extended Colour Gamut + @AVCOL_TRC_BT1361_ECG = 12, + /// IEC 61966-2-1 (sRGB or sYCC) + @AVCOL_TRC_IEC61966_2_1 = 13, + /// ITU-R BT2020 for 10-bit system + @AVCOL_TRC_BT2020_10 = 14, + /// ITU-R BT2020 for 12-bit system + @AVCOL_TRC_BT2020_12 = 15, + /// SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems + @AVCOL_TRC_SMPTE2084 = 16, + @AVCOL_TRC_SMPTEST2084 = 16, + /// SMPTE ST 428-1 + @AVCOL_TRC_SMPTE428 = 17, + @AVCOL_TRC_SMPTEST428_1 = 17, + /// ARIB STD-B67, known as "Hybrid log-gamma" + @AVCOL_TRC_ARIB_STD_B67 = 18, + /// Not part of ABI + @AVCOL_TRC_NB = 19, +} + +/// Message types used by avdevice_dev_to_app_control_message(). +public enum AVDevToAppMessageType : int +{ + /// Dummy message. + @AV_DEV_TO_APP_NONE = 1313820229, + /// Create window buffer message. + @AV_DEV_TO_APP_CREATE_WINDOW_BUFFER = 1111708229, + /// Prepare window buffer message. + @AV_DEV_TO_APP_PREPARE_WINDOW_BUFFER = 1112560197, + /// Display window buffer message. + @AV_DEV_TO_APP_DISPLAY_WINDOW_BUFFER = 1111771475, + /// Destroy window buffer message. 
+ @AV_DEV_TO_APP_DESTROY_WINDOW_BUFFER = 1111770451, + /// Buffer fullness status messages. + @AV_DEV_TO_APP_BUFFER_OVERFLOW = 1112491596, + /// Buffer fullness status messages. + @AV_DEV_TO_APP_BUFFER_UNDERFLOW = 1112884812, + /// Buffer readable/writable. + @AV_DEV_TO_APP_BUFFER_READABLE = 1112687648, + /// Buffer readable/writable. + @AV_DEV_TO_APP_BUFFER_WRITABLE = 1113018912, + /// Mute state change message. + @AV_DEV_TO_APP_MUTE_STATE_CHANGED = 1129141588, + /// Volume level change message. + @AV_DEV_TO_APP_VOLUME_LEVEL_CHANGED = 1129729868, +} + +public enum AVDiscard : int +{ + /// discard nothing + @AVDISCARD_NONE = -16, + /// discard useless packets like 0 size packets in avi + @AVDISCARD_DEFAULT = 0, + /// discard all non reference + @AVDISCARD_NONREF = 8, + /// discard all bidirectional frames + @AVDISCARD_BIDIR = 16, + /// discard all non intra frames + @AVDISCARD_NONINTRA = 24, + /// discard all frames except keyframes + @AVDISCARD_NONKEY = 32, + /// discard all + @AVDISCARD_ALL = 48, +} + +/// The duration of a video can be estimated through various ways, and this enum can be used to know how the duration was estimated. +public enum AVDurationEstimationMethod : int +{ + /// Duration accurately estimated from PTSes + @AVFMT_DURATION_FROM_PTS = 0, + /// Duration estimated from a stream with a known duration + @AVFMT_DURATION_FROM_STREAM = 1, + /// Duration estimated from bitrate (less accurate) + @AVFMT_DURATION_FROM_BITRATE = 2, +} + +public enum AVFieldOrder : int +{ + @AV_FIELD_UNKNOWN = 0, + @AV_FIELD_PROGRESSIVE = 1, + @AV_FIELD_TT = 2, + @AV_FIELD_BB = 3, + @AV_FIELD_TB = 4, + @AV_FIELD_BT = 5, +} + +/// stage of the initialization of the link properties (dimensions, etc) +public enum AVFilterLink_init_state : int +{ + /// not started + @AVLINK_UNINIT = 0, + /// started, but incomplete + @AVLINK_STARTINIT = 1, + /// complete + @AVLINK_INIT = 2, +} + +/// @{ AVFrame is an abstraction for reference-counted raw multimedia data. 
+public enum AVFrameSideDataType : int +{ + /// The data is the AVPanScan struct defined in libavcodec. + @AV_FRAME_DATA_PANSCAN = 0, + /// ATSC A53 Part 4 Closed Captions. A53 CC bitstream is stored as uint8_t in AVFrameSideData.data. The number of bytes of CC data is AVFrameSideData.size. + @AV_FRAME_DATA_A53_CC = 1, + /// Stereoscopic 3d metadata. The data is the AVStereo3D struct defined in libavutil/stereo3d.h. + @AV_FRAME_DATA_STEREO3D = 2, + /// The data is the AVMatrixEncoding enum defined in libavutil/channel_layout.h. + @AV_FRAME_DATA_MATRIXENCODING = 3, + /// Metadata relevant to a downmix procedure. The data is the AVDownmixInfo struct defined in libavutil/downmix_info.h. + @AV_FRAME_DATA_DOWNMIX_INFO = 4, + /// ReplayGain information in the form of the AVReplayGain struct. + @AV_FRAME_DATA_REPLAYGAIN = 5, + /// This side data contains a 3x3 transformation matrix describing an affine transformation that needs to be applied to the frame for correct presentation. + @AV_FRAME_DATA_DISPLAYMATRIX = 6, + /// Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVActiveFormatDescription enum. + @AV_FRAME_DATA_AFD = 7, + /// Motion vectors exported by some codecs (on demand through the export_mvs flag set in the libavcodec AVCodecContext flags2 option). The data is the AVMotionVector struct defined in libavutil/motion_vector.h. + @AV_FRAME_DATA_MOTION_VECTORS = 8, + /// Recommmends skipping the specified number of samples. This is exported only if the "skip_manual" AVOption is set in libavcodec. This has the same format as AV_PKT_DATA_SKIP_SAMPLES. + @AV_FRAME_DATA_SKIP_SAMPLES = 9, + /// This side data must be associated with an audio frame and corresponds to enum AVAudioServiceType defined in avcodec.h. + @AV_FRAME_DATA_AUDIO_SERVICE_TYPE = 10, + /// Mastering display metadata associated with a video frame. 
The payload is an AVMasteringDisplayMetadata type and contains information about the mastering display color volume. + @AV_FRAME_DATA_MASTERING_DISPLAY_METADATA = 11, + /// The GOP timecode in 25 bit timecode format. Data format is 64-bit integer. This is set on the first frame of a GOP that has a temporal reference of 0. + @AV_FRAME_DATA_GOP_TIMECODE = 12, + /// The data represents the AVSphericalMapping structure defined in libavutil/spherical.h. + @AV_FRAME_DATA_SPHERICAL = 13, + /// Content light level (based on CTA-861.3). This payload contains data in the form of the AVContentLightMetadata struct. + @AV_FRAME_DATA_CONTENT_LIGHT_LEVEL = 14, + /// The data contains an ICC profile as an opaque octet buffer following the format described by ISO 15076-1 with an optional name defined in the metadata key entry "name". + @AV_FRAME_DATA_ICC_PROFILE = 15, + /// Timecode which conforms to SMPTE ST 12-1. The data is an array of 4 uint32_t where the first uint32_t describes how many (1-3) of the other timecodes are used. The timecode format is described in the documentation of av_timecode_get_smpte_from_framenum() function in libavutil/timecode.h. + @AV_FRAME_DATA_S12M_TIMECODE = 16, + /// HDR dynamic metadata associated with a video frame. The payload is an AVDynamicHDRPlus type and contains information for color volume transform - application 4 of SMPTE 2094-40:2016 standard. + @AV_FRAME_DATA_DYNAMIC_HDR_PLUS = 17, + /// Regions Of Interest, the data is an array of AVRegionOfInterest type, the number of array element is implied by AVFrameSideData.size / AVRegionOfInterest.self_size. + @AV_FRAME_DATA_REGIONS_OF_INTEREST = 18, + /// Encoding parameters for a video frame, as described by AVVideoEncParams. + @AV_FRAME_DATA_VIDEO_ENC_PARAMS = 19, + /// User data unregistered metadata associated with a video frame. 
This is the H.26[45] UDU SEI message, and shouldn't be used for any other purpose The data is stored as uint8_t in AVFrameSideData.data which is 16 bytes of uuid_iso_iec_11578 followed by AVFrameSideData.size - 16 bytes of user_data_payload_byte. + @AV_FRAME_DATA_SEI_UNREGISTERED = 20, + /// Film grain parameters for a frame, described by AVFilmGrainParams. Must be present for every frame which should have film grain applied. + @AV_FRAME_DATA_FILM_GRAIN_PARAMS = 21, + /// Bounding boxes for object detection and classification, as described by AVDetectionBBoxHeader. + @AV_FRAME_DATA_DETECTION_BBOXES = 22, + /// Dolby Vision RPU raw data, suitable for passing to x265 or other libraries. Array of uint8_t, with NAL emulation bytes intact. + @AV_FRAME_DATA_DOVI_RPU_BUFFER = 23, + /// Parsed Dolby Vision metadata, suitable for passing to a software implementation. The payload is the AVDOVIMetadata struct defined in libavutil/dovi_meta.h. + @AV_FRAME_DATA_DOVI_METADATA = 24, + /// HDR Vivid dynamic metadata associated with a video frame. The payload is an AVDynamicHDRVivid type and contains information for color volume transform - CUVA 005.1-2021. + @AV_FRAME_DATA_DYNAMIC_HDR_VIVID = 25, +} + +/// Option for overlapping elliptical pixel selectors in an image. +public enum AVHDRPlusOverlapProcessOption : int +{ + @AV_HDR_PLUS_OVERLAP_PROCESS_WEIGHTED_AVERAGING = 0, + @AV_HDR_PLUS_OVERLAP_PROCESS_LAYERING = 1, +} + +public enum AVHWDeviceType : int +{ + @AV_HWDEVICE_TYPE_NONE = 0, + @AV_HWDEVICE_TYPE_VDPAU = 1, + @AV_HWDEVICE_TYPE_CUDA = 2, + @AV_HWDEVICE_TYPE_VAAPI = 3, + @AV_HWDEVICE_TYPE_DXVA2 = 4, + @AV_HWDEVICE_TYPE_QSV = 5, + @AV_HWDEVICE_TYPE_VIDEOTOOLBOX = 6, + @AV_HWDEVICE_TYPE_D3D11VA = 7, + @AV_HWDEVICE_TYPE_DRM = 8, + @AV_HWDEVICE_TYPE_OPENCL = 9, + @AV_HWDEVICE_TYPE_MEDIACODEC = 10, + @AV_HWDEVICE_TYPE_VULKAN = 11, +} + +public enum AVHWFrameTransferDirection : int +{ + /// Transfer the data from the queried hw frame. 
+ @AV_HWFRAME_TRANSFER_DIRECTION_FROM = 0, + /// Transfer the data to the queried hw frame. + @AV_HWFRAME_TRANSFER_DIRECTION_TO = 1, +} + +/// Different data types that can be returned via the AVIO write_data_type callback. +public enum AVIODataMarkerType : int +{ + /// Header data; this needs to be present for the stream to be decodeable. + @AVIO_DATA_MARKER_HEADER = 0, + /// A point in the output bytestream where a decoder can start decoding (i.e. a keyframe). A demuxer/decoder given the data flagged with AVIO_DATA_MARKER_HEADER, followed by any AVIO_DATA_MARKER_SYNC_POINT, should give decodeable results. + @AVIO_DATA_MARKER_SYNC_POINT = 1, + /// A point in the output bytestream where a demuxer can start parsing (for non self synchronizing bytestream formats). That is, any non-keyframe packet start point. + @AVIO_DATA_MARKER_BOUNDARY_POINT = 2, + /// This is any, unlabelled data. It can either be a muxer not marking any positions at all, it can be an actual boundary/sync point that the muxer chooses not to mark, or a later part of a packet/fragment that is cut into multiple write callbacks due to limited IO buffer size. + @AVIO_DATA_MARKER_UNKNOWN = 3, + /// Trailer data, which doesn't contain actual content, but only for finalizing the output file. + @AVIO_DATA_MARKER_TRAILER = 4, + /// A point in the output bytestream where the underlying AVIOContext might flush the buffer depending on latency or buffering requirements. Typically means the end of a packet. + @AVIO_DATA_MARKER_FLUSH_POINT = 5, +} + +/// Directory entry types. 
+public enum AVIODirEntryType : int +{ + @AVIO_ENTRY_UNKNOWN = 0, + @AVIO_ENTRY_BLOCK_DEVICE = 1, + @AVIO_ENTRY_CHARACTER_DEVICE = 2, + @AVIO_ENTRY_DIRECTORY = 3, + @AVIO_ENTRY_NAMED_PIPE = 4, + @AVIO_ENTRY_SYMBOLIC_LINK = 5, + @AVIO_ENTRY_SOCKET = 6, + @AVIO_ENTRY_FILE = 7, + @AVIO_ENTRY_SERVER = 8, + @AVIO_ENTRY_SHARE = 9, + @AVIO_ENTRY_WORKGROUP = 10, +} + +public enum AVMatrixEncoding : int +{ + @AV_MATRIX_ENCODING_NONE = 0, + @AV_MATRIX_ENCODING_DOLBY = 1, + @AV_MATRIX_ENCODING_DPLII = 2, + @AV_MATRIX_ENCODING_DPLIIX = 3, + @AV_MATRIX_ENCODING_DPLIIZ = 4, + @AV_MATRIX_ENCODING_DOLBYEX = 5, + @AV_MATRIX_ENCODING_DOLBYHEADPHONE = 6, + @AV_MATRIX_ENCODING_NB = 7, +} + +/// Media Type +public enum AVMediaType : int +{ + /// Usually treated as AVMEDIA_TYPE_DATA + @AVMEDIA_TYPE_UNKNOWN = -1, + @AVMEDIA_TYPE_VIDEO = 0, + @AVMEDIA_TYPE_AUDIO = 1, + /// Opaque data information usually continuous + @AVMEDIA_TYPE_DATA = 2, + @AVMEDIA_TYPE_SUBTITLE = 3, + /// Opaque data information usually sparse + @AVMEDIA_TYPE_ATTACHMENT = 4, + @AVMEDIA_TYPE_NB = 5, +} + +/// @{ AVOptions provide a generic system to declare options on arbitrary structs ("objects"). An option can have a help text, a type and a range of possible values. Options may then be enumerated, read and written to. 
+public enum AVOptionType : int +{ + @AV_OPT_TYPE_FLAGS = 0, + @AV_OPT_TYPE_INT = 1, + @AV_OPT_TYPE_INT64 = 2, + @AV_OPT_TYPE_DOUBLE = 3, + @AV_OPT_TYPE_FLOAT = 4, + @AV_OPT_TYPE_STRING = 5, + @AV_OPT_TYPE_RATIONAL = 6, + /// offset must point to a pointer immediately followed by an int for the length + @AV_OPT_TYPE_BINARY = 7, + @AV_OPT_TYPE_DICT = 8, + @AV_OPT_TYPE_UINT64 = 9, + @AV_OPT_TYPE_CONST = 10, + /// offset must point to two consecutive integers + @AV_OPT_TYPE_IMAGE_SIZE = 11, + @AV_OPT_TYPE_PIXEL_FMT = 12, + @AV_OPT_TYPE_SAMPLE_FMT = 13, + /// offset must point to AVRational + @AV_OPT_TYPE_VIDEO_RATE = 14, + @AV_OPT_TYPE_DURATION = 15, + @AV_OPT_TYPE_COLOR = 16, + @AV_OPT_TYPE_CHANNEL_LAYOUT = 17, + @AV_OPT_TYPE_BOOL = 18, + @AV_OPT_TYPE_CHLAYOUT = 19, +} + +/// Types and functions for working with AVPacket. @{ +public enum AVPacketSideDataType : int +{ + /// An AV_PKT_DATA_PALETTE side data packet contains exactly AVPALETTE_SIZE bytes worth of palette. This side data signals that a new palette is present. + @AV_PKT_DATA_PALETTE = 0, + /// The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format that the extradata buffer was changed and the receiving side should act upon it appropriately. The new extradata is embedded in the side data buffer and should be immediately used for processing the current frame or packet. + @AV_PKT_DATA_NEW_EXTRADATA = 1, + /// An AV_PKT_DATA_PARAM_CHANGE side data packet is laid out as follows: + @AV_PKT_DATA_PARAM_CHANGE = 2, + /// An AV_PKT_DATA_H263_MB_INFO side data packet contains a number of structures with info about macroblocks relevant to splitting the packet into smaller packets on macroblock edges (e.g. as for RFC 2190). That is, it does not necessarily contain info about all macroblocks, as long as the distance between macroblocks in the info is smaller than the target payload size. 
Each MB info structure is 12 bytes, and is laid out as follows: + @AV_PKT_DATA_H263_MB_INFO = 3, + /// This side data should be associated with an audio stream and contains ReplayGain information in form of the AVReplayGain struct. + @AV_PKT_DATA_REPLAYGAIN = 4, + /// This side data contains a 3x3 transformation matrix describing an affine transformation that needs to be applied to the decoded video frames for correct presentation. + @AV_PKT_DATA_DISPLAYMATRIX = 5, + /// This side data should be associated with a video stream and contains Stereoscopic 3D information in form of the AVStereo3D struct. + @AV_PKT_DATA_STEREO3D = 6, + /// This side data should be associated with an audio stream and corresponds to enum AVAudioServiceType. + @AV_PKT_DATA_AUDIO_SERVICE_TYPE = 7, + /// This side data contains quality related information from the encoder. + @AV_PKT_DATA_QUALITY_STATS = 8, + /// This side data contains an integer value representing the stream index of a "fallback" track. A fallback track indicates an alternate track to use when the current track can not be decoded for some reason. e.g. no decoder available for codec. + @AV_PKT_DATA_FALLBACK_TRACK = 9, + /// This side data corresponds to the AVCPBProperties struct. + @AV_PKT_DATA_CPB_PROPERTIES = 10, + /// Recommmends skipping the specified number of samples + @AV_PKT_DATA_SKIP_SAMPLES = 11, + /// An AV_PKT_DATA_JP_DUALMONO side data packet indicates that the packet may contain "dual mono" audio specific to Japanese DTV and if it is true, recommends only the selected channel to be used. + @AV_PKT_DATA_JP_DUALMONO = 12, + /// A list of zero terminated key/value strings. There is no end marker for the list, so it is required to rely on the side data size to stop. + @AV_PKT_DATA_STRINGS_METADATA = 13, + /// Subtitle event position + @AV_PKT_DATA_SUBTITLE_POSITION = 14, + /// Data found in BlockAdditional element of matroska container. 
There is no end marker for the data, so it is required to rely on the side data size to recognize the end. 8 byte id (as found in BlockAddId) followed by data. + @AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL = 15, + /// The optional first identifier line of a WebVTT cue. + @AV_PKT_DATA_WEBVTT_IDENTIFIER = 16, + /// The optional settings (rendering instructions) that immediately follow the timestamp specifier of a WebVTT cue. + @AV_PKT_DATA_WEBVTT_SETTINGS = 17, + /// A list of zero terminated key/value strings. There is no end marker for the list, so it is required to rely on the side data size to stop. This side data includes updated metadata which appeared in the stream. + @AV_PKT_DATA_METADATA_UPDATE = 18, + /// MPEGTS stream ID as uint8_t, this is required to pass the stream ID information from the demuxer to the corresponding muxer. + @AV_PKT_DATA_MPEGTS_STREAM_ID = 19, + /// Mastering display metadata (based on SMPTE-2086:2014). This metadata should be associated with a video stream and contains data in the form of the AVMasteringDisplayMetadata struct. + @AV_PKT_DATA_MASTERING_DISPLAY_METADATA = 20, + /// This side data should be associated with a video stream and corresponds to the AVSphericalMapping structure. + @AV_PKT_DATA_SPHERICAL = 21, + /// Content light level (based on CTA-861.3). This metadata should be associated with a video stream and contains data in the form of the AVContentLightMetadata struct. + @AV_PKT_DATA_CONTENT_LIGHT_LEVEL = 22, + /// ATSC A53 Part 4 Closed Captions. This metadata should be associated with a video stream. A53 CC bitstream is stored as uint8_t in AVPacketSideData.data. The number of bytes of CC data is AVPacketSideData.size. + @AV_PKT_DATA_A53_CC = 23, + /// This side data is encryption initialization data. The format is not part of ABI, use av_encryption_init_info_* methods to access. + @AV_PKT_DATA_ENCRYPTION_INIT_INFO = 24, + /// This side data contains encryption info for how to decrypt the packet. 
The format is not part of ABI, use av_encryption_info_* methods to access. + @AV_PKT_DATA_ENCRYPTION_INFO = 25, + /// Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVActiveFormatDescription enum. + @AV_PKT_DATA_AFD = 26, + /// Producer Reference Time data corresponding to the AVProducerReferenceTime struct, usually exported by some encoders (on demand through the prft flag set in the AVCodecContext export_side_data field). + @AV_PKT_DATA_PRFT = 27, + /// ICC profile data consisting of an opaque octet buffer following the format described by ISO 15076-1. + @AV_PKT_DATA_ICC_PROFILE = 28, + /// DOVI configuration ref: dolby-vision-bitstreams-within-the-iso-base-media-file-format-v2.1.2, section 2.2 dolby-vision-bitstreams-in-mpeg-2-transport-stream-multiplex-v1.2, section 3.3 Tags are stored in struct AVDOVIDecoderConfigurationRecord. + @AV_PKT_DATA_DOVI_CONF = 29, + /// Timecode which conforms to SMPTE ST 12-1:2014. The data is an array of 4 uint32_t where the first uint32_t describes how many (1-3) of the other timecodes are used. The timecode format is described in the documentation of av_timecode_get_smpte_from_framenum() function in libavutil/timecode.h. + @AV_PKT_DATA_S12M_TIMECODE = 30, + /// HDR10+ dynamic metadata associated with a video frame. The metadata is in the form of the AVDynamicHDRPlus struct and contains information for color volume transform - application 4 of SMPTE 2094-40:2016 standard. + @AV_PKT_DATA_DYNAMIC_HDR10_PLUS = 31, + /// The number of side data types. This is not part of the public API/ABI in the sense that it may change when new side data types are added. This must stay the last enum value. If its value becomes huge, some code using it needs to be updated as it assumes it to be smaller than other limits. 
+ @AV_PKT_DATA_NB = 32, +} + +/// @{ +public enum AVPictureStructure : int +{ + @AV_PICTURE_STRUCTURE_UNKNOWN = 0, + @AV_PICTURE_STRUCTURE_TOP_FIELD = 1, + @AV_PICTURE_STRUCTURE_BOTTOM_FIELD = 2, + @AV_PICTURE_STRUCTURE_FRAME = 3, +} + +/// @} @} +public enum AVPictureType : int +{ + /// Undefined + @AV_PICTURE_TYPE_NONE = 0, + /// Intra + @AV_PICTURE_TYPE_I = 1, + /// Predicted + @AV_PICTURE_TYPE_P = 2, + /// Bi-dir predicted + @AV_PICTURE_TYPE_B = 3, + /// S(GMC)-VOP MPEG-4 + @AV_PICTURE_TYPE_S = 4, + /// Switching Intra + @AV_PICTURE_TYPE_SI = 5, + /// Switching Predicted + @AV_PICTURE_TYPE_SP = 6, + /// BI type + @AV_PICTURE_TYPE_BI = 7, +} + +/// Pixel format. +public enum AVPixelFormat : int +{ + @AV_PIX_FMT_NONE = -1, + /// planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples) + @AV_PIX_FMT_YUV420P = 0, + /// packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr + @AV_PIX_FMT_YUYV422 = 1, + /// packed RGB 8:8:8, 24bpp, RGBRGB... + @AV_PIX_FMT_RGB24 = 2, + /// packed RGB 8:8:8, 24bpp, BGRBGR... 
+ @AV_PIX_FMT_BGR24 = 3, + /// planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples) + @AV_PIX_FMT_YUV422P = 4, + /// planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples) + @AV_PIX_FMT_YUV444P = 5, + /// planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples) + @AV_PIX_FMT_YUV410P = 6, + /// planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) + @AV_PIX_FMT_YUV411P = 7, + /// Y , 8bpp + @AV_PIX_FMT_GRAY8 = 8, + /// Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb + @AV_PIX_FMT_MONOWHITE = 9, + /// Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb + @AV_PIX_FMT_MONOBLACK = 10, + /// 8 bits with AV_PIX_FMT_RGB32 palette + @AV_PIX_FMT_PAL8 = 11, + /// planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting color_range + @AV_PIX_FMT_YUVJ420P = 12, + /// planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting color_range + @AV_PIX_FMT_YUVJ422P = 13, + /// planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting color_range + @AV_PIX_FMT_YUVJ444P = 14, + /// packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1 + @AV_PIX_FMT_UYVY422 = 15, + /// packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3 + @AV_PIX_FMT_UYYVYY411 = 16, + /// packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb) + @AV_PIX_FMT_BGR8 = 17, + /// packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits + @AV_PIX_FMT_BGR4 = 18, + /// packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb) + @AV_PIX_FMT_BGR4_BYTE = 19, + /// packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb) + @AV_PIX_FMT_RGB8 = 20, + /// packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits + @AV_PIX_FMT_RGB4 = 21, + /// packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb) + 
@AV_PIX_FMT_RGB4_BYTE = 22, + /// planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V) + @AV_PIX_FMT_NV12 = 23, + /// as above, but U and V bytes are swapped + @AV_PIX_FMT_NV21 = 24, + /// packed ARGB 8:8:8:8, 32bpp, ARGBARGB... + @AV_PIX_FMT_ARGB = 25, + /// packed RGBA 8:8:8:8, 32bpp, RGBARGBA... + @AV_PIX_FMT_RGBA = 26, + /// packed ABGR 8:8:8:8, 32bpp, ABGRABGR... + @AV_PIX_FMT_ABGR = 27, + /// packed BGRA 8:8:8:8, 32bpp, BGRABGRA... + @AV_PIX_FMT_BGRA = 28, + /// Y , 16bpp, big-endian + @AV_PIX_FMT_GRAY16BE = 29, + /// Y , 16bpp, little-endian + @AV_PIX_FMT_GRAY16LE = 30, + /// planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples) + @AV_PIX_FMT_YUV440P = 31, + /// planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range + @AV_PIX_FMT_YUVJ440P = 32, + /// planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples) + @AV_PIX_FMT_YUVA420P = 33, + /// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian + @AV_PIX_FMT_RGB48BE = 34, + /// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian + @AV_PIX_FMT_RGB48LE = 35, + /// packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian + @AV_PIX_FMT_RGB565BE = 36, + /// packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian + @AV_PIX_FMT_RGB565LE = 37, + /// packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian , X=unused/undefined + @AV_PIX_FMT_RGB555BE = 38, + /// packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined + @AV_PIX_FMT_RGB555LE = 39, + /// packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian + @AV_PIX_FMT_BGR565BE = 40, + /// packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian + @AV_PIX_FMT_BGR565LE = 41, + /// packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), big-endian , X=unused/undefined + @AV_PIX_FMT_BGR555BE = 
42, + /// packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), little-endian, X=unused/undefined + @AV_PIX_FMT_BGR555LE = 43, + /// Hardware acceleration through VA-API, data[3] contains a VASurfaceID. + @AV_PIX_FMT_VAAPI = 44, + /// planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + @AV_PIX_FMT_YUV420P16LE = 45, + /// planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + @AV_PIX_FMT_YUV420P16BE = 46, + /// planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + @AV_PIX_FMT_YUV422P16LE = 47, + /// planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + @AV_PIX_FMT_YUV422P16BE = 48, + /// planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + @AV_PIX_FMT_YUV444P16LE = 49, + /// planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + @AV_PIX_FMT_YUV444P16BE = 50, + /// HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer + @AV_PIX_FMT_DXVA2_VLD = 51, + /// packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), little-endian, X=unused/undefined + @AV_PIX_FMT_RGB444LE = 52, + /// packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), big-endian, X=unused/undefined + @AV_PIX_FMT_RGB444BE = 53, + /// packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), little-endian, X=unused/undefined + @AV_PIX_FMT_BGR444LE = 54, + /// packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), big-endian, X=unused/undefined + @AV_PIX_FMT_BGR444BE = 55, + /// 8 bits gray, 8 bits alpha + @AV_PIX_FMT_YA8 = 56, + /// alias for AV_PIX_FMT_YA8 + @AV_PIX_FMT_Y400A = 56, + /// alias for AV_PIX_FMT_YA8 + @AV_PIX_FMT_GRAY8A = 56, + /// packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian + @AV_PIX_FMT_BGR48BE = 57, + /// packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian + @AV_PIX_FMT_BGR48LE = 58, + /// planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 
2x2 Y samples), big-endian + @AV_PIX_FMT_YUV420P9BE = 59, + /// planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + @AV_PIX_FMT_YUV420P9LE = 60, + /// planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + @AV_PIX_FMT_YUV420P10BE = 61, + /// planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + @AV_PIX_FMT_YUV420P10LE = 62, + /// planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + @AV_PIX_FMT_YUV422P10BE = 63, + /// planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + @AV_PIX_FMT_YUV422P10LE = 64, + /// planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + @AV_PIX_FMT_YUV444P9BE = 65, + /// planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + @AV_PIX_FMT_YUV444P9LE = 66, + /// planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + @AV_PIX_FMT_YUV444P10BE = 67, + /// planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + @AV_PIX_FMT_YUV444P10LE = 68, + /// planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + @AV_PIX_FMT_YUV422P9BE = 69, + /// planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + @AV_PIX_FMT_YUV422P9LE = 70, + /// planar GBR 4:4:4 24bpp + @AV_PIX_FMT_GBRP = 71, + @AV_PIX_FMT_GBR24P = 71, + /// planar GBR 4:4:4 27bpp, big-endian + @AV_PIX_FMT_GBRP9BE = 72, + /// planar GBR 4:4:4 27bpp, little-endian + @AV_PIX_FMT_GBRP9LE = 73, + /// planar GBR 4:4:4 30bpp, big-endian + @AV_PIX_FMT_GBRP10BE = 74, + /// planar GBR 4:4:4 30bpp, little-endian + @AV_PIX_FMT_GBRP10LE = 75, + /// planar GBR 4:4:4 48bpp, big-endian + @AV_PIX_FMT_GBRP16BE = 76, + /// planar GBR 4:4:4 48bpp, little-endian + @AV_PIX_FMT_GBRP16LE = 77, + /// planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples) + @AV_PIX_FMT_YUVA422P = 78, + /// planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples) + 
@AV_PIX_FMT_YUVA444P = 79, + /// planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), big-endian + @AV_PIX_FMT_YUVA420P9BE = 80, + /// planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), little-endian + @AV_PIX_FMT_YUVA420P9LE = 81, + /// planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), big-endian + @AV_PIX_FMT_YUVA422P9BE = 82, + /// planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), little-endian + @AV_PIX_FMT_YUVA422P9LE = 83, + /// planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), big-endian + @AV_PIX_FMT_YUVA444P9BE = 84, + /// planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), little-endian + @AV_PIX_FMT_YUVA444P9LE = 85, + /// planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian) + @AV_PIX_FMT_YUVA420P10BE = 86, + /// planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian) + @AV_PIX_FMT_YUVA420P10LE = 87, + /// planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian) + @AV_PIX_FMT_YUVA422P10BE = 88, + /// planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian) + @AV_PIX_FMT_YUVA422P10LE = 89, + /// planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian) + @AV_PIX_FMT_YUVA444P10BE = 90, + /// planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian) + @AV_PIX_FMT_YUVA444P10LE = 91, + /// planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian) + @AV_PIX_FMT_YUVA420P16BE = 92, + /// planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian) + @AV_PIX_FMT_YUVA420P16LE = 93, + /// planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian) + @AV_PIX_FMT_YUVA422P16BE = 94, + /// planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian) + @AV_PIX_FMT_YUVA422P16LE = 95, + /// planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian) + 
@AV_PIX_FMT_YUVA444P16BE = 96, + /// planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian) + @AV_PIX_FMT_YUVA444P16LE = 97, + /// HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface + @AV_PIX_FMT_VDPAU = 98, + /// packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as little-endian, the 4 lower bits are set to 0 + @AV_PIX_FMT_XYZ12LE = 99, + /// packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as big-endian, the 4 lower bits are set to 0 + @AV_PIX_FMT_XYZ12BE = 100, + /// interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples) + @AV_PIX_FMT_NV16 = 101, + /// interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + @AV_PIX_FMT_NV20LE = 102, + /// interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + @AV_PIX_FMT_NV20BE = 103, + /// packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian + @AV_PIX_FMT_RGBA64BE = 104, + /// packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian + @AV_PIX_FMT_RGBA64LE = 105, + /// packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian + @AV_PIX_FMT_BGRA64BE = 106, + /// packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian + @AV_PIX_FMT_BGRA64LE = 107, + /// packed YUV 4:2:2, 16bpp, Y0 Cr Y1 Cb + @AV_PIX_FMT_YVYU422 = 108, + /// 16 bits gray, 16 bits alpha (big-endian) + @AV_PIX_FMT_YA16BE = 109, + /// 16 bits gray, 16 bits alpha (little-endian) + @AV_PIX_FMT_YA16LE = 110, + /// planar GBRA 4:4:4:4 32bpp + @AV_PIX_FMT_GBRAP = 111, + /// planar GBRA 4:4:4:4 64bpp, big-endian + @AV_PIX_FMT_GBRAP16BE = 112, + /// planar GBRA 4:4:4:4 64bpp, little-endian + 
@AV_PIX_FMT_GBRAP16LE = 113, + /// HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure. + @AV_PIX_FMT_QSV = 114, + /// HW acceleration though MMAL, data[3] contains a pointer to the MMAL_BUFFER_HEADER_T structure. + @AV_PIX_FMT_MMAL = 115, + /// HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView pointer + @AV_PIX_FMT_D3D11VA_VLD = 116, + /// HW acceleration through CUDA. data[i] contain CUdeviceptr pointers exactly as for system memory frames. + @AV_PIX_FMT_CUDA = 117, + /// packed RGB 8:8:8, 32bpp, XRGBXRGB... X=unused/undefined + @AV_PIX_FMT_0RGB = 118, + /// packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined + @AV_PIX_FMT_RGB0 = 119, + /// packed BGR 8:8:8, 32bpp, XBGRXBGR... X=unused/undefined + @AV_PIX_FMT_0BGR = 120, + /// packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined + @AV_PIX_FMT_BGR0 = 121, + /// planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + @AV_PIX_FMT_YUV420P12BE = 122, + /// planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + @AV_PIX_FMT_YUV420P12LE = 123, + /// planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + @AV_PIX_FMT_YUV420P14BE = 124, + /// planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + @AV_PIX_FMT_YUV420P14LE = 125, + /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + @AV_PIX_FMT_YUV422P12BE = 126, + /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + @AV_PIX_FMT_YUV422P12LE = 127, + /// planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + @AV_PIX_FMT_YUV422P14BE = 128, + /// planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + @AV_PIX_FMT_YUV422P14LE = 129, + /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + @AV_PIX_FMT_YUV444P12BE = 130, + /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y 
samples), little-endian + @AV_PIX_FMT_YUV444P12LE = 131, + /// planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + @AV_PIX_FMT_YUV444P14BE = 132, + /// planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + @AV_PIX_FMT_YUV444P14LE = 133, + /// planar GBR 4:4:4 36bpp, big-endian + @AV_PIX_FMT_GBRP12BE = 134, + /// planar GBR 4:4:4 36bpp, little-endian + @AV_PIX_FMT_GBRP12LE = 135, + /// planar GBR 4:4:4 42bpp, big-endian + @AV_PIX_FMT_GBRP14BE = 136, + /// planar GBR 4:4:4 42bpp, little-endian + @AV_PIX_FMT_GBRP14LE = 137, + /// planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV411P and setting color_range + @AV_PIX_FMT_YUVJ411P = 138, + /// bayer, BGBG..(odd line), GRGR..(even line), 8-bit samples + @AV_PIX_FMT_BAYER_BGGR8 = 139, + /// bayer, RGRG..(odd line), GBGB..(even line), 8-bit samples + @AV_PIX_FMT_BAYER_RGGB8 = 140, + /// bayer, GBGB..(odd line), RGRG..(even line), 8-bit samples + @AV_PIX_FMT_BAYER_GBRG8 = 141, + /// bayer, GRGR..(odd line), BGBG..(even line), 8-bit samples + @AV_PIX_FMT_BAYER_GRBG8 = 142, + /// bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, little-endian + @AV_PIX_FMT_BAYER_BGGR16LE = 143, + /// bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, big-endian + @AV_PIX_FMT_BAYER_BGGR16BE = 144, + /// bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, little-endian + @AV_PIX_FMT_BAYER_RGGB16LE = 145, + /// bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, big-endian + @AV_PIX_FMT_BAYER_RGGB16BE = 146, + /// bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, little-endian + @AV_PIX_FMT_BAYER_GBRG16LE = 147, + /// bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, big-endian + @AV_PIX_FMT_BAYER_GBRG16BE = 148, + /// bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, little-endian + @AV_PIX_FMT_BAYER_GRBG16LE = 149, + /// bayer, GRGR..(odd line), BGBG..(even line), 
16-bit samples, big-endian + @AV_PIX_FMT_BAYER_GRBG16BE = 150, + /// XVideo Motion Acceleration via common packet passing + @AV_PIX_FMT_XVMC = 151, + /// planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian + @AV_PIX_FMT_YUV440P10LE = 152, + /// planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian + @AV_PIX_FMT_YUV440P10BE = 153, + /// planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian + @AV_PIX_FMT_YUV440P12LE = 154, + /// planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian + @AV_PIX_FMT_YUV440P12BE = 155, + /// packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), little-endian + @AV_PIX_FMT_AYUV64LE = 156, + /// packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), big-endian + @AV_PIX_FMT_AYUV64BE = 157, + /// hardware decoding through Videotoolbox + @AV_PIX_FMT_VIDEOTOOLBOX = 158, + /// like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, little-endian + @AV_PIX_FMT_P010LE = 159, + /// like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, big-endian + @AV_PIX_FMT_P010BE = 160, + /// planar GBR 4:4:4:4 48bpp, big-endian + @AV_PIX_FMT_GBRAP12BE = 161, + /// planar GBR 4:4:4:4 48bpp, little-endian + @AV_PIX_FMT_GBRAP12LE = 162, + /// planar GBR 4:4:4:4 40bpp, big-endian + @AV_PIX_FMT_GBRAP10BE = 163, + /// planar GBR 4:4:4:4 40bpp, little-endian + @AV_PIX_FMT_GBRAP10LE = 164, + /// hardware decoding through MediaCodec + @AV_PIX_FMT_MEDIACODEC = 165, + /// Y , 12bpp, big-endian + @AV_PIX_FMT_GRAY12BE = 166, + /// Y , 12bpp, little-endian + @AV_PIX_FMT_GRAY12LE = 167, + /// Y , 10bpp, big-endian + @AV_PIX_FMT_GRAY10BE = 168, + /// Y , 10bpp, little-endian + @AV_PIX_FMT_GRAY10LE = 169, + /// like NV12, with 16bpp per component, little-endian + @AV_PIX_FMT_P016LE = 170, + /// like NV12, with 16bpp per component, big-endian + @AV_PIX_FMT_P016BE = 171, + /// Hardware surfaces for Direct3D11. 
+ @AV_PIX_FMT_D3D11 = 172, + /// Y , 9bpp, big-endian + @AV_PIX_FMT_GRAY9BE = 173, + /// Y , 9bpp, little-endian + @AV_PIX_FMT_GRAY9LE = 174, + /// IEEE-754 single precision planar GBR 4:4:4, 96bpp, big-endian + @AV_PIX_FMT_GBRPF32BE = 175, + /// IEEE-754 single precision planar GBR 4:4:4, 96bpp, little-endian + @AV_PIX_FMT_GBRPF32LE = 176, + /// IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, big-endian + @AV_PIX_FMT_GBRAPF32BE = 177, + /// IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, little-endian + @AV_PIX_FMT_GBRAPF32LE = 178, + /// DRM-managed buffers exposed through PRIME buffer sharing. + @AV_PIX_FMT_DRM_PRIME = 179, + /// Hardware surfaces for OpenCL. + @AV_PIX_FMT_OPENCL = 180, + /// Y , 14bpp, big-endian + @AV_PIX_FMT_GRAY14BE = 181, + /// Y , 14bpp, little-endian + @AV_PIX_FMT_GRAY14LE = 182, + /// IEEE-754 single precision Y, 32bpp, big-endian + @AV_PIX_FMT_GRAYF32BE = 183, + /// IEEE-754 single precision Y, 32bpp, little-endian + @AV_PIX_FMT_GRAYF32LE = 184, + /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, big-endian + @AV_PIX_FMT_YUVA422P12BE = 185, + /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, little-endian + @AV_PIX_FMT_YUVA422P12LE = 186, + /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, big-endian + @AV_PIX_FMT_YUVA444P12BE = 187, + /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, little-endian + @AV_PIX_FMT_YUVA444P12LE = 188, + /// planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V) + @AV_PIX_FMT_NV24 = 189, + /// as above, but U and V bytes are swapped + @AV_PIX_FMT_NV42 = 190, + /// Vulkan hardware images. 
+ @AV_PIX_FMT_VULKAN = 191, + /// packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, big-endian + @AV_PIX_FMT_Y210BE = 192, + /// packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, little-endian + @AV_PIX_FMT_Y210LE = 193, + /// packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), little-endian, X=unused/undefined + @AV_PIX_FMT_X2RGB10LE = 194, + /// packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), big-endian, X=unused/undefined + @AV_PIX_FMT_X2RGB10BE = 195, + /// packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), little-endian, X=unused/undefined + @AV_PIX_FMT_X2BGR10LE = 196, + /// packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), big-endian, X=unused/undefined + @AV_PIX_FMT_X2BGR10BE = 197, + /// interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, big-endian + @AV_PIX_FMT_P210BE = 198, + /// interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, little-endian + @AV_PIX_FMT_P210LE = 199, + /// interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, big-endian + @AV_PIX_FMT_P410BE = 200, + /// interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, little-endian + @AV_PIX_FMT_P410LE = 201, + /// interleaved chroma YUV 4:2:2, 32bpp, big-endian + @AV_PIX_FMT_P216BE = 202, + /// interleaved chroma YUV 4:2:2, 32bpp, little-endian + @AV_PIX_FMT_P216LE = 203, + /// interleaved chroma YUV 4:4:4, 48bpp, big-endian + @AV_PIX_FMT_P416BE = 204, + /// interleaved chroma YUV 4:4:4, 48bpp, little-endian + @AV_PIX_FMT_P416LE = 205, + /// number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions + @AV_PIX_FMT_NB = 206, +} + +/// Rounding methods. +public enum AVRounding : int +{ + /// Round toward zero. + @AV_ROUND_ZERO = 0, + /// Round away from zero. + @AV_ROUND_INF = 1, + /// Round toward -infinity. + @AV_ROUND_DOWN = 2, + /// Round toward +infinity. + @AV_ROUND_UP = 3, + /// Round to nearest and halfway cases away from zero. 
+ @AV_ROUND_NEAR_INF = 5, + /// Flag telling rescaling functions to pass `INT64_MIN`/`MAX` through unchanged, avoiding special cases for #AV_NOPTS_VALUE. + @AV_ROUND_PASS_MINMAX = 8192, +} + +/// Audio sample formats +public enum AVSampleFormat : int +{ + @AV_SAMPLE_FMT_NONE = -1, + /// unsigned 8 bits + @AV_SAMPLE_FMT_U8 = 0, + /// signed 16 bits + @AV_SAMPLE_FMT_S16 = 1, + /// signed 32 bits + @AV_SAMPLE_FMT_S32 = 2, + /// float + @AV_SAMPLE_FMT_FLT = 3, + /// double + @AV_SAMPLE_FMT_DBL = 4, + /// unsigned 8 bits, planar + @AV_SAMPLE_FMT_U8P = 5, + /// signed 16 bits, planar + @AV_SAMPLE_FMT_S16P = 6, + /// signed 32 bits, planar + @AV_SAMPLE_FMT_S32P = 7, + /// float, planar + @AV_SAMPLE_FMT_FLTP = 8, + /// double, planar + @AV_SAMPLE_FMT_DBLP = 9, + /// signed 64 bits + @AV_SAMPLE_FMT_S64 = 10, + /// signed 64 bits, planar + @AV_SAMPLE_FMT_S64P = 11, + /// Number of sample formats. DO NOT USE if linking dynamically + @AV_SAMPLE_FMT_NB = 12, +} + +public enum AVSideDataParamChangeFlags : int +{ + @AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_COUNT = 1, + @AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_LAYOUT = 2, + @AV_SIDE_DATA_PARAM_CHANGE_SAMPLE_RATE = 4, + @AV_SIDE_DATA_PARAM_CHANGE_DIMENSIONS = 8, +} + +/// @} +public enum AVStreamParseType : int +{ + @AVSTREAM_PARSE_NONE = 0, + /// full parsing and repack + @AVSTREAM_PARSE_FULL = 1, + /// Only parse headers, do not repack. 
+ @AVSTREAM_PARSE_HEADERS = 2, + /// full parsing and interpolation of timestamps for frames not starting on a packet boundary + @AVSTREAM_PARSE_TIMESTAMPS = 3, + /// full parsing and repack of the first frame only, only implemented for H.264 currently + @AVSTREAM_PARSE_FULL_ONCE = 4, + /// full parsing and repack with timestamp and position generation by parser for raw this assumes that each packet in the file contains no demuxer level headers and just codec level data, otherwise position generation would fail + @AVSTREAM_PARSE_FULL_RAW = 5, +} + +/// @} +public enum AVSubtitleType : int +{ + @SUBTITLE_NONE = 0, + /// A bitmap, pict will be set + @SUBTITLE_BITMAP = 1, + /// Plain text, the text field must be set by the decoder and is authoritative. ass and pict fields may contain approximations. + @SUBTITLE_TEXT = 2, + /// Formatted text, the ass field must be set by the decoder and is authoritative. pict and text fields may contain approximations. + @SUBTITLE_ASS = 3, +} + +public enum AVTimebaseSource : int +{ + @AVFMT_TBCF_AUTO = -1, + @AVFMT_TBCF_DECODER = 0, + @AVFMT_TBCF_DEMUXER = 1, + @AVFMT_TBCF_R_FRAMERATE = 2, +} + +public enum AVTimecodeFlag : int +{ + /// timecode is drop frame + @AV_TIMECODE_FLAG_DROPFRAME = 1, + /// timecode wraps after 24 hours + @AV_TIMECODE_FLAG_24HOURSMAX = 2, + /// negative time values are allowed + @AV_TIMECODE_FLAG_ALLOWNEGATIVE = 4, +} + +/// Dithering algorithms +public enum SwrDitherType : int +{ + @SWR_DITHER_NONE = 0, + @SWR_DITHER_RECTANGULAR = 1, + @SWR_DITHER_TRIANGULAR = 2, + @SWR_DITHER_TRIANGULAR_HIGHPASS = 3, + /// not part of API/ABI + @SWR_DITHER_NS = 64, + @SWR_DITHER_NS_LIPSHITZ = 65, + @SWR_DITHER_NS_F_WEIGHTED = 66, + @SWR_DITHER_NS_MODIFIED_E_WEIGHTED = 67, + @SWR_DITHER_NS_IMPROVED_E_WEIGHTED = 68, + @SWR_DITHER_NS_SHIBATA = 69, + @SWR_DITHER_NS_LOW_SHIBATA = 70, + @SWR_DITHER_NS_HIGH_SHIBATA = 71, + /// not part of API/ABI + @SWR_DITHER_NB = 72, +} + +/// Resampling Engines +public enum SwrEngine : int +{ 
+ /// SW Resampler + @SWR_ENGINE_SWR = 0, + /// SoX Resampler + @SWR_ENGINE_SOXR = 1, + /// not part of API/ABI + @SWR_ENGINE_NB = 2, +} + +/// Resampling Filter Types +public enum SwrFilterType : int +{ + /// Cubic + @SWR_FILTER_TYPE_CUBIC = 0, + /// Blackman Nuttall windowed sinc + @SWR_FILTER_TYPE_BLACKMAN_NUTTALL = 1, + /// Kaiser windowed sinc + @SWR_FILTER_TYPE_KAISER = 2, +} + diff --git a/FFmpeg.AutoGen.Abstractions/generated/Structs.g.cs b/FFmpeg.AutoGen.Abstractions/generated/Structs.g.cs new file mode 100644 index 00000000..cf87a9e1 --- /dev/null +++ b/FFmpeg.AutoGen.Abstractions/generated/Structs.g.cs @@ -0,0 +1,2587 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Abstractions; + +public unsafe partial struct _GUID +{ + public ulong @Data1; + public ushort @Data2; + public ushort @Data3; + public byte8 @Data4; +} + +public unsafe partial struct _iobuf +{ + public void* @_Placeholder; +} + +/// Deprecated and unused struct to use for initializing an abuffersink context. +public unsafe partial struct AVABufferSinkParams +{ + /// list of allowed sample formats, terminated by AV_SAMPLE_FMT_NONE + public AVSampleFormat* @sample_fmts; + /// list of allowed channel layouts, terminated by -1 + public long* @channel_layouts; + /// list of allowed channel counts, terminated by -1 + public int* @channel_counts; + /// if not 0, accept any channel count or layout + public int @all_channel_counts; + /// list of allowed sample rates, terminated by -1 + public int* @sample_rates; +} + +public unsafe partial struct AVBitStreamFilter +{ + public byte* @name; + /// A list of codec ids supported by the filter, terminated by AV_CODEC_ID_NONE. May be NULL, in that case the bitstream filter works with any codec id. + public AVCodecID* @codec_ids; + /// A class for the private data, used to declare bitstream filter private AVOptions. This field is NULL for bitstream filters that do not declare any options. 
+ public AVClass* @priv_class; +} + +/// The bitstream filter state. +public unsafe partial struct AVBSFContext +{ + /// A class for logging and AVOptions + public AVClass* @av_class; + /// The bitstream filter this context is an instance of. + public AVBitStreamFilter* @filter; + /// Opaque filter-specific private data. If filter->priv_class is non-NULL, this is an AVOptions-enabled struct. + public void* @priv_data; + /// Parameters of the input stream. This field is allocated in av_bsf_alloc(), it needs to be filled by the caller before av_bsf_init(). + public AVCodecParameters* @par_in; + /// Parameters of the output stream. This field is allocated in av_bsf_alloc(), it is set by the filter in av_bsf_init(). + public AVCodecParameters* @par_out; + /// The timebase used for the timestamps of the input packets. Set by the caller before av_bsf_init(). + public AVRational @time_base_in; + /// The timebase used for the timestamps of the output packets. Set by the filter in av_bsf_init(). + public AVRational @time_base_out; +} + +/// A reference to a data buffer. +public unsafe partial struct AVBufferRef +{ + public AVBuffer* @buffer; + /// The data buffer. It is considered writable if and only if this is the only reference to the buffer, in which case av_buffer_is_writable() returns 1. + public byte* @data; + /// Size of data in bytes. + public ulong @size; +} + +/// Deprecated and unused struct to use for initializing a buffersink context. +public unsafe partial struct AVBufferSinkParams +{ + /// list of allowed pixel formats, terminated by AV_PIX_FMT_NONE + public AVPixelFormat* @pixel_fmts; +} + +/// This structure contains the parameters describing the frames that will be passed to this filter. 
+public unsafe partial struct AVBufferSrcParameters +{ + /// video: the pixel format, value corresponds to enum AVPixelFormat audio: the sample format, value corresponds to enum AVSampleFormat + public int @format; + /// The timebase to be used for the timestamps on the input frames. + public AVRational @time_base; + /// Video only, the display dimensions of the input frames. + public int @width; + /// Video only, the display dimensions of the input frames. + public int @height; + /// Video only, the sample (pixel) aspect ratio. + public AVRational @sample_aspect_ratio; + /// Video only, the frame rate of the input video. This field must only be set to a non-zero value if input stream has a known constant framerate and should be left at its initial value if the framerate is variable or unknown. + public AVRational @frame_rate; + /// Video with a hwaccel pixel format only. This should be a reference to an AVHWFramesContext instance describing the input frames. + public AVBufferRef* @hw_frames_ctx; + /// Audio only, the audio sampling rate in samples per second. + public int @sample_rate; + /// Audio only, the audio channel layout + [Obsolete("use ch_layout")] + public ulong @channel_layout; + /// Audio only, the audio channel layout + public AVChannelLayout @ch_layout; +} + +/// An AVChannelCustom defines a single channel within a custom order layout +public unsafe partial struct AVChannelCustom +{ + public AVChannel @id; + public byte16 @name; + public void* @opaque; +} + +/// An AVChannelLayout holds information about the channel layout of audio data. +public unsafe partial struct AVChannelLayout +{ + /// Channel order used in this layout. This is a mandatory field. + public AVChannelOrder @order; + /// Number of channels in this layout. Mandatory field. + public int @nb_channels; + public AVChannelLayout_u @u; + /// For some private data of the user. + public void* @opaque; +} + +/// Details about which channels are present in this layout. 
For AV_CHANNEL_ORDER_UNSPEC, this field is undefined and must not be used. +[StructLayout(LayoutKind.Explicit)] +public unsafe partial struct AVChannelLayout_u +{ + /// This member must be used for AV_CHANNEL_ORDER_NATIVE, and may be used for AV_CHANNEL_ORDER_AMBISONIC to signal non-diegetic channels. It is a bitmask, where the position of each set bit means that the AVChannel with the corresponding value is present. + [FieldOffset(0)] + public ulong @mask; + /// This member must be used when the channel order is AV_CHANNEL_ORDER_CUSTOM. It is a nb_channels-sized array, with each element signalling the presence of the AVChannel with the corresponding value in map[i].id. + [FieldOffset(0)] + public AVChannelCustom* @map; +} + +public unsafe partial struct AVChapter +{ + /// unique ID to identify the chapter + public long @id; + /// time base in which the start/end timestamps are specified + public AVRational @time_base; + /// chapter start/end time in time_base units + public long @start; + /// chapter start/end time in time_base units + public long @end; + public AVDictionary* @metadata; +} + +/// Describe the class of an AVClass context structure. That is an arbitrary struct of which the first field is a pointer to an AVClass struct (e.g. AVCodecContext, AVFormatContext etc.). +public unsafe partial struct AVClass +{ + /// The name of the class; usually it is the same name as the context structure type to which the AVClass is associated. + public byte* @class_name; + /// A pointer to a function which returns the name of a context instance ctx associated with the class. + public AVClass_item_name_func @item_name; + /// a pointer to the first option specified in the class if any or NULL + public AVOption* @option; + /// LIBAVUTIL_VERSION with which this structure was created. This is used to allow fields to be added without requiring major version bumps everywhere. + public int @version; + /// Offset in the structure where log_level_offset is stored. 
0 means there is no such variable + public int @log_level_offset_offset; + /// Offset in the structure where a pointer to the parent context for logging is stored. For example a decoder could pass its AVCodecContext to eval as such a parent context, which an av_log() implementation could then leverage to display the parent context. The offset can be NULL. + public int @parent_log_context_offset; + /// Category used for visualization (like color) This is only set if the category is equal for all objects using this class. available since version (51 << 16 | 56 << 8 | 100) + public AVClassCategory @category; + /// Callback to return the category. available since version (51 << 16 | 59 << 8 | 100) + public AVClass_get_category_func @get_category; + /// Callback to return the supported/allowed ranges. available since version (52.12) + public AVClass_query_ranges_func @query_ranges; + /// Return next AVOptions-enabled child or NULL + public AVClass_child_next_func @child_next; + /// Iterate over the AVClasses corresponding to potential AVOptions-enabled children. + public AVClass_child_class_iterate_func @child_class_iterate; +} + +/// AVCodec. +public unsafe partial struct AVCodec +{ + /// Name of the codec implementation. The name is globally unique among encoders and among decoders (but an encoder and a decoder can share the same name). This is the primary way to find a codec from the user perspective. + public byte* @name; + /// Descriptive name for the codec, meant to be more human readable than name. You should use the NULL_IF_CONFIG_SMALL() macro to define it. + public byte* @long_name; + public AVMediaType @type; + public AVCodecID @id; + /// Codec capabilities. 
see AV_CODEC_CAP_* + public int @capabilities; + /// maximum value for lowres supported by the decoder + public byte @max_lowres; + /// array of supported framerates, or NULL if any, array is terminated by {0,0} + public AVRational* @supported_framerates; + /// array of supported pixel formats, or NULL if unknown, array is terminated by -1 + public AVPixelFormat* @pix_fmts; + /// array of supported audio samplerates, or NULL if unknown, array is terminated by 0 + public int* @supported_samplerates; + /// array of supported sample formats, or NULL if unknown, array is terminated by -1 + public AVSampleFormat* @sample_fmts; + /// array of support channel layouts, or NULL if unknown. array is terminated by 0 + public ulong* @channel_layouts; + /// AVClass for the private context + public AVClass* @priv_class; + /// array of recognized profiles, or NULL if unknown, array is terminated by {FF_PROFILE_UNKNOWN} + public AVProfile* @profiles; + /// Group name of the codec implementation. This is a short symbolic name of the wrapper backing this codec. A wrapper uses some kind of external implementation for the codec, such as an external library, or a codec implementation provided by the OS or the hardware. If this field is NULL, this is a builtin, libavcodec native codec. If non-NULL, this will be the suffix in AVCodec.name in most cases (usually AVCodec.name will be of the form "<codec_name>_<wrapper_name>"). + public byte* @wrapper_name; + /// Array of supported channel layouts, terminated with a zeroed layout. + public AVChannelLayout* @ch_layouts; +} + +/// main external API structure. New fields can be added to the end with minor version bumps. Removal, reordering and changes to existing fields require a major version bump. You can use AVOptions (av_opt* / av_set/get*()) to access these fields from user applications. 
The name string for AVOptions options matches the associated command line parameter name and can be found in libavcodec/options_table.h The AVOption/command line parameter names differ in some cases from the C structure field names for historic reasons or brevity. sizeof(AVCodecContext) must not be used outside libav*. +public unsafe partial struct AVCodecContext +{ + /// information on struct for av_log - set by avcodec_alloc_context3 + public AVClass* @av_class; + public int @log_level_offset; + public AVMediaType @codec_type; + public AVCodec* @codec; + public AVCodecID @codec_id; + /// fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A'). This is used to work around some encoder bugs. A demuxer should set this to what is stored in the field used to identify the codec. If there are multiple such fields in a container then the demuxer should choose the one which maximizes the information about the used codec. If the codec tag field in a container is larger than 32 bits then the demuxer should remap the longer ID to 32 bits with a table or other structure. Alternatively a new extra_codec_tag + size could be added but for this a clear advantage must be demonstrated first. - encoding: Set by user, if not then the default based on codec_id will be used. - decoding: Set by user, will be converted to uppercase by libavcodec during init. + public uint @codec_tag; + public void* @priv_data; + /// Private context used for internal data. + public AVCodecInternal* @internal; + /// Private data of the user, can be used to carry app specific stuff. - encoding: Set by user. - decoding: Set by user. + public void* @opaque; + /// the average bitrate - encoding: Set by user; unused for constant quantizer encoding. - decoding: Set by user, may be overwritten by libavcodec if this info is available in the stream + public long @bit_rate; + /// number of bits the bitstream is allowed to diverge from the reference. 
the reference can be CBR (for CBR pass1) or VBR (for pass2) - encoding: Set by user; unused for constant quantizer encoding. - decoding: unused + public int @bit_rate_tolerance; + /// Global quality for codecs which cannot change it per frame. This should be proportional to MPEG-1/2/4 qscale. - encoding: Set by user. - decoding: unused + public int @global_quality; + /// - encoding: Set by user. - decoding: unused + public int @compression_level; + /// AV_CODEC_FLAG_*. - encoding: Set by user. - decoding: Set by user. + public int @flags; + /// AV_CODEC_FLAG2_* - encoding: Set by user. - decoding: Set by user. + public int @flags2; + /// some codecs need / can use extradata like Huffman tables. MJPEG: Huffman tables rv10: additional flags MPEG-4: global headers (they can be in the bitstream or here) The allocated memory should be AV_INPUT_BUFFER_PADDING_SIZE bytes larger than extradata_size to avoid problems if it is read with the bitstream reader. The bytewise contents of extradata must not depend on the architecture or CPU endianness. Must be allocated with the av_malloc() family of functions. - encoding: Set/allocated/freed by libavcodec. - decoding: Set/allocated/freed by user. + public byte* @extradata; + public int @extradata_size; + /// This is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented. For fixed-fps content, timebase should be 1/framerate and timestamp increments should be identically 1. This often, but not always is the inverse of the frame rate or field rate for video. 1/time_base is not the average frame rate if the frame rate is not constant. + public AVRational @time_base; + /// For some codecs, the time base is closer to the field rate than the frame rate. Most notably, H.264 and MPEG-2 specify time_base as half of frame duration if no telecine is used ... + public int @ticks_per_frame; + /// Codec delay. + public int @delay; + /// picture width / height. 
+ public int @width; + /// picture width / height. + public int @height; + /// Bitstream width / height, may be different from width/height e.g. when the decoded frame is cropped before being output or lowres is enabled. + public int @coded_width; + /// Bitstream width / height, may be different from width/height e.g. when the decoded frame is cropped before being output or lowres is enabled. + public int @coded_height; + /// the number of pictures in a group of pictures, or 0 for intra_only - encoding: Set by user. - decoding: unused + public int @gop_size; + /// Pixel format, see AV_PIX_FMT_xxx. May be set by the demuxer if known from headers. May be overridden by the decoder if it knows better. + public AVPixelFormat @pix_fmt; + /// If non NULL, 'draw_horiz_band' is called by the libavcodec decoder to draw a horizontal band. It improves cache usage. Not all codecs can do that. You must check the codec capabilities beforehand. When multithreading is used, it may be called from multiple threads at the same time; threads might draw different parts of the same AVFrame, or multiple AVFrames, and there is no guarantee that slices will be drawn in order. The function is also used by hardware acceleration APIs. It is called at least once during frame decoding to pass the data needed for hardware render. In that mode instead of pixel data, AVFrame points to a structure specific to the acceleration API. The application reads the structure and can change some fields to indicate progress or mark state. - encoding: unused - decoding: Set by user. + public AVCodecContext_draw_horiz_band_func @draw_horiz_band; + /// Callback to negotiate the pixel format. Decoding only, may be set by the caller before avcodec_open2(). + public AVCodecContext_get_format_func @get_format; + /// maximum number of B-frames between non-B-frames Note: The output will be delayed by max_b_frames+1 relative to the input. - encoding: Set by user. 
- decoding: unused + public int @max_b_frames; + /// qscale factor between IP and B-frames If > 0 then the last P-frame quantizer will be used (q= lastp_q*factor+offset). If < 0 then normal ratecontrol will be done (q= -normal_q*factor+offset). - encoding: Set by user. - decoding: unused + public float @b_quant_factor; + /// qscale offset between IP and B-frames - encoding: Set by user. - decoding: unused + public float @b_quant_offset; + /// Size of the frame reordering buffer in the decoder. For MPEG-2 it is 1 IPB or 0 low delay IP. - encoding: Set by libavcodec. - decoding: Set by libavcodec. + public int @has_b_frames; + /// qscale factor between P- and I-frames If > 0 then the last P-frame quantizer will be used (q = lastp_q * factor + offset). If < 0 then normal ratecontrol will be done (q= -normal_q*factor+offset). - encoding: Set by user. - decoding: unused + public float @i_quant_factor; + /// qscale offset between P and I-frames - encoding: Set by user. - decoding: unused + public float @i_quant_offset; + /// luminance masking (0-> disabled) - encoding: Set by user. - decoding: unused + public float @lumi_masking; + /// temporary complexity masking (0-> disabled) - encoding: Set by user. - decoding: unused + public float @temporal_cplx_masking; + /// spatial complexity masking (0-> disabled) - encoding: Set by user. - decoding: unused + public float @spatial_cplx_masking; + /// p block masking (0-> disabled) - encoding: Set by user. - decoding: unused + public float @p_masking; + /// darkness masking (0-> disabled) - encoding: Set by user. - decoding: unused + public float @dark_masking; + /// slice count - encoding: Set by libavcodec. - decoding: Set by user (or 0). + public int @slice_count; + /// slice offsets in the frame in bytes - encoding: Set/allocated by libavcodec. - decoding: Set/allocated by user (or NULL). + public int* @slice_offset; + /// sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel. 
Numerator and denominator must be relatively prime and smaller than 256 for some video standards. - encoding: Set by user. - decoding: Set by libavcodec. + public AVRational @sample_aspect_ratio; + /// motion estimation comparison function - encoding: Set by user. - decoding: unused + public int @me_cmp; + /// subpixel motion estimation comparison function - encoding: Set by user. - decoding: unused + public int @me_sub_cmp; + /// macroblock comparison function (not supported yet) - encoding: Set by user. - decoding: unused + public int @mb_cmp; + /// interlaced DCT comparison function - encoding: Set by user. - decoding: unused + public int @ildct_cmp; + /// ME diamond size & shape - encoding: Set by user. - decoding: unused + public int @dia_size; + /// amount of previous MV predictors (2a+1 x 2a+1 square) - encoding: Set by user. - decoding: unused + public int @last_predictor_count; + /// motion estimation prepass comparison function - encoding: Set by user. - decoding: unused + public int @me_pre_cmp; + /// ME prepass diamond size & shape - encoding: Set by user. - decoding: unused + public int @pre_dia_size; + /// subpel ME quality - encoding: Set by user. - decoding: unused + public int @me_subpel_quality; + /// maximum motion estimation search range in subpel units If 0 then no limit. + public int @me_range; + /// slice flags - encoding: unused - decoding: Set by user. + public int @slice_flags; + /// macroblock decision mode - encoding: Set by user. - decoding: unused + public int @mb_decision; + /// custom intra quantization matrix Must be allocated with the av_malloc() family of functions, and will be freed in avcodec_free_context(). - encoding: Set/allocated by user, freed by libavcodec. Can be NULL. - decoding: Set/allocated/freed by libavcodec. + public ushort* @intra_matrix; + /// custom inter quantization matrix Must be allocated with the av_malloc() family of functions, and will be freed in avcodec_free_context(). 
- encoding: Set/allocated by user, freed by libavcodec. Can be NULL. - decoding: Set/allocated/freed by libavcodec. + public ushort* @inter_matrix; + /// precision of the intra DC coefficient - 8 - encoding: Set by user. - decoding: Set by libavcodec + public int @intra_dc_precision; + /// Number of macroblock rows at the top which are skipped. - encoding: unused - decoding: Set by user. + public int @skip_top; + /// Number of macroblock rows at the bottom which are skipped. - encoding: unused - decoding: Set by user. + public int @skip_bottom; + /// minimum MB Lagrange multiplier - encoding: Set by user. - decoding: unused + public int @mb_lmin; + /// maximum MB Lagrange multiplier - encoding: Set by user. - decoding: unused + public int @mb_lmax; + /// - encoding: Set by user. - decoding: unused + public int @bidir_refine; + /// minimum GOP size - encoding: Set by user. - decoding: unused + public int @keyint_min; + /// number of reference frames - encoding: Set by user. - decoding: Set by lavc. + public int @refs; + /// Note: Value depends upon the compare function used for fullpel ME. - encoding: Set by user. - decoding: unused + public int @mv0_threshold; + /// Chromaticity coordinates of the source primaries. - encoding: Set by user - decoding: Set by libavcodec + public AVColorPrimaries @color_primaries; + /// Color Transfer Characteristic. - encoding: Set by user - decoding: Set by libavcodec + public AVColorTransferCharacteristic @color_trc; + /// YUV colorspace type. - encoding: Set by user - decoding: Set by libavcodec + public AVColorSpace @colorspace; + /// MPEG vs JPEG YUV range. - encoding: Set by user - decoding: Set by libavcodec + public AVColorRange @color_range; + /// This defines the location of chroma samples. - encoding: Set by user - decoding: Set by libavcodec + public AVChromaLocation @chroma_sample_location; + /// Number of slices. Indicates number of picture subdivisions. Used for parallelized decoding. 
- encoding: Set by user - decoding: unused + public int @slices; + /// Field order - encoding: set by libavcodec - decoding: Set by user. + public AVFieldOrder @field_order; + /// samples per second + public int @sample_rate; + /// number of audio channels + [Obsolete("use ch_layout.nb_channels")] + public int @channels; + /// sample format + public AVSampleFormat @sample_fmt; + /// Number of samples per channel in an audio frame. + public int @frame_size; + /// Frame counter, set by libavcodec. + public int @frame_number; + /// number of bytes per packet if constant and known or 0 Used by some WAV based audio codecs. + public int @block_align; + /// Audio cutoff bandwidth (0 means "automatic") - encoding: Set by user. - decoding: unused + public int @cutoff; + /// Audio channel layout. - encoding: set by user. - decoding: set by user, may be overwritten by libavcodec. + [Obsolete("use ch_layout")] + public ulong @channel_layout; + /// Request decoder to use this channel layout if it can (0 for default) - encoding: unused - decoding: Set by user. + [Obsolete("use \"downmix\" codec private option")] + public ulong @request_channel_layout; + /// Type of service that the audio stream conveys. - encoding: Set by user. - decoding: Set by libavcodec. + public AVAudioServiceType @audio_service_type; + /// desired sample format - encoding: Not used. - decoding: Set by user. Decoder will decode to this format if it can. + public AVSampleFormat @request_sample_fmt; + /// This callback is called at the beginning of each frame to get data buffer(s) for it. There may be one contiguous buffer for all the data or there may be a buffer per each data plane or anything in between. What this means is, you may set however many entries in buf[] you feel necessary. Each buffer must be reference-counted using the AVBuffer API (see description of buf[] below). 
+ public AVCodecContext_get_buffer2_func @get_buffer2; + /// amount of qscale change between easy & hard scenes (0.0-1.0) + public float @qcompress; + /// amount of qscale smoothing over time (0.0-1.0) + public float @qblur; + /// minimum quantizer - encoding: Set by user. - decoding: unused + public int @qmin; + /// maximum quantizer - encoding: Set by user. - decoding: unused + public int @qmax; + /// maximum quantizer difference between frames - encoding: Set by user. - decoding: unused + public int @max_qdiff; + /// decoder bitstream buffer size - encoding: Set by user. - decoding: unused + public int @rc_buffer_size; + /// ratecontrol override, see RcOverride - encoding: Allocated/set/freed by user. - decoding: unused + public int @rc_override_count; + public RcOverride* @rc_override; + /// maximum bitrate - encoding: Set by user. - decoding: Set by user, may be overwritten by libavcodec. + public long @rc_max_rate; + /// minimum bitrate - encoding: Set by user. - decoding: unused + public long @rc_min_rate; + /// Ratecontrol attempt to use, at maximum, <value> of what can be used without an underflow. - encoding: Set by user. - decoding: unused. + public float @rc_max_available_vbv_use; + /// Ratecontrol attempt to use, at least, <value> times the amount needed to prevent a vbv overflow. - encoding: Set by user. - decoding: unused. + public float @rc_min_vbv_overflow_use; + /// Number of bits which should be loaded into the rc buffer before decoding starts. - encoding: Set by user. - decoding: unused + public int @rc_initial_buffer_occupancy; + /// trellis RD quantization - encoding: Set by user. - decoding: unused + public int @trellis; + /// pass1 encoding statistics output buffer - encoding: Set by libavcodec. - decoding: unused + public byte* @stats_out; + /// pass2 encoding statistics input buffer Concatenated stuff from stats_out of pass1 should be placed here. - encoding: Allocated/set/freed by user. 
- decoding: unused + public byte* @stats_in; + /// Work around bugs in encoders which sometimes cannot be detected automatically. - encoding: Set by user - decoding: Set by user + public int @workaround_bugs; + /// strictly follow the standard (MPEG-4, ...). - encoding: Set by user. - decoding: Set by user. Setting this to STRICT or higher means the encoder and decoder will generally do stupid things, whereas setting it to unofficial or lower will mean the encoder might produce output that is not supported by all spec-compliant decoders. Decoders don't differentiate between normal, unofficial and experimental (that is, they always try to decode things when they can) unless they are explicitly asked to behave stupidly (=strictly conform to the specs) + public int @strict_std_compliance; + /// error concealment flags - encoding: unused - decoding: Set by user. + public int @error_concealment; + /// debug - encoding: Set by user. - decoding: Set by user. + public int @debug; + /// Error recognition; may misdetect some more or less valid parts as errors. - encoding: Set by user. - decoding: Set by user. + public int @err_recognition; + /// opaque 64-bit number (generally a PTS) that will be reordered and output in AVFrame.reordered_opaque - encoding: Set by libavcodec to the reordered_opaque of the input frame corresponding to the last returned packet. Only supported by encoders with the AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE capability. - decoding: Set by user. + public long @reordered_opaque; + /// Hardware accelerator in use - encoding: unused. - decoding: Set by libavcodec + public AVHWAccel* @hwaccel; + /// Hardware accelerator context. For some hardware accelerators, a global context needs to be provided by the user. In that case, this holds display-dependent data FFmpeg cannot instantiate itself. Please refer to the FFmpeg HW accelerator documentation to know how to fill this. 
- encoding: unused - decoding: Set by user + public void* @hwaccel_context; + /// error - encoding: Set by libavcodec if flags & AV_CODEC_FLAG_PSNR. - decoding: unused + public ulong8 @error; + /// DCT algorithm, see FF_DCT_* below - encoding: Set by user. - decoding: unused + public int @dct_algo; + /// IDCT algorithm, see FF_IDCT_* below. - encoding: Set by user. - decoding: Set by user. + public int @idct_algo; + /// bits per sample/pixel from the demuxer (needed for huffyuv). - encoding: Set by libavcodec. - decoding: Set by user. + public int @bits_per_coded_sample; + /// Bits per sample/pixel of internal libavcodec pixel/sample format. - encoding: set by user. - decoding: set by libavcodec. + public int @bits_per_raw_sample; + /// low resolution decoding, 1-> 1/2 size, 2->1/4 size - encoding: unused - decoding: Set by user. + public int @lowres; + /// thread count is used to decide how many independent tasks should be passed to execute() - encoding: Set by user. - decoding: Set by user. + public int @thread_count; + /// Which multithreading methods to use. Use of FF_THREAD_FRAME will increase decoding delay by one frame per thread, so clients which cannot provide future frames should not use it. + public int @thread_type; + /// Which multithreading methods are in use by the codec. - encoding: Set by libavcodec. - decoding: Set by libavcodec. + public int @active_thread_type; + /// Set by the client if its custom get_buffer() callback can be called synchronously from another thread, which allows faster multithreaded decoding. draw_horiz_band() will be called from other threads regardless of this setting. Ignored if the default get_buffer() is used. - encoding: Set by user. - decoding: Set by user. + [Obsolete("the custom get_buffer2() callback should always be thread-safe. Thread-unsafe get_buffer2() implementations will be invalid starting with LIBAVCODEC_VERSION_MAJOR=60; in other words, libavcodec will behave as if this field was always set to 1. 
Callers that want to be forward compatible with future libavcodec versions should wrap access to this field in #if LIBAVCODEC_VERSION_MAJOR < 60")] + public int @thread_safe_callbacks; + /// The codec may call this to execute several independent things. It will return only after finishing all tasks. The user may replace this with some multithreaded implementation, the default implementation will execute the parts serially. + public AVCodecContext_execute_func @execute; + /// The codec may call this to execute several independent things. It will return only after finishing all tasks. The user may replace this with some multithreaded implementation, the default implementation will execute the parts serially. + public AVCodecContext_execute2_func @execute2; + /// noise vs. sse weight for the nsse comparison function - encoding: Set by user. - decoding: unused + public int @nsse_weight; + /// profile - encoding: Set by user. - decoding: Set by libavcodec. + public int @profile; + /// level - encoding: Set by user. - decoding: Set by libavcodec. + public int @level; + /// Skip loop filtering for selected frames. - encoding: unused - decoding: Set by user. + public AVDiscard @skip_loop_filter; + /// Skip IDCT/dequantization for selected frames. - encoding: unused - decoding: Set by user. + public AVDiscard @skip_idct; + /// Skip decoding for selected frames. - encoding: unused - decoding: Set by user. + public AVDiscard @skip_frame; + /// Header containing style information for text subtitles. For SUBTITLE_ASS subtitle type, it should contain the whole ASS [Script Info] and [V4+ Styles] section, plus the [Events] line and the Format line following. It shouldn't include any Dialogue line. - encoding: Set/allocated/freed by user (before avcodec_open2()) - decoding: Set/allocated/freed by libavcodec (by avcodec_open2()) + public byte* @subtitle_header; + public int @subtitle_header_size; + /// Audio only. 
The number of "priming" samples (padding) inserted by the encoder at the beginning of the audio. I.e. this number of leading decoded samples must be discarded by the caller to get the original audio without leading padding. + public int @initial_padding; + /// - decoding: For codecs that store a framerate value in the compressed bitstream, the decoder may export it here. { 0, 1} when unknown. - encoding: May be used to signal the framerate of CFR content to an encoder. + public AVRational @framerate; + /// Nominal unaccelerated pixel format, see AV_PIX_FMT_xxx. - encoding: unused. - decoding: Set by libavcodec before calling get_format() + public AVPixelFormat @sw_pix_fmt; + /// Timebase in which pkt_dts/pts and AVPacket.dts/pts are. - encoding unused. - decoding set by user. + public AVRational @pkt_timebase; + /// AVCodecDescriptor - encoding: unused. - decoding: set by libavcodec. + public AVCodecDescriptor* @codec_descriptor; + /// Current statistics for PTS correction. - decoding: maintained and used by libavcodec, not intended to be used by user apps - encoding: unused + public long @pts_correction_num_faulty_pts; + /// Number of incorrect PTS values so far + public long @pts_correction_num_faulty_dts; + /// Number of incorrect DTS values so far + public long @pts_correction_last_pts; + /// PTS of the last frame + public long @pts_correction_last_dts; + /// Character encoding of the input subtitles file. - decoding: set by user - encoding: unused + public byte* @sub_charenc; + /// Subtitles character encoding mode. Formats or codecs might be adjusting this setting (if they are doing the conversion themselves for instance). - decoding: set by libavcodec - encoding: unused + public int @sub_charenc_mode; + /// Skip processing alpha if supported by codec. Note that if the format uses pre-multiplied alpha (common with VP6, and recommended due to better video quality/compression) the image will look as if alpha-blended onto a black background. 
However for formats that do not use pre-multiplied alpha there might be serious artefacts (though e.g. libswscale currently assumes pre-multiplied alpha anyway). + public int @skip_alpha; + /// Number of samples to skip after a discontinuity - decoding: unused - encoding: set by libavcodec + public int @seek_preroll; + [Obsolete("unused")] + public int @debug_mv; + /// custom intra quantization matrix - encoding: Set by user, can be NULL. - decoding: unused. + public ushort* @chroma_intra_matrix; + /// dump format separator. can be ", " or " " or anything else - encoding: Set by user. - decoding: Set by user. + public byte* @dump_separator; + /// ',' separated list of allowed decoders. If NULL then all are allowed - encoding: unused - decoding: set by user + public byte* @codec_whitelist; + /// Properties of the stream that gets decoded - encoding: unused - decoding: set by libavcodec + public uint @properties; + /// Additional data associated with the entire coded stream. + public AVPacketSideData* @coded_side_data; + public int @nb_coded_side_data; + /// A reference to the AVHWFramesContext describing the input (for encoding) or output (decoding) frames. The reference is set by the caller and afterwards owned (and freed) by libavcodec - it should never be read by the caller after being set. + public AVBufferRef* @hw_frames_ctx; + [Obsolete("unused")] + public int @sub_text_format; + /// Audio only. The amount of padding (in samples) appended by the encoder to the end of the audio. I.e. this number of decoded samples must be discarded by the caller from the end of the stream to get the original audio without any trailing padding. + public int @trailing_padding; + /// The number of pixels per image to maximally accept. + public long @max_pixels; + /// A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/decoder. The reference is set by the caller and afterwards owned (and freed) by libavcodec. 
+ public AVBufferRef* @hw_device_ctx; + /// Bit set of AV_HWACCEL_FLAG_* flags, which affect hardware accelerated decoding (if active). - encoding: unused - decoding: Set by user (either before avcodec_open2(), or in the AVCodecContext.get_format callback) + public int @hwaccel_flags; + /// Video decoding only. Certain video codecs support cropping, meaning that only a sub-rectangle of the decoded frame is intended for display. This option controls how cropping is handled by libavcodec. + public int @apply_cropping; + public int @extra_hw_frames; + /// The percentage of damaged samples to discard a frame. + public int @discard_damaged_percentage; + /// The number of samples per frame to maximally accept. + public long @max_samples; + /// Bit set of AV_CODEC_EXPORT_DATA_* flags, which affects the kind of metadata exported in frame, packet, or coded stream side data by decoders and encoders. + public int @export_side_data; + /// This callback is called at the beginning of each packet to get a data buffer for it. + public AVCodecContext_get_encode_buffer_func @get_encode_buffer; + /// Audio channel layout. - encoding: must be set by the caller, to one of AVCodec.ch_layouts. - decoding: may be set by the caller if known e.g. from the container. The decoder can then override during decoding as needed. + public AVChannelLayout @ch_layout; +} + +/// This struct describes the properties of a single codec described by an AVCodecID. +public unsafe partial struct AVCodecDescriptor +{ + public AVCodecID @id; + public AVMediaType @type; + /// Name of the codec described by this descriptor. It is non-empty and unique for each codec descriptor. It should contain alphanumeric characters and '_' only. + public byte* @name; + /// A more descriptive name for this codec. May be NULL. + public byte* @long_name; + /// Codec properties, a combination of AV_CODEC_PROP_* flags. + public int @props; + /// MIME type(s) associated with the codec. 
May be NULL; if not, a NULL-terminated array of MIME types. The first item is always non-NULL and is the preferred MIME type. + public byte** @mime_types; + /// If non-NULL, an array of profiles recognized for this codec. Terminated with FF_PROFILE_UNKNOWN. + public AVProfile* @profiles; +} + +public unsafe partial struct AVCodecHWConfig +{ + /// For decoders, a hardware pixel format which that decoder may be able to decode to if suitable hardware is available. + public AVPixelFormat @pix_fmt; + /// Bit set of AV_CODEC_HW_CONFIG_METHOD_* flags, describing the possible setup methods which can be used with this configuration. + public int @methods; + /// The device type associated with the configuration. + public AVHWDeviceType @device_type; +} + +/// This struct describes the properties of an encoded stream. +public unsafe partial struct AVCodecParameters +{ + /// General type of the encoded data. + public AVMediaType @codec_type; + /// Specific type of the encoded data (the codec used). + public AVCodecID @codec_id; + /// Additional information about the codec (corresponds to the AVI FOURCC). + public uint @codec_tag; + /// Extra binary data needed for initializing the decoder, codec-dependent. + public byte* @extradata; + /// Size of the extradata content in bytes. + public int @extradata_size; + /// - video: the pixel format, the value corresponds to enum AVPixelFormat. - audio: the sample format, the value corresponds to enum AVSampleFormat. + public int @format; + /// The average bitrate of the encoded data (in bits per second). + public long @bit_rate; + /// The number of bits per sample in the codedwords. + public int @bits_per_coded_sample; + /// This is the number of valid bits in each output sample. If the sample format has more bits, the least significant bits are additional padding bits, which are always 0. Use right shifts to reduce the sample to its actual size. 
For example, audio formats with 24 bit samples will have bits_per_raw_sample set to 24, and format set to AV_SAMPLE_FMT_S32. To get the original sample use "(int32_t)sample >> 8". + public int @bits_per_raw_sample; + /// Codec-specific bitstream restrictions that the stream conforms to. + public int @profile; + public int @level; + /// Video only. The dimensions of the video frame in pixels. + public int @width; + public int @height; + /// Video only. The aspect ratio (width / height) which a single pixel should have when displayed. + public AVRational @sample_aspect_ratio; + /// Video only. The order of the fields in interlaced video. + public AVFieldOrder @field_order; + /// Video only. Additional colorspace characteristics. + public AVColorRange @color_range; + public AVColorPrimaries @color_primaries; + public AVColorTransferCharacteristic @color_trc; + public AVColorSpace @color_space; + public AVChromaLocation @chroma_location; + /// Video only. Number of delayed frames. + public int @video_delay; + /// Audio only. The channel layout bitmask. May be 0 if the channel layout is unknown or unspecified, otherwise the number of bits set must be equal to the channels field. + [Obsolete("use ch_layout")] + public ulong @channel_layout; + /// Audio only. The number of audio channels. + [Obsolete("use ch_layout.nb_channels")] + public int @channels; + /// Audio only. The number of audio samples per second. + public int @sample_rate; + /// Audio only. The number of bytes per coded audio frame, required by some formats. + public int @block_align; + /// Audio only. Audio frame size, if known. Required by some formats to be static. + public int @frame_size; + /// Audio only. The amount of padding (in samples) inserted by the encoder at the beginning of the audio. I.e. this number of leading decoded samples must be discarded by the caller to get the original audio without leading padding. + public int @initial_padding; + /// Audio only. 
The amount of padding (in samples) appended by the encoder to the end of the audio. I.e. this number of decoded samples must be discarded by the caller from the end of the stream to get the original audio without any trailing padding. + public int @trailing_padding; + /// Audio only. Number of samples to skip after a discontinuity. + public int @seek_preroll; + /// Audio only. The channel layout and number of channels. + public AVChannelLayout @ch_layout; +} + +public unsafe partial struct AVCodecParser +{ + public int7 @codec_ids; + public int @priv_data_size; + public AVCodecParser_parser_init_func @parser_init; + public AVCodecParser_parser_parse_func @parser_parse; + public AVCodecParser_parser_close_func @parser_close; + public AVCodecParser_split_func @split; +} + +public unsafe partial struct AVCodecParserContext +{ + public void* @priv_data; + public AVCodecParser* @parser; + public long @frame_offset; + public long @cur_offset; + public long @next_frame_offset; + public int @pict_type; + /// This field is used for proper frame duration computation in lavf. It signals, how much longer the frame duration of the current frame is compared to normal frame duration. + public int @repeat_pict; + public long @pts; + public long @dts; + public long @last_pts; + public long @last_dts; + public int @fetch_timestamp; + public int @cur_frame_start_index; + public long4 @cur_frame_offset; + public long4 @cur_frame_pts; + public long4 @cur_frame_dts; + public int @flags; + /// byte offset from starting packet start + public long @offset; + public long4 @cur_frame_end; + /// Set by parser to 1 for key frames and 0 for non-key frames. It is initialized to -1, so if the parser doesn't set this flag, old-style fallback using AV_PICTURE_TYPE_I picture type as key frames will be used. + public int @key_frame; + /// Synchronization point for start of timestamp generation. 
+ public int @dts_sync_point; + /// Offset of the current timestamp against last timestamp sync point in units of AVCodecContext.time_base. + public int @dts_ref_dts_delta; + /// Presentation delay of current frame in units of AVCodecContext.time_base. + public int @pts_dts_delta; + /// Position of the packet in file. + public long4 @cur_frame_pos; + /// Byte position of currently parsed frame in stream. + public long @pos; + /// Previous frame byte position. + public long @last_pos; + /// Duration of the current frame. For audio, this is in units of 1 / AVCodecContext.sample_rate. For all other types, this is in units of AVCodecContext.time_base. + public int @duration; + public AVFieldOrder @field_order; + /// Indicate whether a picture is coded as a frame, top field or bottom field. + public AVPictureStructure @picture_structure; + /// Picture number incremented in presentation or output order. This field may be reinitialized at the first picture of a new sequence. + public int @output_picture_number; + /// Dimensions of the decoded video intended for presentation. + public int @width; + public int @height; + /// Dimensions of the coded video. + public int @coded_width; + public int @coded_height; + /// The format of the coded data, corresponds to enum AVPixelFormat for video and for enum AVSampleFormat for audio. + public int @format; +} + +public unsafe partial struct AVComponentDescriptor +{ + /// Which of the 4 planes contains the component. + public int @plane; + /// Number of elements between 2 horizontally consecutive pixels. Elements are bits for bitstream formats, bytes otherwise. + public int @step; + /// Number of elements before the component of the first pixel. Elements are bits for bitstream formats, bytes otherwise. + public int @offset; + /// Number of least significant bits that must be shifted away to get the value. + public int @shift; + /// Number of bits in the component. 
+ public int @depth; +} + +/// Content light level needed to transmit HDR over HDMI (CTA-861.3). +public unsafe partial struct AVContentLightMetadata +{ + /// Max content light level (cd/m^2). + public uint @MaxCLL; + /// Max average light level per frame (cd/m^2). + public uint @MaxFALL; +} + +/// This structure describes the bitrate properties of an encoded bitstream. It roughly corresponds to a subset of the VBV parameters for MPEG-2 or HRD parameters for H.264/HEVC. +public unsafe partial struct AVCPBProperties +{ + /// Maximum bitrate of the stream, in bits per second. Zero if unknown or unspecified. + public long @max_bitrate; + /// Minimum bitrate of the stream, in bits per second. Zero if unknown or unspecified. + public long @min_bitrate; + /// Average bitrate of the stream, in bits per second. Zero if unknown or unspecified. + public long @avg_bitrate; + /// The size of the buffer to which the ratecontrol is applied, in bits. Zero if unknown or unspecified. + public long @buffer_size; + /// The delay between the time the packet this structure is associated with is received and the time when it should be decoded, in periods of a 27MHz clock. + public ulong @vbv_delay; +} + +/// D3D11 frame descriptor for pool allocation. +public unsafe partial struct AVD3D11FrameDescriptor +{ + /// The texture in which the frame is located. The reference count is managed by the AVBufferRef, and destroying the reference will release the interface. + public ID3D11Texture2D* @texture; + /// The index into the array texture element representing the frame, or 0 if the texture is not an array texture. + public long @index; +} + +/// This structure is used to provide the necessary configurations and data to the Direct3D11 FFmpeg HWAccel implementation. 
+public unsafe partial struct AVD3D11VAContext +{ + /// D3D11 decoder object + public ID3D11VideoDecoder* @decoder; + /// D3D11 VideoContext + public ID3D11VideoContext* @video_context; + /// D3D11 configuration used to create the decoder + public D3D11_VIDEO_DECODER_CONFIG* @cfg; + /// The number of surface in the surface array + public uint @surface_count; + /// The array of Direct3D surfaces used to create the decoder + public ID3D11VideoDecoderOutputView** @surface; + /// A bit field configuring the workarounds needed for using the decoder + public ulong @workaround; + /// Private to the FFmpeg AVHWAccel implementation + public uint @report_id; + /// Mutex to access video_context + public void* @context_mutex; +} + +/// This struct is allocated as AVHWDeviceContext.hwctx +public unsafe partial struct AVD3D11VADeviceContext +{ + /// Device used for texture creation and access. This can also be used to set the libavcodec decoding device. + public ID3D11Device* @device; + /// If unset, this will be set from the device field on init. + public ID3D11DeviceContext* @device_context; + /// If unset, this will be set from the device field on init. + public ID3D11VideoDevice* @video_device; + /// If unset, this will be set from the device_context field on init. + public ID3D11VideoContext* @video_context; + /// Callbacks for locking. They protect accesses to device_context and video_context calls. They also protect access to the internal staging texture (for av_hwframe_transfer_data() calls). They do NOT protect access to hwcontext or decoder state in general. + public AVD3D11VADeviceContext_lock_func @lock; + public AVD3D11VADeviceContext_unlock_func @unlock; + public void* @lock_ctx; +} + +/// This struct is allocated as AVHWFramesContext.hwctx +public unsafe partial struct AVD3D11VAFramesContext +{ + /// The canonical texture used for pool allocation. 
If this is set to NULL on init, the hwframes implementation will allocate and set an array texture if initial_pool_size > 0. + public ID3D11Texture2D* @texture; + /// D3D11_TEXTURE2D_DESC.BindFlags used for texture creation. The user must at least set D3D11_BIND_DECODER if the frames context is to be used for video decoding. This field is ignored/invalid if a user-allocated texture is provided. + public uint @BindFlags; + /// D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation. This field is ignored/invalid if a user-allocated texture is provided. + public uint @MiscFlags; + /// In case if texture structure member above is not NULL contains the same texture pointer for all elements and different indexes into the array texture. In case if texture structure member above is NULL, all elements contains pointers to separate non-array textures and 0 indexes. This field is ignored/invalid if a user-allocated texture is provided. + public AVD3D11FrameDescriptor* @texture_infos; +} + +/// Structure describes device capabilities. +public unsafe partial struct AVDeviceCapabilitiesQuery +{ + public AVClass* @av_class; + public AVFormatContext* @device_context; + public AVCodecID @codec; + public AVSampleFormat @sample_format; + public AVPixelFormat @pixel_format; + public int @sample_rate; + public int @channels; + public long @channel_layout; + public int @window_width; + public int @window_height; + public int @frame_width; + public int @frame_height; + public AVRational @fps; +} + +/// Structure describes basic parameters of the device. +public unsafe partial struct AVDeviceInfo +{ + /// device name, format depends on device + public byte* @device_name; + /// human friendly name + public byte* @device_description; + /// array indicating what media types(s), if any, a device can provide. 
If null, cannot provide any + public AVMediaType* @media_types; + /// length of media_types array, 0 if device cannot provide any media types + public int @nb_media_types; +} + +/// List of devices. +public unsafe partial struct AVDeviceInfoList +{ + /// list of autodetected devices + public AVDeviceInfo** @devices; + /// number of autodetected devices + public int @nb_devices; + /// index of default device or -1 if no default + public int @default_device; +} + +public unsafe partial struct AVDeviceRect +{ + /// x coordinate of top left corner + public int @x; + /// y coordinate of top left corner + public int @y; + /// width + public int @width; + /// height + public int @height; +} + +public unsafe partial struct AVDictionaryEntry +{ + public byte* @key; + public byte* @value; +} + +/// This struct is allocated as AVHWDeviceContext.hwctx +public unsafe partial struct AVDXVA2DeviceContext +{ + public IDirect3DDeviceManager9* @devmgr; +} + +/// This struct is allocated as AVHWFramesContext.hwctx +public unsafe partial struct AVDXVA2FramesContext +{ + /// The surface type (e.g. DXVA2_VideoProcessorRenderTarget or DXVA2_VideoDecoderRenderTarget). Must be set by the caller. + public ulong @surface_type; + /// The surface pool. When an external pool is not provided by the caller, this will be managed (allocated and filled on init, freed on uninit) by libavutil. + public IDirect3DSurface9** @surfaces; + public int @nb_surfaces; + /// Certain drivers require the decoder to be destroyed before the surfaces. To allow internally managed pools to work properly in such cases, this field is provided. + public IDirectXVideoDecoder* @decoder_to_release; +} + +/// This struct represents dynamic metadata for color volume transform - application 4 of SMPTE 2094-40:2016 standard. +public unsafe partial struct AVDynamicHDRPlus +{ + /// Country code by Rec. ITU-T T.35 Annex A. The value shall be 0xB5. 
+ public byte @itu_t_t35_country_code; + /// Application version in the application defining document in ST-2094 suite. The value shall be set to 0. + public byte @application_version; + /// The number of processing windows. The value shall be in the range of 1 to 3, inclusive. + public byte @num_windows; + /// The color transform parameters for every processing window. + public AVHDRPlusColorTransformParams3 @params; + /// The nominal maximum display luminance of the targeted system display, in units of 0.0001 candelas per square metre. The value shall be in the range of 0 to 10000, inclusive. + public AVRational @targeted_system_display_maximum_luminance; + /// This flag shall be equal to 0 in bit streams conforming to this version of this Specification. The value 1 is reserved for future use. + public byte @targeted_system_display_actual_peak_luminance_flag; + /// The number of rows in the targeted system_display_actual_peak_luminance array. The value shall be in the range of 2 to 25, inclusive. + public byte @num_rows_targeted_system_display_actual_peak_luminance; + /// The number of columns in the targeted_system_display_actual_peak_luminance array. The value shall be in the range of 2 to 25, inclusive. + public byte @num_cols_targeted_system_display_actual_peak_luminance; + /// The normalized actual peak luminance of the targeted system display. The values should be in the range of 0 to 1, inclusive and in multiples of 1/15. + public AVRational25x25 @targeted_system_display_actual_peak_luminance; + /// This flag shall be equal to 0 in bitstreams conforming to this version of this Specification. The value 1 is reserved for future use. + public byte @mastering_display_actual_peak_luminance_flag; + /// The number of rows in the mastering_display_actual_peak_luminance array. The value shall be in the range of 2 to 25, inclusive. 
+ public byte @num_rows_mastering_display_actual_peak_luminance; + /// The number of columns in the mastering_display_actual_peak_luminance array. The value shall be in the range of 2 to 25, inclusive. + public byte @num_cols_mastering_display_actual_peak_luminance; + /// The normalized actual peak luminance of the mastering display used for mastering the image essence. The values should be in the range of 0 to 1, inclusive and in multiples of 1/15. + public AVRational25x25 @mastering_display_actual_peak_luminance; +} + +/// Filter definition. This defines the pads a filter contains, and all the callback functions used to interact with the filter. +public unsafe partial struct AVFilter +{ + /// Filter name. Must be non-NULL and unique among filters. + public byte* @name; + /// A description of the filter. May be NULL. + public byte* @description; + /// List of static inputs. + public AVFilterPad* @inputs; + /// List of static outputs. + public AVFilterPad* @outputs; + /// A class for the private data, used to declare filter private AVOptions. This field is NULL for filters that do not declare any options. + public AVClass* @priv_class; + /// A combination of AVFILTER_FLAG_* + public int @flags; + /// The number of entries in the list of inputs. + public byte @nb_inputs; + /// The number of entries in the list of outputs. + public byte @nb_outputs; + /// This field determines the state of the formats union. It is an enum FilterFormatsState value. + public byte @formats_state; + /// Filter pre-initialization function + public AVFilter_preinit_func @preinit; + /// Filter initialization function. + public AVFilter_init_func @init; + /// Should be set instead of AVFilter.init "init" by the filters that want to pass a dictionary of AVOptions to nested contexts that are allocated during init. + public AVFilter_init_dict_func @init_dict; + /// Filter uninitialization function. 
+ public AVFilter_uninit_func @uninit; + public AVFilter_formats @formats; + /// size of private data to allocate for the filter + public int @priv_size; + /// Additional flags for avfilter internal use only. + public int @flags_internal; + /// Make the filter instance process a command. + public AVFilter_process_command_func @process_command; + /// Filter activation function. + public AVFilter_activate_func @activate; +} + +/// The state of the following union is determined by formats_state. See the documentation of enum FilterFormatsState in internal.h. +[StructLayout(LayoutKind.Explicit)] +public unsafe partial struct AVFilter_formats +{ + /// Query formats supported by the filter on its inputs and outputs. + [FieldOffset(0)] + public _query_func_func @query_func; + /// A pointer to an array of admissible pixel formats delimited by AV_PIX_FMT_NONE. The generic code will use this list to indicate that this filter supports each of these pixel formats, provided that all inputs and outputs use the same pixel format. + [FieldOffset(0)] + public AVPixelFormat* @pixels_list; + /// Analogous to pixels, but delimited by AV_SAMPLE_FMT_NONE and restricted to filters that only have AVMEDIA_TYPE_AUDIO inputs and outputs. + [FieldOffset(0)] + public AVSampleFormat* @samples_list; + /// Equivalent to { pix_fmt, AV_PIX_FMT_NONE } as pixels_list. + [FieldOffset(0)] + public AVPixelFormat @pix_fmt; + /// Equivalent to { sample_fmt, AV_SAMPLE_FMT_NONE } as samples_list. 
+ [FieldOffset(0)] + public AVSampleFormat @sample_fmt; +} + +/// An instance of a filter +public unsafe partial struct AVFilterContext +{ + /// needed for av_log() and filters common options + public AVClass* @av_class; + /// the AVFilter of which this is an instance + public AVFilter* @filter; + /// name of this filter instance + public byte* @name; + /// array of input pads + public AVFilterPad* @input_pads; + /// array of pointers to input links + public AVFilterLink** @inputs; + /// number of input pads + public uint @nb_inputs; + /// array of output pads + public AVFilterPad* @output_pads; + /// array of pointers to output links + public AVFilterLink** @outputs; + /// number of output pads + public uint @nb_outputs; + /// private data for use by the filter + public void* @priv; + /// filtergraph this filter belongs to + public AVFilterGraph* @graph; + /// Type of multithreading being allowed/used. A combination of AVFILTER_THREAD_* flags. + public int @thread_type; + /// An opaque struct for libavfilter internal use. + public AVFilterInternal* @internal; + public AVFilterCommand* @command_queue; + /// enable expression string + public byte* @enable_str; + /// parsed expression (AVExpr*) + public void* @enable; + /// variable values for the enable expression + public double* @var_values; + /// the enabled state from the last expression evaluation + public int @is_disabled; + /// For filters which will create hardware frames, sets the device the filter should create them in. All other filters will ignore this field: in particular, a filter which consumes or processes hardware frames will instead use the hw_frames_ctx field in AVFilterLink to carry the hardware context information. + public AVBufferRef* @hw_device_ctx; + /// Max number of threads allowed in this filter instance. If <= 0, its value is ignored. Overrides global number of threads set per filter graph. + public int @nb_threads; + /// Ready status of the filter. 
A non-0 value means that the filter needs activating; a higher value suggests a more urgent activation. + public uint @ready; + /// Sets the number of extra hardware frames which the filter will allocate on its output links for use in following filters or by the caller. + public int @extra_hw_frames; +} + +/// Lists of formats / etc. supported by an end of a link. +public unsafe partial struct AVFilterFormatsConfig +{ + /// List of supported formats (pixel or sample). + public AVFilterFormats* @formats; + /// Lists of supported sample rates, only for audio. + public AVFilterFormats* @samplerates; + /// Lists of supported channel layouts, only for audio. + public AVFilterChannelLayouts* @channel_layouts; +} + +public unsafe partial struct AVFilterGraph +{ + public AVClass* @av_class; + public AVFilterContext** @filters; + public uint @nb_filters; + /// sws options to use for the auto-inserted scale filters + public byte* @scale_sws_opts; + /// Type of multithreading allowed for filters in this graph. A combination of AVFILTER_THREAD_* flags. + public int @thread_type; + /// Maximum number of threads used by filters in this graph. May be set by the caller before adding any filters to the filtergraph. Zero (the default) means that the number of threads is determined automatically. + public int @nb_threads; + /// Opaque object for libavfilter internal use. + public AVFilterGraphInternal* @internal; + /// Opaque user data. May be set by the caller to an arbitrary value, e.g. to be used from callbacks like AVFilterGraph.execute. Libavfilter will not touch this field in any way. + public void* @opaque; + /// This callback may be set by the caller immediately after allocating the graph and before adding any filters to it, to provide a custom multithreading implementation. 
+ public AVFilterGraph_execute_func @execute; + /// swr options to use for the auto-inserted aresample filters, Access ONLY through AVOptions + public byte* @aresample_swr_opts; + /// Private fields + public AVFilterLink** @sink_links; + public int @sink_links_count; + public uint @disable_auto_convert; +} + +/// A linked-list of the inputs/outputs of the filter chain. +public unsafe partial struct AVFilterInOut +{ + /// unique name for this input/output in the list + public byte* @name; + /// filter context associated to this input/output + public AVFilterContext* @filter_ctx; + /// index of the filt_ctx pad to use for linking + public int @pad_idx; + /// next input/output in the list, NULL if this is the last + public AVFilterInOut* @next; +} + +/// A link between two filters. This contains pointers to the source and destination filters between which this link exists, and the indexes of the pads involved. In addition, this link also contains the parameters which have been negotiated and agreed upon between the filter, such as image dimensions, format, etc. +public unsafe partial struct AVFilterLink +{ + /// source filter + public AVFilterContext* @src; + /// output pad on the source filter + public AVFilterPad* @srcpad; + /// dest filter + public AVFilterContext* @dst; + /// input pad on the dest filter + public AVFilterPad* @dstpad; + /// filter media type + public AVMediaType @type; + /// agreed upon image width + public int @w; + /// agreed upon image height + public int @h; + /// agreed upon sample aspect ratio + public AVRational @sample_aspect_ratio; + /// channel layout of current buffer (see libavutil/channel_layout.h) + [Obsolete("use ch_layout")] + public ulong @channel_layout; + /// samples per second + public int @sample_rate; + /// agreed upon media format + public int @format; + /// Define the time base used by the PTS of the frames/samples which will pass through this link. 
During the configuration stage, each filter is supposed to change only the output timebase, while the timebase of the input link is assumed to be an unchangeable property. + public AVRational @time_base; + /// channel layout of current buffer (see libavutil/channel_layout.h) + public AVChannelLayout @ch_layout; + /// Lists of supported formats / etc. supported by the input filter. + public AVFilterFormatsConfig @incfg; + /// Lists of supported formats / etc. supported by the output filter. + public AVFilterFormatsConfig @outcfg; + public AVFilterLink_init_state @init_state; + /// Graph the filter belongs to. + public AVFilterGraph* @graph; + /// Current timestamp of the link, as defined by the most recent frame(s), in link time_base units. + public long @current_pts; + /// Current timestamp of the link, as defined by the most recent frame(s), in AV_TIME_BASE units. + public long @current_pts_us; + /// Index in the age array. + public int @age_index; + /// Frame rate of the stream on the link, or 1/0 if unknown or variable; if left to 0/0, will be automatically copied from the first input of the source filter if it exists. + public AVRational @frame_rate; + /// Minimum number of samples to filter at once. If filter_frame() is called with fewer samples, it will accumulate them in fifo. This field and the related ones must not be changed after filtering has started. If 0, all related fields are ignored. + public int @min_samples; + /// Maximum number of samples to filter at once. If filter_frame() is called with more samples, it will split them. + public int @max_samples; + /// Number of past frames sent through the link. + public long @frame_count_in; + /// Number of past frames sent through the link. + public long @frame_count_out; + /// Number of past samples sent through the link. + public long @sample_count_in; + /// Number of past samples sent through the link. + public long @sample_count_out; + /// A pointer to a FFFramePool struct. 
+ public void* @frame_pool; + /// True if a frame is currently wanted on the output of this filter. Set when ff_request_frame() is called by the output, cleared when a frame is filtered. + public int @frame_wanted_out; + /// For hwaccel pixel formats, this should be a reference to the AVHWFramesContext describing the frames. + public AVBufferRef* @hw_frames_ctx; + /// Internal structure members. The fields below this limit are internal for libavfilter's use and must in no way be accessed by applications. + public byte61440 @reserved; +} + +/// Format I/O context. New fields can be added to the end with minor version bumps. Removal, reordering and changes to existing fields require a major version bump. sizeof(AVFormatContext) must not be used outside libav*, use avformat_alloc_context() to create an AVFormatContext. +public unsafe partial struct AVFormatContext +{ + /// A class for logging and avoptions. Set by avformat_alloc_context(). Exports (de)muxer private options if they exist. + public AVClass* @av_class; + /// The input container format. + public AVInputFormat* @iformat; + /// The output container format. + public AVOutputFormat* @oformat; + /// Format private data. This is an AVOptions-enabled struct if and only if iformat/oformat.priv_class is not NULL. + public void* @priv_data; + /// I/O context. + public AVIOContext* @pb; + /// Flags signalling stream properties. A combination of AVFMTCTX_*. Set by libavformat. + public int @ctx_flags; + /// Number of elements in AVFormatContext.streams. + public uint @nb_streams; + /// A list of all streams in the file. New streams are created with avformat_new_stream(). + public AVStream** @streams; + /// input or output URL. Unlike the old filename field, this field has no length restriction. + public byte* @url; + /// Position of the first frame of the component, in AV_TIME_BASE fractional seconds. NEVER set this value directly: It is deduced from the AVStream values. 
+ public long @start_time; + /// Duration of the stream, in AV_TIME_BASE fractional seconds. Only set this value if you know none of the individual stream durations and also do not set any of them. This is deduced from the AVStream values if not set. + public long @duration; + /// Total stream bitrate in bit/s, 0 if not available. Never set it directly if the file_size and the duration are known as FFmpeg can compute it automatically. + public long @bit_rate; + public uint @packet_size; + public int @max_delay; + /// Flags modifying the (de)muxer behaviour. A combination of AVFMT_FLAG_*. Set by the user before avformat_open_input() / avformat_write_header(). + public int @flags; + /// Maximum number of bytes read from input in order to determine stream properties. Used when reading the global header and in avformat_find_stream_info(). + public long @probesize; + /// Maximum duration (in AV_TIME_BASE units) of the data read from input in avformat_find_stream_info(). Demuxing only, set by the caller before avformat_find_stream_info(). Can be set to 0 to let avformat choose using a heuristic. + public long @max_analyze_duration; + public byte* @key; + public int @keylen; + public uint @nb_programs; + public AVProgram** @programs; + /// Forced video codec_id. Demuxing: Set by user. + public AVCodecID @video_codec_id; + /// Forced audio codec_id. Demuxing: Set by user. + public AVCodecID @audio_codec_id; + /// Forced subtitle codec_id. Demuxing: Set by user. + public AVCodecID @subtitle_codec_id; + /// Maximum amount of memory in bytes to use for the index of each stream. If the index exceeds this size, entries will be discarded as needed to maintain a smaller size. This can lead to slower or less accurate seeking (depends on demuxer). Demuxers for which a full in-memory index is mandatory will ignore this. 
- muxing: unused - demuxing: set by user + public uint @max_index_size; + /// Maximum amount of memory in bytes to use for buffering frames obtained from realtime capture devices. + public uint @max_picture_buffer; + /// Number of chapters in AVChapter array. When muxing, chapters are normally written in the file header, so nb_chapters should normally be initialized before write_header is called. Some muxers (e.g. mov and mkv) can also write chapters in the trailer. To write chapters in the trailer, nb_chapters must be zero when write_header is called and non-zero when write_trailer is called. - muxing: set by user - demuxing: set by libavformat + public uint @nb_chapters; + public AVChapter** @chapters; + /// Metadata that applies to the whole file. + public AVDictionary* @metadata; + /// Start time of the stream in real world time, in microseconds since the Unix epoch (00:00 1st January 1970). That is, pts=0 in the stream was captured at this real world time. - muxing: Set by the caller before avformat_write_header(). If set to either 0 or AV_NOPTS_VALUE, then the current wall-time will be used. - demuxing: Set by libavformat. AV_NOPTS_VALUE if unknown. Note that the value may become known after some number of frames have been received. + public long @start_time_realtime; + /// The number of frames used for determining the framerate in avformat_find_stream_info(). Demuxing only, set by the caller before avformat_find_stream_info(). + public int @fps_probe_size; + /// Error recognition; higher values will detect more errors but may misdetect some more or less valid parts as errors. Demuxing only, set by the caller before avformat_open_input(). + public int @error_recognition; + /// Custom interrupt callbacks for the I/O layer. + public AVIOInterruptCB @interrupt_callback; + /// Flags to enable debugging. + public int @debug; + /// Maximum buffering duration for interleaving. 
+ public long @max_interleave_delta; + /// Allow non-standard and experimental extension + public int @strict_std_compliance; + /// Flags indicating events happening on the file, a combination of AVFMT_EVENT_FLAG_*. + public int @event_flags; + /// Maximum number of packets to read while waiting for the first timestamp. Decoding only. + public int @max_ts_probe; + /// Avoid negative timestamps during muxing. Any value of the AVFMT_AVOID_NEG_TS_* constants. Note, this works better when using av_interleaved_write_frame(). - muxing: Set by user - demuxing: unused + public int @avoid_negative_ts; + /// Transport stream id. This will be moved into demuxer private options. Thus no API/ABI compatibility + public int @ts_id; + /// Audio preload in microseconds. Note, not all formats support this and unpredictable things may happen if it is used when not supported. - encoding: Set by user - decoding: unused + public int @audio_preload; + /// Max chunk time in microseconds. Note, not all formats support this and unpredictable things may happen if it is used when not supported. - encoding: Set by user - decoding: unused + public int @max_chunk_duration; + /// Max chunk size in bytes Note, not all formats support this and unpredictable things may happen if it is used when not supported. - encoding: Set by user - decoding: unused + public int @max_chunk_size; + /// forces the use of wallclock timestamps as pts/dts of packets This has undefined results in the presence of B frames. - encoding: unused - decoding: Set by user + public int @use_wallclock_as_timestamps; + /// avio flags, used to force AVIO_FLAG_DIRECT. - encoding: unused - decoding: Set by user + public int @avio_flags; + /// The duration field can be estimated through various ways, and this field can be used to know how the duration was estimated. 
- encoding: unused - decoding: Read by user + public AVDurationEstimationMethod @duration_estimation_method; + /// Skip initial bytes when opening stream - encoding: unused - decoding: Set by user + public long @skip_initial_bytes; + /// Correct single timestamp overflows - encoding: unused - decoding: Set by user + public uint @correct_ts_overflow; + /// Force seeking to any (also non key) frames. - encoding: unused - decoding: Set by user + public int @seek2any; + /// Flush the I/O context after each packet. - encoding: Set by user - decoding: unused + public int @flush_packets; + /// format probing score. The maximal score is AVPROBE_SCORE_MAX, its set when the demuxer probes the format. - encoding: unused - decoding: set by avformat, read by user + public int @probe_score; + /// Maximum number of bytes read from input in order to identify the AVInputFormat "input format". Only used when the format is not set explicitly by the caller. + public int @format_probesize; + /// ',' separated list of allowed decoders. If NULL then all are allowed - encoding: unused - decoding: set by user + public byte* @codec_whitelist; + /// ',' separated list of allowed demuxers. If NULL then all are allowed - encoding: unused - decoding: set by user + public byte* @format_whitelist; + /// IO repositioned flag. This is set by avformat when the underlying IO context read pointer is repositioned, for example when doing byte based seeking. Demuxers can use the flag to detect such changes. + public int @io_repositioned; + /// Forced video codec. This allows forcing a specific decoder, even when there are multiple with the same codec_id. Demuxing: Set by user + public AVCodec* @video_codec; + /// Forced audio codec. This allows forcing a specific decoder, even when there are multiple with the same codec_id. Demuxing: Set by user + public AVCodec* @audio_codec; + /// Forced subtitle codec. This allows forcing a specific decoder, even when there are multiple with the same codec_id. 
Demuxing: Set by user + public AVCodec* @subtitle_codec; + /// Forced data codec. This allows forcing a specific decoder, even when there are multiple with the same codec_id. Demuxing: Set by user + public AVCodec* @data_codec; + /// Number of bytes to be written as padding in a metadata header. Demuxing: Unused. Muxing: Set by user via av_format_set_metadata_header_padding. + public int @metadata_header_padding; + /// User data. This is a place for some private data of the user. + public void* @opaque; + /// Callback used by devices to communicate with application. + public AVFormatContext_control_message_cb_func @control_message_cb; + /// Output timestamp offset, in microseconds. Muxing: set by user + public long @output_ts_offset; + /// dump format separator. can be ", " or " " or anything else - muxing: Set by user. - demuxing: Set by user. + public byte* @dump_separator; + /// Forced Data codec_id. Demuxing: Set by user. + public AVCodecID @data_codec_id; + /// ',' separated list of allowed protocols. - encoding: unused - decoding: set by user + public byte* @protocol_whitelist; + /// A callback for opening new IO streams. + public AVFormatContext_io_open_func @io_open; + /// A callback for closing the streams opened with AVFormatContext.io_open(). + public AVFormatContext_io_close_func @io_close; + /// ',' separated list of disallowed protocols. - encoding: unused - decoding: set by user + public byte* @protocol_blacklist; + /// The maximum number of streams. - encoding: unused - decoding: set by user + public int @max_streams; + /// Skip duration calculation in estimate_timings_from_pts. - encoding: unused - decoding: set by user + public int @skip_estimate_duration_from_pts; + /// Maximum number of packets that can be probed - encoding: unused - decoding: set by user + public int @max_probe_packets; + /// A callback for closing the streams opened with AVFormatContext.io_open(). 
+ public AVFormatContext_io_close2_func @io_close2; +} + +/// This structure describes decoded (raw) audio or video data. +public unsafe partial struct AVFrame +{ + /// pointer to the picture/channel planes. This might be different from the first allocated byte. For video, it could even point to the end of the image data. + public byte_ptr8 @data; + /// For video, a positive or negative value, which is typically indicating the size in bytes of each picture line, but it can also be: - the negative byte size of lines for vertical flipping (with data[n] pointing to the end of the data - a positive or negative multiple of the byte size as for accessing even and odd fields of a frame (possibly flipped) + public int8 @linesize; + /// pointers to the data planes/channels. + public byte** @extended_data; + /// Video frames only. The coded dimensions (in pixels) of the video frame, i.e. the size of the rectangle that contains some well-defined values. + public int @width; + /// Video frames only. The coded dimensions (in pixels) of the video frame, i.e. the size of the rectangle that contains some well-defined values. + public int @height; + /// number of audio samples (per channel) described by this frame + public int @nb_samples; + /// format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames, enum AVSampleFormat for audio) + public int @format; + /// 1 -> keyframe, 0-> not + public int @key_frame; + /// Picture type of the frame. + public AVPictureType @pict_type; + /// Sample aspect ratio for the video frame, 0/1 if unknown/unspecified. + public AVRational @sample_aspect_ratio; + /// Presentation timestamp in time_base units (time when frame should be shown to user). + public long @pts; + /// DTS copied from the AVPacket that triggered returning this frame. (if frame threading isn't used) This is also the Presentation time of this AVFrame calculated from only AVPacket.dts values without pts values. 
+ public long @pkt_dts; + /// Time base for the timestamps in this frame. In the future, this field may be set on frames output by decoders or filters, but its value will be by default ignored on input to encoders or filters. + public AVRational @time_base; + /// picture number in bitstream order + public int @coded_picture_number; + /// picture number in display order + public int @display_picture_number; + /// quality (between 1 (good) and FF_LAMBDA_MAX (bad)) + public int @quality; + /// for some private data of the user + public void* @opaque; + /// When decoding, this signals how much the picture must be delayed. extra_delay = repeat_pict / (2*fps) + public int @repeat_pict; + /// The content of the picture is interlaced. + public int @interlaced_frame; + /// If the content is interlaced, is top field displayed first. + public int @top_field_first; + /// Tell user application that palette has changed from previous frame. + public int @palette_has_changed; + /// reordered opaque 64 bits (generally an integer or a double precision float PTS but can be anything). The user sets AVCodecContext.reordered_opaque to represent the input at that time, the decoder reorders values as needed and sets AVFrame.reordered_opaque to exactly one of the values provided by the user through AVCodecContext.reordered_opaque + public long @reordered_opaque; + /// Sample rate of the audio data. + public int @sample_rate; + /// Channel layout of the audio data. + [Obsolete("use ch_layout instead")] + public ulong @channel_layout; + /// AVBuffer references backing the data for this frame. All the pointers in data and extended_data must point inside one of the buffers in buf or extended_buf. This array must be filled contiguously -- if buf[i] is non-NULL then buf[j] must also be non-NULL for all j < i. 
+ public AVBufferRef_ptr8 @buf; + /// For planar audio which requires more than AV_NUM_DATA_POINTERS AVBufferRef pointers, this array will hold all the references which cannot fit into AVFrame.buf. + public AVBufferRef** @extended_buf; + /// Number of elements in extended_buf. + public int @nb_extended_buf; + public AVFrameSideData** @side_data; + public int @nb_side_data; + /// Frame flags, a combination of lavu_frame_flags + public int @flags; + /// MPEG vs JPEG YUV range. - encoding: Set by user - decoding: Set by libavcodec + public AVColorRange @color_range; + public AVColorPrimaries @color_primaries; + public AVColorTransferCharacteristic @color_trc; + /// YUV colorspace type. - encoding: Set by user - decoding: Set by libavcodec + public AVColorSpace @colorspace; + public AVChromaLocation @chroma_location; + /// frame timestamp estimated using various heuristics, in stream time base - encoding: unused - decoding: set by libavcodec, read by user. + public long @best_effort_timestamp; + /// reordered pos from the last AVPacket that has been input into the decoder - encoding: unused - decoding: Read by user. + public long @pkt_pos; + /// duration of the corresponding packet, expressed in AVStream->time_base units, 0 if unknown. - encoding: unused - decoding: Read by user. + public long @pkt_duration; + /// metadata. - encoding: Set by user. - decoding: Set by libavcodec. + public AVDictionary* @metadata; + /// decode error flags of the frame, set to a combination of FF_DECODE_ERROR_xxx flags if the decoder produced a frame, but there were errors during the decoding. - encoding: unused - decoding: set by libavcodec, read by user. + public int @decode_error_flags; + /// number of audio channels, only used for audio. - encoding: unused - decoding: Read by user. + [Obsolete("use ch_layout instead")] + public int @channels; + /// size of the corresponding packet containing the compressed frame. It is set to a negative value if unknown. 
- encoding: unused - decoding: set by libavcodec, read by user. + public int @pkt_size; + /// For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame. + public AVBufferRef* @hw_frames_ctx; + /// AVBufferRef for free use by the API user. FFmpeg will never check the contents of the buffer ref. FFmpeg calls av_buffer_unref() on it when the frame is unreferenced. av_frame_copy_props() calls create a new reference with av_buffer_ref() for the target frame's opaque_ref field. + public AVBufferRef* @opaque_ref; + /// cropping Video frames only. The number of pixels to discard from the the top/bottom/left/right border of the frame to obtain the sub-rectangle of the frame intended for presentation. @{ + public ulong @crop_top; + public ulong @crop_bottom; + public ulong @crop_left; + public ulong @crop_right; + /// AVBufferRef for internal use by a single libav* library. Must not be used to transfer data between libraries. Has to be NULL when ownership of the frame leaves the respective library. + public AVBufferRef* @private_ref; + /// Channel layout of the audio data. + public AVChannelLayout @ch_layout; +} + +/// Structure to hold side data for an AVFrame. +public unsafe partial struct AVFrameSideData +{ + public AVFrameSideDataType @type; + public byte* @data; + public ulong @size; + public AVDictionary* @metadata; + public AVBufferRef* @buf; +} + +/// Color transform parameters at a processing window in a dynamic metadata for SMPTE 2094-40. +public unsafe partial struct AVHDRPlusColorTransformParams +{ + /// The relative x coordinate of the top left pixel of the processing window. The value shall be in the range of 0 and 1, inclusive and in multiples of 1/(width of Picture - 1). The value 1 corresponds to the absolute coordinate of width of Picture - 1. The value for first processing window shall be 0. 
+ public AVRational @window_upper_left_corner_x; + /// The relative y coordinate of the top left pixel of the processing window. The value shall be in the range of 0 and 1, inclusive and in multiples of 1/(height of Picture - 1). The value 1 corresponds to the absolute coordinate of height of Picture - 1. The value for first processing window shall be 0. + public AVRational @window_upper_left_corner_y; + /// The relative x coordinate of the bottom right pixel of the processing window. The value shall be in the range of 0 and 1, inclusive and in multiples of 1/(width of Picture - 1). The value 1 corresponds to the absolute coordinate of width of Picture - 1. The value for first processing window shall be 1. + public AVRational @window_lower_right_corner_x; + /// The relative y coordinate of the bottom right pixel of the processing window. The value shall be in the range of 0 and 1, inclusive and in multiples of 1/(height of Picture - 1). The value 1 corresponds to the absolute coordinate of height of Picture - 1. The value for first processing window shall be 1. + public AVRational @window_lower_right_corner_y; + /// The x coordinate of the center position of the concentric internal and external ellipses of the elliptical pixel selector in the processing window. The value shall be in the range of 0 to (width of Picture - 1), inclusive and in multiples of 1 pixel. + public ushort @center_of_ellipse_x; + /// The y coordinate of the center position of the concentric internal and external ellipses of the elliptical pixel selector in the processing window. The value shall be in the range of 0 to (height of Picture - 1), inclusive and in multiples of 1 pixel. + public ushort @center_of_ellipse_y; + /// The clockwise rotation angle in degree of arc with respect to the positive direction of the x-axis of the concentric internal and external ellipses of the elliptical pixel selector in the processing window. 
The value shall be in the range of 0 to 180, inclusive and in multiples of 1. + public byte @rotation_angle; + /// The semi-major axis value of the internal ellipse of the elliptical pixel selector in amount of pixels in the processing window. The value shall be in the range of 1 to 65535, inclusive and in multiples of 1 pixel. + public ushort @semimajor_axis_internal_ellipse; + /// The semi-major axis value of the external ellipse of the elliptical pixel selector in amount of pixels in the processing window. The value shall not be less than semimajor_axis_internal_ellipse of the current processing window. The value shall be in the range of 1 to 65535, inclusive and in multiples of 1 pixel. + public ushort @semimajor_axis_external_ellipse; + /// The semi-minor axis value of the external ellipse of the elliptical pixel selector in amount of pixels in the processing window. The value shall be in the range of 1 to 65535, inclusive and in multiples of 1 pixel. + public ushort @semiminor_axis_external_ellipse; + /// Overlap process option indicates one of the two methods of combining rendered pixels in the processing window in an image with at least one elliptical pixel selector. For overlapping elliptical pixel selectors in an image, overlap_process_option shall have the same value. + public AVHDRPlusOverlapProcessOption @overlap_process_option; + /// The maximum of the color components of linearized RGB values in the processing window in the scene. The values should be in the range of 0 to 1, inclusive and in multiples of 0.00001. maxscl[ 0 ], maxscl[ 1 ], and maxscl[ 2 ] are corresponding to R, G, B color components respectively. + public AVRational3 @maxscl; + /// The average of linearized maxRGB values in the processing window in the scene. The value should be in the range of 0 to 1, inclusive and in multiples of 0.00001. + public AVRational @average_maxrgb; + /// The number of linearized maxRGB values at given percentiles in the processing window in the scene. 
The maximum value shall be 15. + public byte @num_distribution_maxrgb_percentiles; + /// The linearized maxRGB values at given percentiles in the processing window in the scene. + public AVHDRPlusPercentile15 @distribution_maxrgb; + /// The fraction of selected pixels in the image that contains the brightest pixel in the scene. The value shall be in the range of 0 to 1, inclusive and in multiples of 0.001. + public AVRational @fraction_bright_pixels; + /// This flag indicates that the metadata for the tone mapping function in the processing window is present (for value of 1). + public byte @tone_mapping_flag; + /// The x coordinate of the separation point between the linear part and the curved part of the tone mapping function. The value shall be in the range of 0 to 1, excluding 0 and in multiples of 1/4095. + public AVRational @knee_point_x; + /// The y coordinate of the separation point between the linear part and the curved part of the tone mapping function. The value shall be in the range of 0 to 1, excluding 0 and in multiples of 1/4095. + public AVRational @knee_point_y; + /// The number of the intermediate anchor parameters of the tone mapping function in the processing window. The maximum value shall be 15. + public byte @num_bezier_curve_anchors; + /// The intermediate anchor parameters of the tone mapping function in the processing window in the scene. The values should be in the range of 0 to 1, inclusive and in multiples of 1/1023. + public AVRational15 @bezier_curve_anchors; + /// This flag shall be equal to 0 in bitstreams conforming to this version of this Specification. Other values are reserved for future use. + public byte @color_saturation_mapping_flag; + /// The color saturation gain in the processing window in the scene. The value shall be in the range of 0 to 63/8, inclusive and in multiples of 1/8. The default value shall be 1. 
+ public AVRational @color_saturation_weight; +} + +/// Represents the percentile at a specific percentage in a distribution. +public unsafe partial struct AVHDRPlusPercentile +{ + /// The percentage value corresponding to a specific percentile linearized RGB value in the processing window in the scene. The value shall be in the range of 0 to100, inclusive. + public byte @percentage; + /// The linearized maxRGB value at a specific percentile in the processing window in the scene. The value shall be in the range of 0 to 1, inclusive and in multiples of 0.00001. + public AVRational @percentile; +} + +public unsafe partial struct AVHWAccel +{ + /// Name of the hardware accelerated codec. The name is globally unique among encoders and among decoders (but an encoder and a decoder can share the same name). + public byte* @name; + /// Type of codec implemented by the hardware accelerator. + public AVMediaType @type; + /// Codec implemented by the hardware accelerator. + public AVCodecID @id; + /// Supported pixel format. + public AVPixelFormat @pix_fmt; + /// Hardware accelerated codec capabilities. see AV_HWACCEL_CODEC_CAP_* + public int @capabilities; + /// Allocate a custom buffer + public AVHWAccel_alloc_frame_func @alloc_frame; + /// Called at the beginning of each frame or field picture. + public AVHWAccel_start_frame_func @start_frame; + /// Callback for parameter data (SPS/PPS/VPS etc). + public AVHWAccel_decode_params_func @decode_params; + /// Callback for each slice. + public AVHWAccel_decode_slice_func @decode_slice; + /// Called at the end of each frame or field picture. + public AVHWAccel_end_frame_func @end_frame; + /// Size of per-frame hardware accelerator private data. + public int @frame_priv_data_size; + /// Initialize the hwaccel private data. + public AVHWAccel_init_func @init; + /// Uninitialize the hwaccel private data. + public AVHWAccel_uninit_func @uninit; + /// Size of the private data to allocate in AVCodecInternal.hwaccel_priv_data. 
+ public int @priv_data_size; + /// Internal hwaccel capabilities. + public int @caps_internal; + /// Fill the given hw_frames context with current codec parameters. Called from get_format. Refer to avcodec_get_hw_frames_parameters() for details. + public AVHWAccel_frame_params_func @frame_params; +} + +/// This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e. state that is not tied to a concrete processing configuration. E.g., in an API that supports hardware-accelerated encoding and decoding, this struct will (if possible) wrap the state that is common to both encoding and decoding and from which specific instances of encoders or decoders can be derived. +public unsafe partial struct AVHWDeviceContext +{ + /// A class for logging. Set by av_hwdevice_ctx_alloc(). + public AVClass* @av_class; + /// Private data used internally by libavutil. Must not be accessed in any way by the caller. + public AVHWDeviceInternal* @internal; + /// This field identifies the underlying API used for hardware access. + public AVHWDeviceType @type; + /// The format-specific data, allocated and freed by libavutil along with this context. + public void* @hwctx; + /// This field may be set by the caller before calling av_hwdevice_ctx_init(). + public AVHWDeviceContext_free_func @free; + /// Arbitrary user data, to be used e.g. by the free() callback. + public void* @user_opaque; +} + +/// This struct describes the constraints on hardware frames attached to a given device with a hardware-specific configuration. This is returned by av_hwdevice_get_hwframe_constraints() and must be freed by av_hwframe_constraints_free() after use. +public unsafe partial struct AVHWFramesConstraints +{ + /// A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE. This member will always be filled. + public AVPixelFormat* @valid_hw_formats; + /// A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE. 
Can be NULL if this information is not known. + public AVPixelFormat* @valid_sw_formats; + /// The minimum size of frames in this hw_frames_ctx. (Zero if not known.) + public int @min_width; + public int @min_height; + /// The maximum size of frames in this hw_frames_ctx. (INT_MAX if not known / no limit.) + public int @max_width; + public int @max_height; +} + +/// This struct describes a set or pool of "hardware" frames (i.e. those with data not located in normal system memory). All the frames in the pool are assumed to be allocated in the same way and interchangeable. +public unsafe partial struct AVHWFramesContext +{ + /// A class for logging. + public AVClass* @av_class; + /// Private data used internally by libavutil. Must not be accessed in any way by the caller. + public AVHWFramesInternal* @internal; + /// A reference to the parent AVHWDeviceContext. This reference is owned and managed by the enclosing AVHWFramesContext, but the caller may derive additional references from it. + public AVBufferRef* @device_ref; + /// The parent AVHWDeviceContext. This is simply a pointer to device_ref->data provided for convenience. + public AVHWDeviceContext* @device_ctx; + /// The format-specific data, allocated and freed automatically along with this context. + public void* @hwctx; + /// This field may be set by the caller before calling av_hwframe_ctx_init(). + public AVHWFramesContext_free_func @free; + /// Arbitrary user data, to be used e.g. by the free() callback. + public void* @user_opaque; + /// A pool from which the frames are allocated by av_hwframe_get_buffer(). This field may be set by the caller before calling av_hwframe_ctx_init(). The buffers returned by calling av_buffer_pool_get() on this pool must have the properties described in the documentation in the corresponding hw type's header (hwcontext_*.h). The pool will be freed strictly before this struct's free() callback is invoked. + public AVBufferPool* @pool; + /// Initial size of the frame pool. 
If a device type does not support dynamically resizing the pool, then this is also the maximum pool size. + public int @initial_pool_size; + /// The pixel format identifying the underlying HW surface type. + public AVPixelFormat @format; + /// The pixel format identifying the actual data layout of the hardware frames. + public AVPixelFormat @sw_format; + /// The allocated dimensions of the frames in this pool. + public int @width; + /// The allocated dimensions of the frames in this pool. + public int @height; +} + +public unsafe partial struct AVIndexEntry +{ + public long @pos; + /// Timestamp in AVStream.time_base units, preferably the time from which on correctly decoded frames are available when seeking to this entry. That means preferable PTS on keyframe based formats. But demuxers can choose to store a different timestamp, if it is more convenient for the implementation or nothing better is known + public long @timestamp; + /// Flag is used to indicate which frame should be discarded after decoding. + public int @flags2_size30; + /// Minimum distance between this and the previous keyframe, used to avoid unneeded searching. + public int @min_distance; +} + +/// @{ +public unsafe partial struct AVInputFormat +{ + /// A comma separated list of short names for the format. New names may be appended with a minor bump. + public byte* @name; + /// Descriptive name for the format, meant to be more human-readable than name. You should use the NULL_IF_CONFIG_SMALL() macro to define it. + public byte* @long_name; + /// Can use flags: AVFMT_NOFILE, AVFMT_NEEDNUMBER, AVFMT_SHOW_IDS, AVFMT_NOTIMESTAMPS, AVFMT_GENERIC_INDEX, AVFMT_TS_DISCONT, AVFMT_NOBINSEARCH, AVFMT_NOGENSEARCH, AVFMT_NO_BYTE_SEEK, AVFMT_SEEK_TO_PTS. + public int @flags; + /// If extensions are defined, then no probe is done. 
You should usually not use extension format guessing because it is not reliable enough + public byte* @extensions; + public AVCodecTag** @codec_tag; + /// AVClass for the private context + public AVClass* @priv_class; + /// Comma-separated list of mime types. It is used check for matching mime types while probing. + public byte* @mime_type; + /// *************************************************************** No fields below this line are part of the public API. They may not be used outside of libavformat and can be changed and removed at will. New public fields should be added right above. **************************************************************** + public int @raw_codec_id; + /// Size of private data so that it can be allocated in the wrapper. + public int @priv_data_size; + /// Internal flags. See FF_FMT_FLAG_* in internal.h. + public int @flags_internal; + /// Tell if a given file has a chance of being parsed as this format. The buffer provided is guaranteed to be AVPROBE_PADDING_SIZE bytes big so you do not have to check for that unless you need more. + public AVInputFormat_read_probe_func @read_probe; + /// Read the format header and initialize the AVFormatContext structure. Return 0 if OK. 'avformat_new_stream' should be called to create new streams. + public AVInputFormat_read_header_func @read_header; + /// Read one packet and put it in 'pkt'. pts and flags are also set. 'avformat_new_stream' can be called only if the flag AVFMTCTX_NOHEADER is used and only in the calling thread (not in a background thread). + public AVInputFormat_read_packet_func @read_packet; + /// Close the stream. The AVFormatContext and AVStreams are not freed by this function + public AVInputFormat_read_close_func @read_close; + /// Seek to a given timestamp relative to the frames in stream component stream_index. + public AVInputFormat_read_seek_func @read_seek; + /// Get the next timestamp in stream[stream_index].time_base units. 
+ public AVInputFormat_read_timestamp_func @read_timestamp; + /// Start/resume playing - only meaningful if using a network-based format (RTSP). + public AVInputFormat_read_play_func @read_play; + /// Pause playing - only meaningful if using a network-based format (RTSP). + public AVInputFormat_read_pause_func @read_pause; + /// Seek to timestamp ts. Seeking will be done so that the point from which all active streams can be presented successfully will be closest to ts and within min/max_ts. Active streams are all streams that have AVStream.discard < AVDISCARD_ALL. + public AVInputFormat_read_seek2_func @read_seek2; + /// Returns device list with it properties. + public AVInputFormat_get_device_list_func @get_device_list; +} + +/// Bytestream IO Context. New public fields can be added with minor version bumps. Removal, reordering and changes to existing public fields require a major version bump. sizeof(AVIOContext) must not be used outside libav*. +public unsafe partial struct AVIOContext +{ + /// A class for private options. + public AVClass* @av_class; + /// Start of the buffer. + public byte* @buffer; + /// Maximum buffer size + public int @buffer_size; + /// Current position in the buffer + public byte* @buf_ptr; + /// End of the data, may be less than buffer+buffer_size if the read function returned less data than requested, e.g. for streams where no more data has been received yet. + public byte* @buf_end; + /// A private pointer, passed to the read/write/seek/... functions. 
+ public void* @opaque; + public AVIOContext_read_packet_func @read_packet; + public AVIOContext_write_packet_func @write_packet; + public AVIOContext_seek_func @seek; + /// position in the file of the current buffer + public long @pos; + /// true if was unable to read due to error or eof + public int @eof_reached; + /// contains the error code or 0 if no error happened + public int @error; + /// true if open for writing + public int @write_flag; + public int @max_packet_size; + /// Try to buffer at least this amount of data before flushing it. + public int @min_packet_size; + public ulong @checksum; + public byte* @checksum_ptr; + public AVIOContext_update_checksum_func @update_checksum; + /// Pause or resume playback for network streaming protocols - e.g. MMS. + public AVIOContext_read_pause_func @read_pause; + /// Seek to a given timestamp in stream with the specified stream_index. Needed for some network streaming protocols which don't support seeking to byte position. + public AVIOContext_read_seek_func @read_seek; + /// A combination of AVIO_SEEKABLE_ flags or 0 when the stream is not seekable. + public int @seekable; + /// avio_read and avio_write should if possible be satisfied directly instead of going through a buffer, and avio_seek will always call the underlying seek function directly. + public int @direct; + /// ',' separated list of allowed protocols. + public byte* @protocol_whitelist; + /// ',' separated list of disallowed protocols. + public byte* @protocol_blacklist; + /// A callback that is used instead of write_packet. + public AVIOContext_write_data_type_func @write_data_type; + /// If set, don't call write_data_type separately for AVIO_DATA_MARKER_BOUNDARY_POINT, but ignore them and treat them as AVIO_DATA_MARKER_UNKNOWN (to avoid needlessly small chunks of data returned from the callback). + public int @ignore_boundary_point; + [Obsolete("field utilized privately by libavformat. 
For a public statistic of how many bytes were written out, see AVIOContext::bytes_written.")] + public long @written; + /// Maximum reached position before a backward seek in the write buffer, used keeping track of already written data for a later flush. + public byte* @buf_ptr_max; + /// Read-only statistic of bytes read for this AVIOContext. + public long @bytes_read; + /// Read-only statistic of bytes written for this AVIOContext. + public long @bytes_written; +} + +public unsafe partial struct AVIODirContext +{ + public URLContext* @url_context; +} + +/// Describes single entry of the directory. +public unsafe partial struct AVIODirEntry +{ + /// Filename + public byte* @name; + /// Type of the entry + public int @type; + /// Set to 1 when name is encoded with UTF-8, 0 otherwise. Name can be encoded with UTF-8 even though 0 is set. + public int @utf8; + /// File size in bytes, -1 if unknown. + public long @size; + /// Time of last modification in microseconds since unix epoch, -1 if unknown. + public long @modification_timestamp; + /// Time of last access in microseconds since unix epoch, -1 if unknown. + public long @access_timestamp; + /// Time of last status change in microseconds since unix epoch, -1 if unknown. + public long @status_change_timestamp; + /// User ID of owner, -1 if unknown. + public long @user_id; + /// Group ID of owner, -1 if unknown. + public long @group_id; + /// Unix file mode, -1 if unknown. + public long @filemode; +} + +/// Callback for checking whether to abort blocking functions. AVERROR_EXIT is returned in this case by the interrupted function. During blocking operations, callback is called with opaque as parameter. If the callback returns 1, the blocking operation will be aborted. 
+public unsafe partial struct AVIOInterruptCB +{ + public AVIOInterruptCB_callback_func @callback; + public void* @opaque; +} + +/// Mastering display metadata capable of representing the color volume of the display used to master the content (SMPTE 2086:2014). +public unsafe partial struct AVMasteringDisplayMetadata +{ + /// CIE 1931 xy chromaticity coords of color primaries (r, g, b order). + public AVRational3x2 @display_primaries; + /// CIE 1931 xy chromaticity coords of white point. + public AVRational2 @white_point; + /// Min luminance of mastering display (cd/m^2). + public AVRational @min_luminance; + /// Max luminance of mastering display (cd/m^2). + public AVRational @max_luminance; + /// Flag indicating whether the display primaries (and white point) are set. + public int @has_primaries; + /// Flag indicating whether the luminance (min_ and max_) have been set. + public int @has_luminance; +} + +/// AVOption +public unsafe partial struct AVOption +{ + public byte* @name; + /// short English help text + public byte* @help; + /// The offset relative to the context structure where the option value is stored. It should be 0 for named constants. + public int @offset; + public AVOptionType @type; + public AVOption_default_val @default_val; + /// minimum valid value for the option + public double @min; + /// maximum valid value for the option + public double @max; + public int @flags; + /// The logical unit to which the option belongs. Non-constant options and corresponding named constants share the same unit. May be NULL. + public byte* @unit; +} + +/// the default value for scalar options +[StructLayout(LayoutKind.Explicit)] +public unsafe partial struct AVOption_default_val +{ + [FieldOffset(0)] + public long @i64; + [FieldOffset(0)] + public double @dbl; + [FieldOffset(0)] + public byte* @str; + [FieldOffset(0)] + public AVRational @q; +} + +/// A single allowed range of values, or a single allowed value. 
+public unsafe partial struct AVOptionRange +{ + public byte* @str; + /// Value range. For string ranges this represents the min/max length. For dimensions this represents the min/max pixel count or width/height in multi-component case. + public double @value_min; + /// Value range. For string ranges this represents the min/max length. For dimensions this represents the min/max pixel count or width/height in multi-component case. + public double @value_max; + /// Value's component range. For string this represents the unicode range for chars, 0-127 limits to ASCII. + public double @component_min; + /// Value's component range. For string this represents the unicode range for chars, 0-127 limits to ASCII. + public double @component_max; + /// Range flag. If set to 1 the struct encodes a range, if set to 0 a single value. + public int @is_range; +} + +/// List of AVOptionRange structs. +public unsafe partial struct AVOptionRanges +{ + /// Array of option ranges. + public AVOptionRange** @range; + /// Number of ranges per component. + public int @nb_ranges; + /// Number of componentes. + public int @nb_components; +} + +/// @{ +public unsafe partial struct AVOutputFormat +{ + public byte* @name; + /// Descriptive name for the format, meant to be more human-readable than name. You should use the NULL_IF_CONFIG_SMALL() macro to define it. + public byte* @long_name; + public byte* @mime_type; + /// comma-separated filename extensions + public byte* @extensions; + /// default audio codec + public AVCodecID @audio_codec; + /// default video codec + public AVCodecID @video_codec; + /// default subtitle codec + public AVCodecID @subtitle_codec; + /// can use flags: AVFMT_NOFILE, AVFMT_NEEDNUMBER, AVFMT_GLOBALHEADER, AVFMT_NOTIMESTAMPS, AVFMT_VARIABLE_FPS, AVFMT_NODIMENSIONS, AVFMT_NOSTREAMS, AVFMT_ALLOW_FLUSH, AVFMT_TS_NONSTRICT, AVFMT_TS_NEGATIVE + public int @flags; + /// List of supported codec_id-codec_tag pairs, ordered by "better choice first". 
The arrays are all terminated by AV_CODEC_ID_NONE. + public AVCodecTag** @codec_tag; + /// AVClass for the private context + public AVClass* @priv_class; + /// *************************************************************** No fields below this line are part of the public API. They may not be used outside of libavformat and can be changed and removed at will. New public fields should be added right above. **************************************************************** + public int @priv_data_size; + /// Internal flags. See FF_FMT_FLAG_* in internal.h. + public int @flags_internal; + public AVOutputFormat_write_header_func @write_header; + /// Write a packet. If AVFMT_ALLOW_FLUSH is set in flags, pkt can be NULL in order to flush data buffered in the muxer. When flushing, return 0 if there still is more data to flush, or 1 if everything was flushed and there is no more buffered data. + public AVOutputFormat_write_packet_func @write_packet; + public AVOutputFormat_write_trailer_func @write_trailer; + /// A format-specific function for interleavement. If unset, packets will be interleaved by dts. + public AVOutputFormat_interleave_packet_func @interleave_packet; + /// Test if the given codec can be stored in this container. + public AVOutputFormat_query_codec_func @query_codec; + public AVOutputFormat_get_output_timestamp_func @get_output_timestamp; + /// Allows sending messages from application to device. + public AVOutputFormat_control_message_func @control_message; + /// Write an uncoded AVFrame. + public AVOutputFormat_write_uncoded_frame_func @write_uncoded_frame; + /// Returns device list with it properties. + public AVOutputFormat_get_device_list_func @get_device_list; + /// default data codec + public AVCodecID @data_codec; + /// Initialize format. May allocate data here, and set any AVFormatContext or AVStream parameters that need to be set before packets are sent. This method must not write output. 
+ public AVOutputFormat_init_func @init; + /// Deinitialize format. If present, this is called whenever the muxer is being destroyed, regardless of whether or not the header has been written. + public AVOutputFormat_deinit_func @deinit; + /// Set up any necessary bitstream filtering and extract any extra data needed for the global header. + public AVOutputFormat_check_bitstream_func @check_bitstream; +} + +/// This structure stores compressed data. It is typically exported by demuxers and then passed as input to decoders, or received as output from encoders and then passed to muxers. +public unsafe partial struct AVPacket +{ + /// A reference to the reference-counted buffer where the packet data is stored. May be NULL, then the packet data is not reference-counted. + public AVBufferRef* @buf; + /// Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will be presented to the user. Can be AV_NOPTS_VALUE if it is not stored in the file. pts MUST be larger or equal to dts as presentation cannot happen before decompression, unless one wants to view hex dumps. Some formats misuse the terms dts and pts/cts to mean something different. Such timestamps must be converted to true pts/dts before they are stored in AVPacket. + public long @pts; + /// Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed. Can be AV_NOPTS_VALUE if it is not stored in the file. + public long @dts; + public byte* @data; + public int @size; + public int @stream_index; + /// A combination of AV_PKT_FLAG values + public int @flags; + /// Additional packet data that can be provided by the container. Packet can contain several types of side information. + public AVPacketSideData* @side_data; + public int @side_data_elems; + /// Duration of this packet in AVStream->time_base units, 0 if unknown. Equals next_pts - this_pts in presentation order. 
+ public long @duration; + /// byte position in stream, -1 if unknown + public long @pos; + /// for some private data of the user + public void* @opaque; + /// AVBufferRef for free use by the API user. FFmpeg will never check the contents of the buffer ref. FFmpeg calls av_buffer_unref() on it when the packet is unreferenced. av_packet_copy_props() calls create a new reference with av_buffer_ref() for the target packet's opaque_ref field. + public AVBufferRef* @opaque_ref; + /// Time base of the packet's timestamps. In the future, this field may be set on packets output by encoders or demuxers, but its value will be by default ignored on input to decoders or muxers. + public AVRational @time_base; +} + +public unsafe partial struct AVPacketList +{ + public AVPacket @pkt; + public AVPacketList* @next; +} + +public unsafe partial struct AVPacketSideData +{ + public byte* @data; + public ulong @size; + public AVPacketSideDataType @type; +} + +/// Pan Scan area. This specifies the area which should be displayed. Note there may be multiple such areas for one frame. +public unsafe partial struct AVPanScan +{ + /// id - encoding: Set by user. - decoding: Set by libavcodec. + public int @id; + /// width and height in 1/16 pel - encoding: Set by user. - decoding: Set by libavcodec. + public int @width; + public int @height; + /// position of the top left corner in 1/16 pel for up to 3 fields/frames - encoding: Set by user. - decoding: Set by libavcodec. + public short3x2 @position; +} + +/// Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes of an image. It also stores the subsampling factors and number of components. +public unsafe partial struct AVPixFmtDescriptor +{ + public byte* @name; + /// The number of components each pixel has, (1-4) + public byte @nb_components; + /// Amount to shift the luma width right to find the chroma width. For YV12 this is 1 for example. 
chroma_width = AV_CEIL_RSHIFT(luma_width, log2_chroma_w) The note above is needed to ensure rounding up. This value only refers to the chroma components. + public byte @log2_chroma_w; + /// Amount to shift the luma height right to find the chroma height. For YV12 this is 1 for example. chroma_height= AV_CEIL_RSHIFT(luma_height, log2_chroma_h) The note above is needed to ensure rounding up. This value only refers to the chroma components. + public byte @log2_chroma_h; + /// Combination of AV_PIX_FMT_FLAG_... flags. + public ulong @flags; + /// Parameters that describe how pixels are packed. If the format has 1 or 2 components, then luma is 0. If the format has 3 or 4 components: if the RGB flag is set then 0 is red, 1 is green and 2 is blue; otherwise 0 is luma, 1 is chroma-U and 2 is chroma-V. + public AVComponentDescriptor4 @comp; + /// Alternative comma-separated names. + public byte* @alias; +} + +/// This structure contains the data a format has to probe a file. +public unsafe partial struct AVProbeData +{ + public byte* @filename; + /// Buffer must have AVPROBE_PADDING_SIZE of extra allocated bytes filled with zero. + public byte* @buf; + /// Size of buf except extra allocated bytes + public int @buf_size; + /// mime_type, when known. + public byte* @mime_type; +} + +/// This structure supplies correlation between a packet timestamp and a wall clock production time. The definition follows the Producer Reference Time ('prft') as defined in ISO/IEC 14496-12 +public unsafe partial struct AVProducerReferenceTime +{ + /// A UTC timestamp, in microseconds, since Unix epoch (e.g, av_gettime()). + public long @wallclock; + public int @flags; +} + +/// AVProfile. +public unsafe partial struct AVProfile +{ + public int @profile; + /// short name for the profile + public byte* @name; +} + +/// New fields can be added to the end with minor version bumps. Removal, reordering and changes to existing fields require a major version bump. 
sizeof(AVProgram) must not be used outside libav*. +public unsafe partial struct AVProgram +{ + public int @id; + public int @flags; + /// selects which program to discard and which to feed to the caller + public AVDiscard @discard; + public uint* @stream_index; + public uint @nb_stream_indexes; + public AVDictionary* @metadata; + public int @program_num; + public int @pmt_pid; + public int @pcr_pid; + public int @pmt_version; + /// *************************************************************** All fields below this line are not part of the public API. They may not be used outside of libavformat and can be changed and removed at will. New public fields should be added right above. **************************************************************** + public long @start_time; + public long @end_time; + /// reference dts for wrap detection + public long @pts_wrap_reference; + /// behavior on wrap detection + public int @pts_wrap_behavior; +} + +/// Rational number (pair of numerator and denominator). +public unsafe partial struct AVRational +{ + /// Numerator + public int @num; + /// Denominator + public int @den; +} + +/// Structure describing a single Region Of Interest. +public unsafe partial struct AVRegionOfInterest +{ + /// Must be set to the size of this data structure (that is, sizeof(AVRegionOfInterest)). + public uint @self_size; + /// Distance in pixels from the top edge of the frame to the top and bottom edges and from the left edge of the frame to the left and right edges of the rectangle defining this region of interest. + public int @top; + public int @bottom; + public int @left; + public int @right; + /// Quantisation offset. + public AVRational @qoffset; +} + +/// Stream structure. New fields can be added to the end with minor version bumps. Removal, reordering and changes to existing fields require a major version bump. sizeof(AVStream) must not be used outside libav*. 
+public unsafe partial struct AVStream +{ + /// stream index in AVFormatContext + public int @index; + /// Format-specific stream ID. decoding: set by libavformat encoding: set by the user, replaced by libavformat if left unset + public int @id; + public void* @priv_data; + /// This is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented. + public AVRational @time_base; + /// Decoding: pts of the first frame of the stream in presentation order, in stream time base. Only set this if you are absolutely 100% sure that the value you set it to really is the pts of the first frame. This may be undefined (AV_NOPTS_VALUE). + public long @start_time; + /// Decoding: duration of the stream, in stream time base. If a source file does not specify a duration, but does specify a bitrate, this value will be estimated from bitrate and file size. + public long @duration; + /// number of frames in this stream if known or 0 + public long @nb_frames; + /// Stream disposition - a combination of AV_DISPOSITION_* flags. - demuxing: set by libavformat when creating the stream or in avformat_find_stream_info(). - muxing: may be set by the caller before avformat_write_header(). + public int @disposition; + /// Selects which packets can be discarded at will and do not need to be demuxed. + public AVDiscard @discard; + /// sample aspect ratio (0 if unknown) - encoding: Set by user. - decoding: Set by libavformat. + public AVRational @sample_aspect_ratio; + public AVDictionary* @metadata; + /// Average framerate + public AVRational @avg_frame_rate; + /// For streams with AV_DISPOSITION_ATTACHED_PIC disposition, this packet will contain the attached picture. + public AVPacket @attached_pic; + /// An array of side data that applies to the whole stream (i.e. the container does not allow it to change between packets). + public AVPacketSideData* @side_data; + /// The number of elements in the AVStream.side_data array. 
+ public int @nb_side_data; + /// Flags indicating events happening on the stream, a combination of AVSTREAM_EVENT_FLAG_*. + public int @event_flags; + /// Real base framerate of the stream. This is the lowest framerate with which all timestamps can be represented accurately (it is the least common multiple of all framerates in the stream). Note, this value is just a guess! For example, if the time base is 1/90000 and all frames have either approximately 3600 or 1800 timer ticks, then r_frame_rate will be 50/1. + public AVRational @r_frame_rate; + /// Codec parameters associated with this stream. Allocated and freed by libavformat in avformat_new_stream() and avformat_free_context() respectively. + public AVCodecParameters* @codecpar; + /// Number of bits in timestamps. Used for wrapping control. + public int @pts_wrap_bits; +} + +public unsafe partial struct AVSubtitle +{ + public ushort @format; + public uint @start_display_time; + public uint @end_display_time; + public uint @num_rects; + public AVSubtitleRect** @rects; + /// Same as packet pts, in AV_TIME_BASE + public long @pts; +} + +public unsafe partial struct AVSubtitleRect +{ + /// top left corner of pict, undefined when pict is not set + public int @x; + /// top left corner of pict, undefined when pict is not set + public int @y; + /// width of pict, undefined when pict is not set + public int @w; + /// height of pict, undefined when pict is not set + public int @h; + /// number of colors in pict, undefined when pict is not set + public int @nb_colors; + /// data+linesize for the bitmap of this subtitle. Can be set for text/ass as well once they are rendered. + public byte_ptr4 @data; + public int4 @linesize; + public AVSubtitleType @type; + /// 0 terminated plain UTF-8 text + public byte* @text; + /// 0 terminated ASS/SSA compatible event line. The presentation of this is unaffected by the other values in this struct. 
+ public byte* @ass; + public int @flags; +} + +public unsafe partial struct AVTimecode +{ + /// timecode frame start (first base frame number) + public int @start; + /// flags such as drop frame, +24 hours support, ... + public uint @flags; + /// frame rate in rational form + public AVRational @rate; + /// frame per second; must be consistent with the rate field + public uint @fps; +} + +public unsafe partial struct D3D11_VIDEO_DECODER_CONFIG +{ + public _GUID @guidConfigBitstreamEncryption; + public _GUID @guidConfigMBcontrolEncryption; + public _GUID @guidConfigResidDiffEncryption; + public uint @ConfigBitstreamRaw; + public uint @ConfigMBcontrolRasterOrder; + public uint @ConfigResidDiffHost; + public uint @ConfigSpatialResid8; + public uint @ConfigResid8Subtraction; + public uint @ConfigSpatialHost8or9Clipping; + public uint @ConfigSpatialResidInterleaved; + public uint @ConfigIntraResidUnsigned; + public uint @ConfigResidDiffAccelerator; + public uint @ConfigHostInverseScan; + public uint @ConfigSpecificIDCT; + public uint @Config4GroupedCoefs; + public ushort @ConfigMinRenderTargetBuffCount; + public ushort @ConfigDecoderSpecific; +} + +public unsafe partial struct ID3D11Device +{ + public ID3D11DeviceVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11DeviceContext +{ + public ID3D11DeviceContextVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11DeviceContextVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetDevice; + public void* @GetPrivateData; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; + public void* @VSSetConstantBuffers; + public void* @PSSetShaderResources; + public void* @PSSetShader; + public void* @PSSetSamplers; + public void* @VSSetShader; + public void* @DrawIndexed; + public void* @Draw; + public void* @Map; + public void* @Unmap; + public void* @PSSetConstantBuffers; + public void* @IASetInputLayout; + public void* @IASetVertexBuffers; + public 
void* @IASetIndexBuffer; + public void* @DrawIndexedInstanced; + public void* @DrawInstanced; + public void* @GSSetConstantBuffers; + public void* @GSSetShader; + public void* @IASetPrimitiveTopology; + public void* @VSSetShaderResources; + public void* @VSSetSamplers; + public void* @Begin; + public void* @End; + public void* @GetData; + public void* @SetPredication; + public void* @GSSetShaderResources; + public void* @GSSetSamplers; + public void* @OMSetRenderTargets; + public void* @OMSetRenderTargetsAndUnorderedAccessViews; + public void* @OMSetBlendState; + public void* @OMSetDepthStencilState; + public void* @SOSetTargets; + public void* @DrawAuto; + public void* @DrawIndexedInstancedIndirect; + public void* @DrawInstancedIndirect; + public void* @Dispatch; + public void* @DispatchIndirect; + public void* @RSSetState; + public void* @RSSetViewports; + public void* @RSSetScissorRects; + public void* @CopySubresourceRegion; + public void* @CopyResource; + public void* @UpdateSubresource; + public void* @CopyStructureCount; + public void* @ClearRenderTargetView; + public void* @ClearUnorderedAccessViewUint; + public void* @ClearUnorderedAccessViewFloat; + public void* @ClearDepthStencilView; + public void* @GenerateMips; + public void* @SetResourceMinLOD; + public void* @GetResourceMinLOD; + public void* @ResolveSubresource; + public void* @ExecuteCommandList; + public void* @HSSetShaderResources; + public void* @HSSetShader; + public void* @HSSetSamplers; + public void* @HSSetConstantBuffers; + public void* @DSSetShaderResources; + public void* @DSSetShader; + public void* @DSSetSamplers; + public void* @DSSetConstantBuffers; + public void* @CSSetShaderResources; + public void* @CSSetUnorderedAccessViews; + public void* @CSSetShader; + public void* @CSSetSamplers; + public void* @CSSetConstantBuffers; + public void* @VSGetConstantBuffers; + public void* @PSGetShaderResources; + public void* @PSGetShader; + public void* @PSGetSamplers; + public void* 
@VSGetShader; + public void* @PSGetConstantBuffers; + public void* @IAGetInputLayout; + public void* @IAGetVertexBuffers; + public void* @IAGetIndexBuffer; + public void* @GSGetConstantBuffers; + public void* @GSGetShader; + public void* @IAGetPrimitiveTopology; + public void* @VSGetShaderResources; + public void* @VSGetSamplers; + public void* @GetPredication; + public void* @GSGetShaderResources; + public void* @GSGetSamplers; + public void* @OMGetRenderTargets; + public void* @OMGetRenderTargetsAndUnorderedAccessViews; + public void* @OMGetBlendState; + public void* @OMGetDepthStencilState; + public void* @SOGetTargets; + public void* @RSGetState; + public void* @RSGetViewports; + public void* @RSGetScissorRects; + public void* @HSGetShaderResources; + public void* @HSGetShader; + public void* @HSGetSamplers; + public void* @HSGetConstantBuffers; + public void* @DSGetShaderResources; + public void* @DSGetShader; + public void* @DSGetSamplers; + public void* @DSGetConstantBuffers; + public void* @CSGetShaderResources; + public void* @CSGetUnorderedAccessViews; + public void* @CSGetShader; + public void* @CSGetSamplers; + public void* @CSGetConstantBuffers; + public void* @ClearState; + public void* @Flush; + public void* @GetType; + public void* @GetContextFlags; + public void* @FinishCommandList; +} + +public unsafe partial struct ID3D11DeviceVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @CreateBuffer; + public void* @CreateTexture1D; + public void* @CreateTexture2D; + public void* @CreateTexture3D; + public void* @CreateShaderResourceView; + public void* @CreateUnorderedAccessView; + public void* @CreateRenderTargetView; + public void* @CreateDepthStencilView; + public void* @CreateInputLayout; + public void* @CreateVertexShader; + public void* @CreateGeometryShader; + public void* @CreateGeometryShaderWithStreamOutput; + public void* @CreatePixelShader; + public void* @CreateHullShader; + public void* 
@CreateDomainShader; + public void* @CreateComputeShader; + public void* @CreateClassLinkage; + public void* @CreateBlendState; + public void* @CreateDepthStencilState; + public void* @CreateRasterizerState; + public void* @CreateSamplerState; + public void* @CreateQuery; + public void* @CreatePredicate; + public void* @CreateCounter; + public void* @CreateDeferredContext; + public void* @OpenSharedResource; + public void* @CheckFormatSupport; + public void* @CheckMultisampleQualityLevels; + public void* @CheckCounterInfo; + public void* @CheckCounter; + public void* @CheckFeatureSupport; + public void* @GetPrivateData; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; + public void* @GetFeatureLevel; + public void* @GetCreationFlags; + public void* @GetDeviceRemovedReason; + public void* @GetImmediateContext; + public void* @SetExceptionMode; + public void* @GetExceptionMode; +} + +public unsafe partial struct ID3D11Texture2D +{ + public ID3D11Texture2DVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11Texture2DVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetDevice; + public void* @GetPrivateData; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; + public void* @GetType; + public void* @SetEvictionPriority; + public void* @GetEvictionPriority; + public void* @GetDesc; +} + +public unsafe partial struct ID3D11VideoContext +{ + public ID3D11VideoContextVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11VideoContextVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetDevice; + public void* @GetPrivateData; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; + public void* @GetDecoderBuffer; + public void* @ReleaseDecoderBuffer; + public void* @DecoderBeginFrame; + public void* @DecoderEndFrame; + public void* @SubmitDecoderBuffers; + public void* @DecoderExtension; + public 
void* @VideoProcessorSetOutputTargetRect; + public void* @VideoProcessorSetOutputBackgroundColor; + public void* @VideoProcessorSetOutputColorSpace; + public void* @VideoProcessorSetOutputAlphaFillMode; + public void* @VideoProcessorSetOutputConstriction; + public void* @VideoProcessorSetOutputStereoMode; + public void* @VideoProcessorSetOutputExtension; + public void* @VideoProcessorGetOutputTargetRect; + public void* @VideoProcessorGetOutputBackgroundColor; + public void* @VideoProcessorGetOutputColorSpace; + public void* @VideoProcessorGetOutputAlphaFillMode; + public void* @VideoProcessorGetOutputConstriction; + public void* @VideoProcessorGetOutputStereoMode; + public void* @VideoProcessorGetOutputExtension; + public void* @VideoProcessorSetStreamFrameFormat; + public void* @VideoProcessorSetStreamColorSpace; + public void* @VideoProcessorSetStreamOutputRate; + public void* @VideoProcessorSetStreamSourceRect; + public void* @VideoProcessorSetStreamDestRect; + public void* @VideoProcessorSetStreamAlpha; + public void* @VideoProcessorSetStreamPalette; + public void* @VideoProcessorSetStreamPixelAspectRatio; + public void* @VideoProcessorSetStreamLumaKey; + public void* @VideoProcessorSetStreamStereoFormat; + public void* @VideoProcessorSetStreamAutoProcessingMode; + public void* @VideoProcessorSetStreamFilter; + public void* @VideoProcessorSetStreamExtension; + public void* @VideoProcessorGetStreamFrameFormat; + public void* @VideoProcessorGetStreamColorSpace; + public void* @VideoProcessorGetStreamOutputRate; + public void* @VideoProcessorGetStreamSourceRect; + public void* @VideoProcessorGetStreamDestRect; + public void* @VideoProcessorGetStreamAlpha; + public void* @VideoProcessorGetStreamPalette; + public void* @VideoProcessorGetStreamPixelAspectRatio; + public void* @VideoProcessorGetStreamLumaKey; + public void* @VideoProcessorGetStreamStereoFormat; + public void* @VideoProcessorGetStreamAutoProcessingMode; + public void* @VideoProcessorGetStreamFilter; + 
public void* @VideoProcessorGetStreamExtension; + public void* @VideoProcessorBlt; + public void* @NegotiateCryptoSessionKeyExchange; + public void* @EncryptionBlt; + public void* @DecryptionBlt; + public void* @StartSessionKeyRefresh; + public void* @FinishSessionKeyRefresh; + public void* @GetEncryptionBltKey; + public void* @NegotiateAuthenticatedChannelKeyExchange; + public void* @QueryAuthenticatedChannel; + public void* @ConfigureAuthenticatedChannel; + public void* @VideoProcessorSetStreamRotation; + public void* @VideoProcessorGetStreamRotation; +} + +public unsafe partial struct ID3D11VideoDecoder +{ + public ID3D11VideoDecoderVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11VideoDecoderOutputView +{ + public ID3D11VideoDecoderOutputViewVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11VideoDecoderOutputViewVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetDevice; + public void* @GetPrivateData; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; + public void* @GetResource; + public void* @GetDesc; +} + +public unsafe partial struct ID3D11VideoDecoderVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetDevice; + public void* @GetPrivateData; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; + public void* @GetCreationParameters; + public void* @GetDriverHandle; +} + +public unsafe partial struct ID3D11VideoDevice +{ + public ID3D11VideoDeviceVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11VideoDeviceVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @CreateVideoDecoder; + public void* @CreateVideoProcessor; + public void* @CreateAuthenticatedChannel; + public void* @CreateCryptoSession; + public void* @CreateVideoDecoderOutputView; + public void* @CreateVideoProcessorInputView; + public void* @CreateVideoProcessorOutputView; + 
public void* @CreateVideoProcessorEnumerator; + public void* @GetVideoDecoderProfileCount; + public void* @GetVideoDecoderProfile; + public void* @CheckVideoDecoderFormat; + public void* @GetVideoDecoderConfigCount; + public void* @GetVideoDecoderConfig; + public void* @GetContentProtectionCaps; + public void* @CheckCryptoKeyExchange; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; +} + +public unsafe partial struct IDirect3DDeviceManager9 +{ + public IDirect3DDeviceManager9Vtbl* @lpVtbl; +} + +public unsafe partial struct IDirect3DDeviceManager9Vtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @ResetDevice; + public void* @OpenDeviceHandle; + public void* @CloseDeviceHandle; + public void* @TestDevice; + public void* @LockDevice; + public void* @UnlockDevice; + public void* @GetVideoService; +} + +public unsafe partial struct IDirect3DSurface9 +{ + public IDirect3DSurface9Vtbl* @lpVtbl; +} + +public unsafe partial struct IDirect3DSurface9Vtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetDevice; + public void* @SetPrivateData; + public void* @GetPrivateData; + public void* @FreePrivateData; + public void* @SetPriority; + public void* @GetPriority; + public void* @PreLoad; + public void* @GetType; + public void* @GetContainer; + public void* @GetDesc; + public void* @LockRect; + public void* @UnlockRect; + public void* @GetDC; + public void* @ReleaseDC; +} + +public unsafe partial struct IDirectXVideoDecoder +{ + public IDirectXVideoDecoderVtbl* @lpVtbl; +} + +public unsafe partial struct IDirectXVideoDecoderVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetVideoDecoderService; + public void* @GetCreationParameters; + public void* @GetBuffer; + public void* @ReleaseBuffer; + public void* @BeginFrame; + public void* @EndFrame; + public void* @Execute; +} + +public unsafe 
partial struct RcOverride +{ + public int @start_frame; + public int @end_frame; + public int @qscale; + public float @quality_factor; +} + +public unsafe partial struct SwsFilter +{ + public SwsVector* @lumH; + public SwsVector* @lumV; + public SwsVector* @chrH; + public SwsVector* @chrV; +} + +public unsafe partial struct SwsVector +{ + /// pointer to the list of coefficients + public double* @coeff; + /// number of coefficients in the vector + public int @length; +} + +/// Context for an Audio FIFO Buffer. +/// This struct is incomplete. +public unsafe partial struct AVAudioFifo +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVBPrint +{ +} + +/// Structure for chain/list of bitstream filters. Empty list can be allocated by av_bsf_list_alloc(). +/// This struct is incomplete. +public unsafe partial struct AVBSFList +{ +} + +/// A reference counted buffer type. It is opaque and is meant to be used through references (AVBufferRef). +/// This struct is incomplete. +public unsafe partial struct AVBuffer +{ +} + +/// The buffer pool. This structure is opaque and not meant to be accessed directly. It is allocated with av_buffer_pool_init() and freed with av_buffer_pool_uninit(). +/// This struct is incomplete. +public unsafe partial struct AVBufferPool +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVCodecInternal +{ +} + +/// ********************************************** +/// This struct is incomplete. +public unsafe partial struct AVCodecTag +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVDictionary +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVFilterChannelLayouts +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVFilterCommand +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVFilterFormats +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVFilterGraphInternal +{ +} + +/// This struct is incomplete. 
+public unsafe partial struct AVFilterInternal +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVFilterPad +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVHWDeviceInternal +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVHWFramesInternal +{ +} + +/// Low-complexity tree container +/// This struct is incomplete. +public unsafe partial struct AVTreeNode +{ +} + +/// The libswresample context. Unlike libavcodec and libavformat, this structure is opaque. This means that if you would like to set options, you must use the avoptions API and cannot directly set values to members of the structure. +/// This struct is incomplete. +public unsafe partial struct SwrContext +{ +} + +/// This struct is incomplete. +public unsafe partial struct SwsContext +{ +} + +/// This struct is incomplete. +public unsafe partial struct URLContext +{ +} + diff --git a/FFmpeg.AutoGen.Abstractions/generated/ffmpeg.functions.facade.g.cs b/FFmpeg.AutoGen.Abstractions/generated/ffmpeg.functions.facade.g.cs new file mode 100644 index 00000000..5319d7b1 --- /dev/null +++ b/FFmpeg.AutoGen.Abstractions/generated/ffmpeg.functions.facade.g.cs @@ -0,0 +1,3463 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Abstractions; + +public static unsafe partial class ffmpeg +{ + /// Create an AVABufferSinkParams structure. + [Obsolete()] + public static AVABufferSinkParams* av_abuffersink_params_alloc() => vectors.av_abuffersink_params_alloc(); + + /// Add an index entry into a sorted list. Update the entry if the list already contains it. + /// timestamp in the time base of the given stream + public static int av_add_index_entry(AVStream* @st, long @pos, long @timestamp, int @size, int @distance, int @flags) => vectors.av_add_index_entry(@st, @pos, @timestamp, @size, @distance, @flags); + + /// Add two rationals. 
+ /// First rational + /// Second rational + /// b+c + public static AVRational av_add_q(AVRational @b, AVRational @c) => vectors.av_add_q(@b, @c); + + /// Add a value to a timestamp. + /// Input timestamp time base + /// Input timestamp + /// Time base of `inc` + /// Value to be added + public static long av_add_stable(AVRational @ts_tb, long @ts, AVRational @inc_tb, long @inc) => vectors.av_add_stable(@ts_tb, @ts, @inc_tb, @inc); + + /// Read data and append it to the current content of the AVPacket. If pkt->size is 0 this is identical to av_get_packet. Note that this uses av_grow_packet and thus involves a realloc which is inefficient. Thus this function should only be used when there is no reasonable way to know (an upper bound of) the final size. + /// associated IO context + /// packet + /// amount of data to read + /// >0 (read size) if OK, AVERROR_xxx otherwise, previous data will not be lost even if an error occurs. + public static int av_append_packet(AVIOContext* @s, AVPacket* @pkt, int @size) => vectors.av_append_packet(@s, @pkt, @size); + + /// Allocate an AVAudioFifo. + /// sample format + /// number of channels + /// initial allocation size, in samples + /// newly allocated AVAudioFifo, or NULL on error + public static AVAudioFifo* av_audio_fifo_alloc(AVSampleFormat @sample_fmt, int @channels, int @nb_samples) => vectors.av_audio_fifo_alloc(@sample_fmt, @channels, @nb_samples); + + /// Drain data from an AVAudioFifo. + /// AVAudioFifo to drain + /// number of samples to drain + /// 0 if OK, or negative AVERROR code on failure + public static int av_audio_fifo_drain(AVAudioFifo* @af, int @nb_samples) => vectors.av_audio_fifo_drain(@af, @nb_samples); + + /// Free an AVAudioFifo. + /// AVAudioFifo to free + public static void av_audio_fifo_free(AVAudioFifo* @af) => vectors.av_audio_fifo_free(@af); + + /// Peek data from an AVAudioFifo. 
+ /// AVAudioFifo to read from + /// audio data plane pointers + /// number of samples to peek + /// number of samples actually peek, or negative AVERROR code on failure. The number of samples actually peek will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. + public static int av_audio_fifo_peek(AVAudioFifo* @af, void** @data, int @nb_samples) => vectors.av_audio_fifo_peek(@af, @data, @nb_samples); + + /// Peek data from an AVAudioFifo. + /// AVAudioFifo to read from + /// audio data plane pointers + /// number of samples to peek + /// offset from current read position + /// number of samples actually peek, or negative AVERROR code on failure. The number of samples actually peek will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. + public static int av_audio_fifo_peek_at(AVAudioFifo* @af, void** @data, int @nb_samples, int @offset) => vectors.av_audio_fifo_peek_at(@af, @data, @nb_samples, @offset); + + /// Read data from an AVAudioFifo. + /// AVAudioFifo to read from + /// audio data plane pointers + /// number of samples to read + /// number of samples actually read, or negative AVERROR code on failure. The number of samples actually read will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. + public static int av_audio_fifo_read(AVAudioFifo* @af, void** @data, int @nb_samples) => vectors.av_audio_fifo_read(@af, @data, @nb_samples); + + /// Reallocate an AVAudioFifo. + /// AVAudioFifo to reallocate + /// new allocation size, in samples + /// 0 if OK, or negative AVERROR code on failure + public static int av_audio_fifo_realloc(AVAudioFifo* @af, int @nb_samples) => vectors.av_audio_fifo_realloc(@af, @nb_samples); + + /// Reset the AVAudioFifo buffer. 
+ /// AVAudioFifo to reset + public static void av_audio_fifo_reset(AVAudioFifo* @af) => vectors.av_audio_fifo_reset(@af); + + /// Get the current number of samples in the AVAudioFifo available for reading. + /// the AVAudioFifo to query + /// number of samples available for reading + public static int av_audio_fifo_size(AVAudioFifo* @af) => vectors.av_audio_fifo_size(@af); + + /// Get the current number of samples in the AVAudioFifo available for writing. + /// the AVAudioFifo to query + /// number of samples available for writing + public static int av_audio_fifo_space(AVAudioFifo* @af) => vectors.av_audio_fifo_space(@af); + + /// Write data to an AVAudioFifo. + /// AVAudioFifo to write to + /// audio data plane pointers + /// number of samples to write + /// number of samples actually written, or negative AVERROR code on failure. If successful, the number of samples actually written will always be nb_samples. + public static int av_audio_fifo_write(AVAudioFifo* @af, void** @data, int @nb_samples) => vectors.av_audio_fifo_write(@af, @data, @nb_samples); + + /// Append a description of a channel layout to a bprint buffer. + [Obsolete("use av_channel_layout_describe()")] + public static void av_bprint_channel_layout(AVBPrint* @bp, int @nb_channels, ulong @channel_layout) => vectors.av_bprint_channel_layout(@bp, @nb_channels, @channel_layout); + + /// Allocate a context for a given bitstream filter. The caller must fill in the context parameters as described in the documentation and then call av_bsf_init() before sending any data to the filter. + /// the filter for which to allocate an instance. + /// a pointer into which the pointer to the newly-allocated context will be written. It must be freed with av_bsf_free() after the filtering is done. 
+ /// 0 on success, a negative AVERROR code on failure + public static int av_bsf_alloc(AVBitStreamFilter* @filter, AVBSFContext** @ctx) => vectors.av_bsf_alloc(@filter, @ctx); + + /// Reset the internal bitstream filter state. Should be called e.g. when seeking. + public static void av_bsf_flush(AVBSFContext* @ctx) => vectors.av_bsf_flush(@ctx); + + /// Free a bitstream filter context and everything associated with it; write NULL into the supplied pointer. + public static void av_bsf_free(AVBSFContext** @ctx) => vectors.av_bsf_free(@ctx); + + /// Returns a bitstream filter with the specified name or NULL if no such bitstream filter exists. + /// a bitstream filter with the specified name or NULL if no such bitstream filter exists. + public static AVBitStreamFilter* av_bsf_get_by_name(string @name) => vectors.av_bsf_get_by_name(@name); + + /// Get the AVClass for AVBSFContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + public static AVClass* av_bsf_get_class() => vectors.av_bsf_get_class(); + + /// Get null/pass-through bitstream filter. + /// Pointer to be set to new instance of pass-through bitstream filter + public static int av_bsf_get_null_filter(AVBSFContext** @bsf) => vectors.av_bsf_get_null_filter(@bsf); + + /// Prepare the filter for use, after all the parameters and options have been set. + public static int av_bsf_init(AVBSFContext* @ctx) => vectors.av_bsf_init(@ctx); + + /// Iterate over all registered bitstream filters. + /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. + /// the next registered bitstream filter or NULL when the iteration is finished + public static AVBitStreamFilter* av_bsf_iterate(void** @opaque) => vectors.av_bsf_iterate(@opaque); + + /// Allocate empty list of bitstream filters. The list must be later freed by av_bsf_list_free() or finalized by av_bsf_list_finalize(). 
+ /// Pointer to on success, NULL in case of failure + public static AVBSFList* av_bsf_list_alloc() => vectors.av_bsf_list_alloc(); + + /// Append bitstream filter to the list of bitstream filters. + /// List to append to + /// Filter context to be appended + /// >=0 on success, negative AVERROR in case of failure + public static int av_bsf_list_append(AVBSFList* @lst, AVBSFContext* @bsf) => vectors.av_bsf_list_append(@lst, @bsf); + + /// Construct new bitstream filter context given it's name and options and append it to the list of bitstream filters. + /// List to append to + /// Name of the bitstream filter + /// Options for the bitstream filter, can be set to NULL + /// >=0 on success, negative AVERROR in case of failure + public static int av_bsf_list_append2(AVBSFList* @lst, string @bsf_name, AVDictionary** @options) => vectors.av_bsf_list_append2(@lst, @bsf_name, @options); + + /// Finalize list of bitstream filters. + /// Filter list structure to be transformed + /// Pointer to be set to newly created structure representing the chain of bitstream filters + /// >=0 on success, negative AVERROR in case of failure + public static int av_bsf_list_finalize(AVBSFList** @lst, AVBSFContext** @bsf) => vectors.av_bsf_list_finalize(@lst, @bsf); + + /// Free list of bitstream filters. + /// Pointer to pointer returned by av_bsf_list_alloc() + public static void av_bsf_list_free(AVBSFList** @lst) => vectors.av_bsf_list_free(@lst); + + /// Parse string describing list of bitstream filters and create single AVBSFContext describing the whole chain of bitstream filters. Resulting AVBSFContext can be treated as any other AVBSFContext freshly allocated by av_bsf_alloc(). 
+ /// String describing chain of bitstream filters in format `bsf1[=opt1=val1:opt2=val2][,bsf2]` + /// Pointer to be set to newly created structure representing the chain of bitstream filters + /// >=0 on success, negative AVERROR in case of failure + public static int av_bsf_list_parse_str(string @str, AVBSFContext** @bsf) => vectors.av_bsf_list_parse_str(@str, @bsf); + + /// Retrieve a filtered packet. + /// this struct will be filled with the contents of the filtered packet. It is owned by the caller and must be freed using av_packet_unref() when it is no longer needed. This parameter should be "clean" (i.e. freshly allocated with av_packet_alloc() or unreffed with av_packet_unref()) when this function is called. If this function returns successfully, the contents of pkt will be completely overwritten by the returned data. On failure, pkt is not touched. + /// - 0 on success. - AVERROR(EAGAIN) if more packets need to be sent to the filter (using av_bsf_send_packet()) to get more output. - AVERROR_EOF if there will be no further output from the filter. - Another negative AVERROR value if an error occurs. + public static int av_bsf_receive_packet(AVBSFContext* @ctx, AVPacket* @pkt) => vectors.av_bsf_receive_packet(@ctx, @pkt); + + /// Submit a packet for filtering. + /// the packet to filter. The bitstream filter will take ownership of the packet and reset the contents of pkt. pkt is not touched if an error occurs. If pkt is empty (i.e. NULL, or pkt->data is NULL and pkt->side_data_elems zero), it signals the end of the stream (i.e. no more non-empty packets will be sent; sending more empty packets does nothing) and will cause the filter to output any packets it may have buffered internally. + /// - 0 on success. - AVERROR(EAGAIN) if packets need to be retrieved from the filter (using av_bsf_receive_packet()) before new input can be consumed. - Another negative AVERROR value if an error occurs. 
+ public static int av_bsf_send_packet(AVBSFContext* @ctx, AVPacket* @pkt) => vectors.av_bsf_send_packet(@ctx, @pkt); + + /// Allocate an AVBuffer of the given size using av_malloc(). + /// an AVBufferRef of given size or NULL when out of memory + public static AVBufferRef* av_buffer_alloc(ulong @size) => vectors.av_buffer_alloc(@size); + + /// Same as av_buffer_alloc(), except the returned buffer will be initialized to zero. + public static AVBufferRef* av_buffer_allocz(ulong @size) => vectors.av_buffer_allocz(@size); + + /// Create an AVBuffer from an existing array. + /// data array + /// size of data in bytes + /// a callback for freeing this buffer's data + /// parameter to be got for processing or passed to free + /// a combination of AV_BUFFER_FLAG_* + /// an AVBufferRef referring to data on success, NULL on failure. + public static AVBufferRef* av_buffer_create(byte* @data, ulong @size, av_buffer_create_free_func @free, void* @opaque, int @flags) => vectors.av_buffer_create(@data, @size, @free, @opaque, @flags); + + /// Default free callback, which calls av_free() on the buffer data. This function is meant to be passed to av_buffer_create(), not called directly. + public static void av_buffer_default_free(void* @opaque, byte* @data) => vectors.av_buffer_default_free(@opaque, @data); + + /// Returns the opaque parameter set by av_buffer_create. + /// the opaque parameter set by av_buffer_create. + public static void* av_buffer_get_opaque(AVBufferRef* @buf) => vectors.av_buffer_get_opaque(@buf); + + public static int av_buffer_get_ref_count(AVBufferRef* @buf) => vectors.av_buffer_get_ref_count(@buf); + + /// Returns 1 if the caller may write to the data referred to by buf (which is true if and only if buf is the only reference to the underlying AVBuffer). Return 0 otherwise. A positive answer is valid until av_buffer_ref() is called on buf. 
+ /// 1 if the caller may write to the data referred to by buf (which is true if and only if buf is the only reference to the underlying AVBuffer). Return 0 otherwise. A positive answer is valid until av_buffer_ref() is called on buf. + public static int av_buffer_is_writable(AVBufferRef* @buf) => vectors.av_buffer_is_writable(@buf); + + /// Create a writable reference from a given buffer reference, avoiding data copy if possible. + /// buffer reference to make writable. On success, buf is either left untouched, or it is unreferenced and a new writable AVBufferRef is written in its place. On failure, buf is left untouched. + /// 0 on success, a negative AVERROR on failure. + public static int av_buffer_make_writable(AVBufferRef** @buf) => vectors.av_buffer_make_writable(@buf); + + /// Query the original opaque parameter of an allocated buffer in the pool. + /// a buffer reference to a buffer returned by av_buffer_pool_get. + /// the opaque parameter set by the buffer allocator function of the buffer pool. + public static void* av_buffer_pool_buffer_get_opaque(AVBufferRef* @ref) => vectors.av_buffer_pool_buffer_get_opaque(@ref); + + /// Allocate a new AVBuffer, reusing an old buffer from the pool when available. This function may be called simultaneously from multiple threads. + /// a reference to the new buffer on success, NULL on error. + public static AVBufferRef* av_buffer_pool_get(AVBufferPool* @pool) => vectors.av_buffer_pool_get(@pool); + + /// Allocate and initialize a buffer pool. + /// size of each buffer in this pool + /// a function that will be used to allocate new buffers when the pool is empty. May be NULL, then the default allocator will be used (av_buffer_alloc()). + /// newly created buffer pool on success, NULL on error. + public static AVBufferPool* av_buffer_pool_init(ulong @size, av_buffer_pool_init_alloc_func @alloc) => vectors.av_buffer_pool_init(@size, @alloc); + + /// Allocate and initialize a buffer pool with a more complex allocator. 
+ /// size of each buffer in this pool + /// arbitrary user data used by the allocator + /// a function that will be used to allocate new buffers when the pool is empty. May be NULL, then the default allocator will be used (av_buffer_alloc()). + /// a function that will be called immediately before the pool is freed. I.e. after av_buffer_pool_uninit() is called by the caller and all the frames are returned to the pool and freed. It is intended to uninitialize the user opaque data. May be NULL. + /// newly created buffer pool on success, NULL on error. + public static AVBufferPool* av_buffer_pool_init2(ulong @size, void* @opaque, av_buffer_pool_init2_alloc_func @alloc, av_buffer_pool_init2_pool_free_func @pool_free) => vectors.av_buffer_pool_init2(@size, @opaque, @alloc, @pool_free); + + /// Mark the pool as being available for freeing. It will actually be freed only once all the allocated buffers associated with the pool are released. Thus it is safe to call this function while some of the allocated buffers are still in use. + /// pointer to the pool to be freed. It will be set to NULL. + public static void av_buffer_pool_uninit(AVBufferPool** @pool) => vectors.av_buffer_pool_uninit(@pool); + + /// Reallocate a given buffer. + /// a buffer reference to reallocate. On success, buf will be unreferenced and a new reference with the required size will be written in its place. On failure buf will be left untouched. *buf may be NULL, then a new buffer is allocated. + /// required new buffer size. + /// 0 on success, a negative AVERROR on failure. + public static int av_buffer_realloc(AVBufferRef** @buf, ulong @size) => vectors.av_buffer_realloc(@buf, @size); + + /// Create a new reference to an AVBuffer. + /// a new AVBufferRef referring to the same AVBuffer as buf or NULL on failure. + public static AVBufferRef* av_buffer_ref(AVBufferRef* @buf) => vectors.av_buffer_ref(@buf); + + /// Ensure dst refers to the same data as src. 
+ /// Pointer to either a valid buffer reference or NULL. On success, this will point to a buffer reference equivalent to src. On failure, dst will be left untouched. + /// A buffer reference to replace dst with. May be NULL, then this function is equivalent to av_buffer_unref(dst). + /// 0 on success AVERROR(ENOMEM) on memory allocation failure. + public static int av_buffer_replace(AVBufferRef** @dst, AVBufferRef* @src) => vectors.av_buffer_replace(@dst, @src); + + /// Free a given reference and automatically free the buffer if there are no more references to it. + /// the reference to be freed. The pointer is set to NULL on return. + public static void av_buffer_unref(AVBufferRef** @buf) => vectors.av_buffer_unref(@buf); + + public static int av_buffersink_get_ch_layout(AVFilterContext* @ctx, AVChannelLayout* @ch_layout) => vectors.av_buffersink_get_ch_layout(@ctx, @ch_layout); + + [Obsolete()] + public static ulong av_buffersink_get_channel_layout(AVFilterContext* @ctx) => vectors.av_buffersink_get_channel_layout(@ctx); + + public static int av_buffersink_get_channels(AVFilterContext* @ctx) => vectors.av_buffersink_get_channels(@ctx); + + public static int av_buffersink_get_format(AVFilterContext* @ctx) => vectors.av_buffersink_get_format(@ctx); + + /// Get a frame with filtered data from sink and put it in frame. + /// pointer to a context of a buffersink or abuffersink AVFilter. + /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() + /// - >= 0 if a frame was successfully returned. - AVERROR(EAGAIN) if no frames are available at this point; more input frames must be added to the filtergraph to get more output. - AVERROR_EOF if there will be no more output frames on this sink. - A different negative AVERROR code in other failure cases. 
+ public static int av_buffersink_get_frame(AVFilterContext* @ctx, AVFrame* @frame) => vectors.av_buffersink_get_frame(@ctx, @frame); + + /// Get a frame with filtered data from sink and put it in frame. + /// pointer to a buffersink or abuffersink filter context. + /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() + /// a combination of AV_BUFFERSINK_FLAG_* flags + /// >= 0 in for success, a negative AVERROR code for failure. + public static int av_buffersink_get_frame_flags(AVFilterContext* @ctx, AVFrame* @frame, int @flags) => vectors.av_buffersink_get_frame_flags(@ctx, @frame, @flags); + + public static AVRational av_buffersink_get_frame_rate(AVFilterContext* @ctx) => vectors.av_buffersink_get_frame_rate(@ctx); + + public static int av_buffersink_get_h(AVFilterContext* @ctx) => vectors.av_buffersink_get_h(@ctx); + + public static AVBufferRef* av_buffersink_get_hw_frames_ctx(AVFilterContext* @ctx) => vectors.av_buffersink_get_hw_frames_ctx(@ctx); + + public static AVRational av_buffersink_get_sample_aspect_ratio(AVFilterContext* @ctx) => vectors.av_buffersink_get_sample_aspect_ratio(@ctx); + + public static int av_buffersink_get_sample_rate(AVFilterContext* @ctx) => vectors.av_buffersink_get_sample_rate(@ctx); + + /// Same as av_buffersink_get_frame(), but with the ability to specify the number of samples read. This function is less efficient than av_buffersink_get_frame(), because it copies the data around. + /// pointer to a context of the abuffersink AVFilter. + /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() frame will contain exactly nb_samples audio samples, except at the end of stream, when it can contain less than nb_samples. + /// The return codes have the same meaning as for av_buffersink_get_frame(). 
+ public static int av_buffersink_get_samples(AVFilterContext* @ctx, AVFrame* @frame, int @nb_samples) => vectors.av_buffersink_get_samples(@ctx, @frame, @nb_samples); + + public static AVRational av_buffersink_get_time_base(AVFilterContext* @ctx) => vectors.av_buffersink_get_time_base(@ctx); + + /// Get the properties of the stream @{ + public static AVMediaType av_buffersink_get_type(AVFilterContext* @ctx) => vectors.av_buffersink_get_type(@ctx); + + public static int av_buffersink_get_w(AVFilterContext* @ctx) => vectors.av_buffersink_get_w(@ctx); + + /// Create an AVBufferSinkParams structure. + [Obsolete()] + public static AVBufferSinkParams* av_buffersink_params_alloc() => vectors.av_buffersink_params_alloc(); + + /// Set the frame size for an audio buffer sink. + public static void av_buffersink_set_frame_size(AVFilterContext* @ctx, uint @frame_size) => vectors.av_buffersink_set_frame_size(@ctx, @frame_size); + + /// Add a frame to the buffer source. + /// an instance of the buffersrc filter + /// frame to be added. If the frame is reference counted, this function will take ownership of the reference(s) and reset the frame. Otherwise the frame data will be copied. If this function returns an error, the input frame is not touched. + /// 0 on success, a negative AVERROR on error. + public static int av_buffersrc_add_frame(AVFilterContext* @ctx, AVFrame* @frame) => vectors.av_buffersrc_add_frame(@ctx, @frame); + + /// Add a frame to the buffer source. + /// pointer to a buffer source context + /// a frame, or NULL to mark EOF + /// a combination of AV_BUFFERSRC_FLAG_* + /// >= 0 in case of success, a negative AVERROR code in case of failure + public static int av_buffersrc_add_frame_flags(AVFilterContext* @buffer_src, AVFrame* @frame, int @flags) => vectors.av_buffersrc_add_frame_flags(@buffer_src, @frame, @flags); + + /// Close the buffer source after EOF. 
+ public static int av_buffersrc_close(AVFilterContext* @ctx, long @pts, uint @flags) => vectors.av_buffersrc_close(@ctx, @pts, @flags); + + /// Get the number of failed requests. + public static uint av_buffersrc_get_nb_failed_requests(AVFilterContext* @buffer_src) => vectors.av_buffersrc_get_nb_failed_requests(@buffer_src); + + /// Allocate a new AVBufferSrcParameters instance. It should be freed by the caller with av_free(). + public static AVBufferSrcParameters* av_buffersrc_parameters_alloc() => vectors.av_buffersrc_parameters_alloc(); + + /// Initialize the buffersrc or abuffersrc filter with the provided parameters. This function may be called multiple times, the later calls override the previous ones. Some of the parameters may also be set through AVOptions, then whatever method is used last takes precedence. + /// an instance of the buffersrc or abuffersrc filter + /// the stream parameters. The frames later passed to this filter must conform to those parameters. All the allocated fields in param remain owned by the caller, libavfilter will make internal copies or references when necessary. + /// 0 on success, a negative AVERROR code on failure. + public static int av_buffersrc_parameters_set(AVFilterContext* @ctx, AVBufferSrcParameters* @param) => vectors.av_buffersrc_parameters_set(@ctx, @param); + + /// Add a frame to the buffer source. + /// an instance of the buffersrc filter + /// frame to be added. If the frame is reference counted, this function will make a new reference to it. Otherwise the frame data will be copied. + /// 0 on success, a negative AVERROR on error + public static int av_buffersrc_write_frame(AVFilterContext* @ctx, AVFrame* @frame) => vectors.av_buffersrc_write_frame(@ctx, @frame); + + /// Allocate a memory block for an array with av_mallocz(). 
+ /// Number of elements + /// Size of the single element + /// Pointer to the allocated block, or `NULL` if the block cannot be allocated + public static void* av_calloc(ulong @nmemb, ulong @size) => vectors.av_calloc(@nmemb, @size); + + /// Get a human readable string describing a given channel. + /// pre-allocated buffer where to put the generated string + /// size in bytes of the buffer. + /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. + public static int av_channel_description(byte* @buf, ulong @buf_size, AVChannel @channel) => vectors.av_channel_description(@buf, @buf_size, @channel); + + /// bprint variant of av_channel_description(). + public static void av_channel_description_bprint(AVBPrint* @bp, AVChannel @channel_id) => vectors.av_channel_description_bprint(@bp, @channel_id); + + /// This is the inverse function of av_channel_name(). + /// the channel with the given name AV_CHAN_NONE when name does not identify a known channel + public static AVChannel av_channel_from_string(string @name) => vectors.av_channel_from_string(@name); + + /// Get the channel with the given index in a channel layout. + /// input channel layout + /// channel with the index idx in channel_layout on success or AV_CHAN_NONE on failure (if idx is not valid or the channel order is unspecified) + public static AVChannel av_channel_layout_channel_from_index(AVChannelLayout* @channel_layout, uint @idx) => vectors.av_channel_layout_channel_from_index(@channel_layout, @idx); + + /// Get a channel described by the given string. 
+ /// input channel layout + /// a channel described by the given string in channel_layout on success or AV_CHAN_NONE on failure (if the string is not valid or the channel order is unspecified) + public static AVChannel av_channel_layout_channel_from_string(AVChannelLayout* @channel_layout, string @name) => vectors.av_channel_layout_channel_from_string(@channel_layout, @name); + + /// Check whether a channel layout is valid, i.e. can possibly describe audio data. + /// input channel layout + /// 1 if channel_layout is valid, 0 otherwise. + public static int av_channel_layout_check(AVChannelLayout* @channel_layout) => vectors.av_channel_layout_check(@channel_layout); + + /// Check whether two channel layouts are semantically the same, i.e. the same channels are present on the same positions in both. + /// input channel layout + /// input channel layout + /// 0 if chl and chl1 are equal, 1 if they are not equal. A negative AVERROR code if one or both are invalid. + public static int av_channel_layout_compare(AVChannelLayout* @chl, AVChannelLayout* @chl1) => vectors.av_channel_layout_compare(@chl, @chl1); + + /// Make a copy of a channel layout. This differs from just assigning src to dst in that it allocates and copies the map for AV_CHANNEL_ORDER_CUSTOM. + /// destination channel layout + /// source channel layout + /// 0 on success, a negative AVERROR on error. + public static int av_channel_layout_copy(AVChannelLayout* @dst, AVChannelLayout* @src) => vectors.av_channel_layout_copy(@dst, @src); + + /// Get the default channel layout for a given number of channels. + /// number of channels + public static void av_channel_layout_default(AVChannelLayout* @ch_layout, int @nb_channels) => vectors.av_channel_layout_default(@ch_layout, @nb_channels); + + /// Get a human-readable string describing the channel layout properties. 
The string will be in the same format that is accepted by av_channel_layout_from_string(), allowing to rebuild the same channel layout, except for opaque pointers. + /// channel layout to be described + /// pre-allocated buffer where to put the generated string + /// size in bytes of the buffer. + /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. + public static int av_channel_layout_describe(AVChannelLayout* @channel_layout, byte* @buf, ulong @buf_size) => vectors.av_channel_layout_describe(@channel_layout, @buf, @buf_size); + + /// bprint variant of av_channel_layout_describe(). + /// 0 on success, or a negative AVERROR value on failure. + public static int av_channel_layout_describe_bprint(AVChannelLayout* @channel_layout, AVBPrint* @bp) => vectors.av_channel_layout_describe_bprint(@channel_layout, @bp); + + /// Get the channel with the given index in channel_layout. + [Obsolete("use av_channel_layout_channel_from_index()")] + public static ulong av_channel_layout_extract_channel(ulong @channel_layout, int @index) => vectors.av_channel_layout_extract_channel(@channel_layout, @index); + + /// Initialize a native channel layout from a bitmask indicating which channels are present. + /// the layout structure to be initialized + /// bitmask describing the channel layout + /// 0 on success AVERROR(EINVAL) for invalid mask values + public static int av_channel_layout_from_mask(AVChannelLayout* @channel_layout, ulong @mask) => vectors.av_channel_layout_from_mask(@channel_layout, @mask); + + /// Initialize a channel layout from a given string description. The input string can be represented by: - the formal channel layout name (returned by av_channel_layout_describe()) - single or multiple channel names (returned by av_channel_name(), eg. "FL", or concatenated with "+", each optionally containing a custom name after a "", eg. 
"FL+FR+LFE") - a decimal or hexadecimal value of a native channel layout (eg. "4" or "0x4") - the number of channels with default layout (eg. "4c") - the number of unordered channels (eg. "4C" or "4 channels") - the ambisonic order followed by optional non-diegetic channels (eg. "ambisonic 2+stereo") + /// input channel layout + /// string describing the channel layout + /// 0 channel layout was detected, AVERROR_INVALIDATATA otherwise + public static int av_channel_layout_from_string(AVChannelLayout* @channel_layout, string @str) => vectors.av_channel_layout_from_string(@channel_layout, @str); + + /// Get the index of a given channel in a channel layout. In case multiple channels are found, only the first match will be returned. + /// input channel layout + /// index of channel in channel_layout on success or a negative number if channel is not present in channel_layout. + public static int av_channel_layout_index_from_channel(AVChannelLayout* @channel_layout, AVChannel @channel) => vectors.av_channel_layout_index_from_channel(@channel_layout, @channel); + + /// Get the index in a channel layout of a channel described by the given string. In case multiple channels are found, only the first match will be returned. + /// input channel layout + /// a channel index described by the given string, or a negative AVERROR value. + public static int av_channel_layout_index_from_string(AVChannelLayout* @channel_layout, string @name) => vectors.av_channel_layout_index_from_string(@channel_layout, @name); + + /// Iterate over all standard channel layouts. + /// a pointer where libavutil will store the iteration state. Must point to NULL to start the iteration. + /// the standard channel layout or NULL when the iteration is finished + public static AVChannelLayout* av_channel_layout_standard(void** @opaque) => vectors.av_channel_layout_standard(@opaque); + + /// Find out what channels from a given set are present in a channel layout, without regard for their positions. 
+ /// input channel layout + /// a combination of AV_CH_* representing a set of channels + /// a bitfield representing all the channels from mask that are present in channel_layout + public static ulong av_channel_layout_subset(AVChannelLayout* @channel_layout, ulong @mask) => vectors.av_channel_layout_subset(@channel_layout, @mask); + + /// Free any allocated data in the channel layout and reset the channel count to 0. + /// the layout structure to be uninitialized + public static void av_channel_layout_uninit(AVChannelLayout* @channel_layout) => vectors.av_channel_layout_uninit(@channel_layout); + + /// Get a human readable string in an abbreviated form describing a given channel. This is the inverse function of av_channel_from_string(). + /// pre-allocated buffer where to put the generated string + /// size in bytes of the buffer. + /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. + public static int av_channel_name(byte* @buf, ulong @buf_size, AVChannel @channel) => vectors.av_channel_name(@buf, @buf_size, @channel); + + /// bprint variant of av_channel_name(). + public static void av_channel_name_bprint(AVBPrint* @bp, AVChannel @channel_id) => vectors.av_channel_name_bprint(@bp, @channel_id); + + /// Returns the AVChromaLocation value for name or an AVError if not found. + /// the AVChromaLocation value for name or an AVError if not found. + public static int av_chroma_location_from_name(string @name) => vectors.av_chroma_location_from_name(@name); + + /// Returns the name for provided chroma location or NULL if unknown. + /// the name for provided chroma location or NULL if unknown. + public static string av_chroma_location_name(AVChromaLocation @location) => vectors.av_chroma_location_name(@location); + + /// Get the AVCodecID for the given codec tag tag. If no codec id is found returns AV_CODEC_ID_NONE. 
+ /// list of supported codec_id-codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + /// codec tag to match to a codec ID + public static AVCodecID av_codec_get_id(AVCodecTag** @tags, uint @tag) => vectors.av_codec_get_id(@tags, @tag); + + /// Get the codec tag for the given codec id id. If no codec tag is found returns 0. + /// list of supported codec_id-codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + /// codec ID to match to a codec tag + public static uint av_codec_get_tag(AVCodecTag** @tags, AVCodecID @id) => vectors.av_codec_get_tag(@tags, @id); + + /// Get the codec tag for the given codec id. + /// list of supported codec_id - codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + /// codec id that should be searched for in the list + /// A pointer to the found tag + /// 0 if id was not found in tags, > 0 if it was found + public static int av_codec_get_tag2(AVCodecTag** @tags, AVCodecID @id, uint* @tag) => vectors.av_codec_get_tag2(@tags, @id, @tag); + + /// Returns a non-zero number if codec is a decoder, zero otherwise + /// a non-zero number if codec is a decoder, zero otherwise + public static int av_codec_is_decoder(AVCodec* @codec) => vectors.av_codec_is_decoder(@codec); + + /// Returns a non-zero number if codec is an encoder, zero otherwise + /// a non-zero number if codec is an encoder, zero otherwise + public static int av_codec_is_encoder(AVCodec* @codec) => vectors.av_codec_is_encoder(@codec); + + /// Iterate over all registered codecs. + /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. + /// the next registered codec or NULL when the iteration is finished + public static AVCodec* av_codec_iterate(void** @opaque) => vectors.av_codec_iterate(@opaque); + + /// Returns the AVColorPrimaries value for name or an AVError if not found. 
+ /// the AVColorPrimaries value for name or an AVError if not found. + public static int av_color_primaries_from_name(string @name) => vectors.av_color_primaries_from_name(@name); + + /// Returns the name for provided color primaries or NULL if unknown. + /// the name for provided color primaries or NULL if unknown. + public static string av_color_primaries_name(AVColorPrimaries @primaries) => vectors.av_color_primaries_name(@primaries); + + /// Returns the AVColorRange value for name or an AVError if not found. + /// the AVColorRange value for name or an AVError if not found. + public static int av_color_range_from_name(string @name) => vectors.av_color_range_from_name(@name); + + /// Returns the name for provided color range or NULL if unknown. + /// the name for provided color range or NULL if unknown. + public static string av_color_range_name(AVColorRange @range) => vectors.av_color_range_name(@range); + + /// Returns the AVColorSpace value for name or an AVError if not found. + /// the AVColorSpace value for name or an AVError if not found. + public static int av_color_space_from_name(string @name) => vectors.av_color_space_from_name(@name); + + /// Returns the name for provided color space or NULL if unknown. + /// the name for provided color space or NULL if unknown. + public static string av_color_space_name(AVColorSpace @space) => vectors.av_color_space_name(@space); + + /// Returns the AVColorTransferCharacteristic value for name or an AVError if not found. + /// the AVColorTransferCharacteristic value for name or an AVError if not found. + public static int av_color_transfer_from_name(string @name) => vectors.av_color_transfer_from_name(@name); + + /// Returns the name for provided color transfer or NULL if unknown. + /// the name for provided color transfer or NULL if unknown. 
+ public static string av_color_transfer_name(AVColorTransferCharacteristic @transfer) => vectors.av_color_transfer_name(@transfer); + + /// Compare the remainders of two integer operands divided by a common divisor. + /// Divisor; must be a power of 2 + /// - a negative value if `a % mod < b % mod` - a positive value if `a % mod > b % mod` - zero if `a % mod == b % mod` + public static long av_compare_mod(ulong @a, ulong @b, ulong @mod) => vectors.av_compare_mod(@a, @b, @mod); + + /// Compare two timestamps each in its own time base. + /// One of the following values: - -1 if `ts_a` is before `ts_b` - 1 if `ts_a` is after `ts_b` - 0 if they represent the same position + public static int av_compare_ts(long @ts_a, AVRational @tb_a, long @ts_b, AVRational @tb_b) => vectors.av_compare_ts(@ts_a, @tb_a, @ts_b, @tb_b); + + /// Allocate an AVContentLightMetadata structure and set its fields to default values. The resulting struct can be freed using av_freep(). + /// An AVContentLightMetadata filled with default values or NULL on failure. + public static AVContentLightMetadata* av_content_light_metadata_alloc(ulong* @size) => vectors.av_content_light_metadata_alloc(@size); + + /// Allocate a complete AVContentLightMetadata and add it to the frame. + /// The frame which side data is added to. + /// The AVContentLightMetadata structure to be filled by caller. + public static AVContentLightMetadata* av_content_light_metadata_create_side_data(AVFrame* @frame) => vectors.av_content_light_metadata_create_side_data(@frame); + + /// Allocate a CPB properties structure and initialize its fields to default values. + /// if non-NULL, the size of the allocated struct will be written here. This is useful for embedding it in side data. + /// the newly allocated struct or NULL on failure + public static AVCPBProperties* av_cpb_properties_alloc(ulong* @size) => vectors.av_cpb_properties_alloc(@size); + + /// Returns the number of logical CPU cores present. 
+ /// the number of logical CPU cores present. + public static int av_cpu_count() => vectors.av_cpu_count(); + + /// Overrides cpu count detection and forces the specified count. Count < 1 disables forcing of specific count. + public static void av_cpu_force_count(int @count) => vectors.av_cpu_force_count(@count); + + /// Get the maximum data alignment that may be required by FFmpeg. + public static ulong av_cpu_max_align() => vectors.av_cpu_max_align(); + + /// Convert a double precision floating point number to a rational. + /// `double` to convert + /// Maximum allowed numerator and denominator + /// `d` in AVRational form + public static AVRational av_d2q(double @d, int @max) => vectors.av_d2q(@d, @max); + + /// Allocate an AVD3D11VAContext. + /// Newly-allocated AVD3D11VAContext or NULL on failure. + public static AVD3D11VAContext* av_d3d11va_alloc_context() => vectors.av_d3d11va_alloc_context(); + + public static AVClassCategory av_default_get_category(void* @ptr) => vectors.av_default_get_category(@ptr); + + /// Return the context name + /// The AVClass context + /// The AVClass class_name + public static string av_default_item_name(void* @ctx) => vectors.av_default_item_name(@ctx); + + /// Iterate over all registered demuxers. + /// a pointer where libavformat will store the iteration state. Must point to NULL to start the iteration. + /// the next registered demuxer or NULL when the iteration is finished + public static AVInputFormat* av_demuxer_iterate(void** @opaque) => vectors.av_demuxer_iterate(@opaque); + + /// Copy entries from one AVDictionary struct into another. + /// pointer to a pointer to a AVDictionary struct. If *dst is NULL, this function will allocate a struct for you and put it in *dst + /// pointer to source AVDictionary struct + /// flags to use when setting entries in *dst + /// 0 on success, negative AVERROR code on failure. If dst was allocated by this function, callers should free the associated memory. 
+ public static int av_dict_copy(AVDictionary** @dst, AVDictionary* @src, int @flags) => vectors.av_dict_copy(@dst, @src, @flags); + + /// Get number of entries in dictionary. + /// dictionary + /// number of entries in dictionary + public static int av_dict_count(AVDictionary* @m) => vectors.av_dict_count(@m); + + /// Free all the memory allocated for an AVDictionary struct and all keys and values. + public static void av_dict_free(AVDictionary** @m) => vectors.av_dict_free(@m); + + /// Get a dictionary entry with matching key. + /// matching key + /// Set to the previous matching element to find the next. If set to NULL the first matching element is returned. + /// a collection of AV_DICT_* flags controlling how the entry is retrieved + /// found entry or NULL in case no matching entry was found in the dictionary + public static AVDictionaryEntry* av_dict_get(AVDictionary* @m, string @key, AVDictionaryEntry* @prev, int @flags) => vectors.av_dict_get(@m, @key, @prev, @flags); + + /// Get dictionary entries as a string. + /// dictionary + /// Pointer to buffer that will be allocated with string containing entries. Buffer must be freed by the caller when it is no longer needed. + /// character used to separate key from value + /// character used to separate two pairs from each other + /// >= 0 on success, negative on error + public static int av_dict_get_string(AVDictionary* @m, byte** @buffer, byte @key_val_sep, byte @pairs_sep) => vectors.av_dict_get_string(@m, @buffer, @key_val_sep, @pairs_sep); + + /// Parse the key/value pairs list and add the parsed entries to a dictionary. + /// a 0-terminated list of characters used to separate key from value + /// a 0-terminated list of characters used to separate two pairs from each other + /// flags to use when adding to dictionary. AV_DICT_DONT_STRDUP_KEY and AV_DICT_DONT_STRDUP_VAL are ignored since the key/value tokens will always be duplicated. 
+ /// 0 on success, negative AVERROR code on failure + public static int av_dict_parse_string(AVDictionary** @pm, string @str, string @key_val_sep, string @pairs_sep, int @flags) => vectors.av_dict_parse_string(@pm, @str, @key_val_sep, @pairs_sep, @flags); + + /// Set the given entry in *pm, overwriting an existing entry. + /// pointer to a pointer to a dictionary struct. If *pm is NULL a dictionary struct is allocated and put in *pm. + /// entry key to add to *pm (will either be av_strduped or added as a new key depending on flags) + /// entry value to add to *pm (will be av_strduped or added as a new key depending on flags). Passing a NULL value will cause an existing entry to be deleted. + /// >= 0 on success otherwise an error code < 0 + public static int av_dict_set(AVDictionary** @pm, string @key, string @value, int @flags) => vectors.av_dict_set(@pm, @key, @value, @flags); + + /// Convenience wrapper for av_dict_set that converts the value to a string and stores it. + public static int av_dict_set_int(AVDictionary** @pm, string @key, long @value, int @flags) => vectors.av_dict_set_int(@pm, @key, @value, @flags); + + /// Returns The AV_DISPOSITION_* flag corresponding to disp or a negative error code if disp does not correspond to a known stream disposition. + /// The AV_DISPOSITION_* flag corresponding to disp or a negative error code if disp does not correspond to a known stream disposition. + public static int av_disposition_from_string(string @disp) => vectors.av_disposition_from_string(@disp); + + /// Returns The string description corresponding to the lowest set bit in disposition. NULL when the lowest set bit does not correspond to a known disposition or when disposition is 0. + /// a combination of AV_DISPOSITION_* values + /// The string description corresponding to the lowest set bit in disposition. NULL when the lowest set bit does not correspond to a known disposition or when disposition is 0. 
+ public static string av_disposition_to_string(int @disposition) => vectors.av_disposition_to_string(@disposition); + + /// Divide one rational by another. + /// First rational + /// Second rational + /// b/c + public static AVRational av_div_q(AVRational @b, AVRational @c) => vectors.av_div_q(@b, @c); + + /// Print detailed information about the input or output format, such as duration, bitrate, streams, container, programs, metadata, side data, codec and time base. + /// the context to analyze + /// index of the stream to dump information about + /// the URL to print, such as source or destination file + /// Select whether the specified context is an input(0) or output(1) + public static void av_dump_format(AVFormatContext* @ic, int @index, string @url, int @is_output) => vectors.av_dump_format(@ic, @index, @url, @is_output); + + /// Allocate an AVDynamicHDRPlus structure and set its fields to default values. The resulting struct can be freed using av_freep(). + /// An AVDynamicHDRPlus filled with default values or NULL on failure. + public static AVDynamicHDRPlus* av_dynamic_hdr_plus_alloc(ulong* @size) => vectors.av_dynamic_hdr_plus_alloc(@size); + + /// Allocate a complete AVDynamicHDRPlus and add it to the frame. + /// The frame which side data is added to. + /// The AVDynamicHDRPlus structure to be filled by caller or NULL on failure. + public static AVDynamicHDRPlus* av_dynamic_hdr_plus_create_side_data(AVFrame* @frame) => vectors.av_dynamic_hdr_plus_create_side_data(@frame); + + /// Add the pointer to an element to a dynamic array. + /// Pointer to the array to grow + /// Pointer to the number of elements in the array + /// Element to add + public static void av_dynarray_add(void* @tab_ptr, int* @nb_ptr, void* @elem) => vectors.av_dynarray_add(@tab_ptr, @nb_ptr, @elem); + + /// Add an element to a dynamic array. 
+ /// >=0 on success, negative otherwise + public static int av_dynarray_add_nofree(void* @tab_ptr, int* @nb_ptr, void* @elem) => vectors.av_dynarray_add_nofree(@tab_ptr, @nb_ptr, @elem); + + /// Add an element of size `elem_size` to a dynamic array. + /// Pointer to the array to grow + /// Pointer to the number of elements in the array + /// Size in bytes of an element in the array + /// Pointer to the data of the element to add. If `NULL`, the space of the newly added element is allocated but left uninitialized. + /// Pointer to the data of the element to copy in the newly allocated space + public static void* av_dynarray2_add(void** @tab_ptr, int* @nb_ptr, ulong @elem_size, byte* @elem_data) => vectors.av_dynarray2_add(@tab_ptr, @nb_ptr, @elem_size, @elem_data); + + /// Allocate a buffer, reusing the given one if large enough. + /// Pointer to pointer to an already allocated buffer. `*ptr` will be overwritten with pointer to new buffer on success or `NULL` on failure + /// Pointer to the size of buffer `*ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure. + /// Desired minimal size of buffer `*ptr` + public static void av_fast_malloc(void* @ptr, uint* @size, ulong @min_size) => vectors.av_fast_malloc(@ptr, @size, @min_size); + + /// Allocate and clear a buffer, reusing the given one if large enough. + /// Pointer to pointer to an already allocated buffer. `*ptr` will be overwritten with pointer to new buffer on success or `NULL` on failure + /// Pointer to the size of buffer `*ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure. + /// Desired minimal size of buffer `*ptr` + public static void av_fast_mallocz(void* @ptr, uint* @size, ulong @min_size) => vectors.av_fast_mallocz(@ptr, @size, @min_size); + + /// Same behaviour av_fast_malloc but the buffer has additional AV_INPUT_BUFFER_PADDING_SIZE at the end which will always be 0. 
+ public static void av_fast_padded_malloc(void* @ptr, uint* @size, ulong @min_size) => vectors.av_fast_padded_malloc(@ptr, @size, @min_size); + + /// Same behaviour av_fast_padded_malloc except that buffer will always be 0-initialized after call. + public static void av_fast_padded_mallocz(void* @ptr, uint* @size, ulong @min_size) => vectors.av_fast_padded_mallocz(@ptr, @size, @min_size); + + /// Reallocate the given buffer if it is not large enough, otherwise do nothing. + /// Already allocated buffer, or `NULL` + /// Pointer to the size of buffer `ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure. + /// Desired minimal size of buffer `ptr` + /// `ptr` if the buffer is large enough, a pointer to newly reallocated buffer if the buffer was not large enough, or `NULL` in case of error + public static void* av_fast_realloc(void* @ptr, uint* @size, ulong @min_size) => vectors.av_fast_realloc(@ptr, @size, @min_size); + + /// Read the file with name filename, and put its content in a newly allocated buffer or map it with mmap() when available. In case of success set *bufptr to the read or mmapped buffer, and *size to the size in bytes of the buffer in *bufptr. Unlike mmap this function succeeds with zero sized files, in this case *bufptr will be set to NULL and *size will be set to 0. The returned buffer must be released with av_file_unmap(). + /// loglevel offset used for logging + /// context used for logging + /// a non negative number in case of success, a negative value corresponding to an AVERROR error code in case of failure + public static int av_file_map(string @filename, byte** @bufptr, ulong* @size, int @log_offset, void* @log_ctx) => vectors.av_file_map(@filename, @bufptr, @size, @log_offset, @log_ctx); + + /// Unmap or free the buffer bufptr created by av_file_map(). 
+ /// size in bytes of bufptr, must be the same as returned by av_file_map() + public static void av_file_unmap(byte* @bufptr, ulong @size) => vectors.av_file_unmap(@bufptr, @size); + + /// Check whether filename actually is a numbered sequence generator. + /// possible numbered sequence string + /// 1 if a valid numbered sequence string, 0 otherwise + public static int av_filename_number_test(string @filename) => vectors.av_filename_number_test(@filename); + + /// Iterate over all registered filters. + /// a pointer where libavfilter will store the iteration state. Must point to NULL to start the iteration. + /// the next registered filter or NULL when the iteration is finished + public static AVFilter* av_filter_iterate(void** @opaque) => vectors.av_filter_iterate(@opaque); + + /// Compute what kind of losses will occur when converting from one specific pixel format to another. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. These losses can involve loss of chroma, but also loss of resolution, loss of color depth, loss due to the color space conversion, loss of the alpha bits or loss due to color quantization. av_get_pix_fmt_loss() informs you about the various types of losses which will occur when converting from one pixel format to another. + /// source pixel format + /// Whether the source pixel format alpha channel is used. + /// Combination of flags informing you what kind of losses will occur (maximum loss for an invalid dst_pix_fmt). + public static AVPixelFormat av_find_best_pix_fmt_of_2(AVPixelFormat @dst_pix_fmt1, AVPixelFormat @dst_pix_fmt2, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr) => vectors.av_find_best_pix_fmt_of_2(@dst_pix_fmt1, @dst_pix_fmt2, @src_pix_fmt, @has_alpha, @loss_ptr); + + /// Find the "best" stream in the file. 
The best stream is determined according to various heuristics as the most likely to be what the user expects. If the decoder parameter is non-NULL, av_find_best_stream will find the default decoder for the stream's codec; streams for which no decoder can be found are ignored. + /// media file handle + /// stream type: video, audio, subtitles, etc. + /// user-requested stream number, or -1 for automatic selection + /// try to find a stream related (eg. in the same program) to this one, or -1 if none + /// if non-NULL, returns the decoder for the selected stream + /// flags; none are currently defined + /// the non-negative stream number in case of success, AVERROR_STREAM_NOT_FOUND if no stream with the requested type could be found, AVERROR_DECODER_NOT_FOUND if streams were found but no decoder + public static int av_find_best_stream(AVFormatContext* @ic, AVMediaType @type, int @wanted_stream_nb, int @related_stream, AVCodec** @decoder_ret, int @flags) => vectors.av_find_best_stream(@ic, @type, @wanted_stream_nb, @related_stream, @decoder_ret, @flags); + + public static int av_find_default_stream_index(AVFormatContext* @s) => vectors.av_find_default_stream_index(@s); + + /// Find AVInputFormat based on the short name of the input format. + public static AVInputFormat* av_find_input_format(string @short_name) => vectors.av_find_input_format(@short_name); + + /// Find the value in a list of rationals nearest a given reference rational. + /// Reference rational + /// Array of rationals terminated by `{0, 0}` + /// Index of the nearest value found in the array + public static int av_find_nearest_q_idx(AVRational @q, AVRational* @q_list) => vectors.av_find_nearest_q_idx(@q, @q_list); + + /// Find the programs which belong to a given stream. 
+ /// media file handle + /// the last found program, the search will start after this program, or from the beginning if it is NULL + /// stream index + /// the next program which belongs to s, NULL if no program is found or the last program is not among the programs of ic. + public static AVProgram* av_find_program_from_stream(AVFormatContext* @ic, AVProgram* @last, int @s) => vectors.av_find_program_from_stream(@ic, @last, @s); + + /// Returns the method used to set ctx->duration. + /// AVFMT_DURATION_FROM_PTS, AVFMT_DURATION_FROM_STREAM, or AVFMT_DURATION_FROM_BITRATE. + public static AVDurationEstimationMethod av_fmt_ctx_get_duration_estimation_method(AVFormatContext* @ctx) => vectors.av_fmt_ctx_get_duration_estimation_method(@ctx); + + /// Open a file using a UTF-8 filename. The API of this function matches POSIX fopen(), errors are returned through errno. + [Obsolete("Avoid using it, as on Windows, the FILE* allocated by this function may be allocated with a different CRT than the caller who uses the FILE*. No replacement provided in public API.")] + public static _iobuf* av_fopen_utf8(string @path, string @mode) => vectors.av_fopen_utf8(@path, @mode); + + /// Disables cpu detection and forces the specified flags. -1 is a special case that disables forcing of specific flags. + public static void av_force_cpu_flags(int @flags) => vectors.av_force_cpu_flags(@flags); + + /// This function will cause global side data to be injected in the next packet of each stream as well as after any subsequent seek. + public static void av_format_inject_global_side_data(AVFormatContext* @s) => vectors.av_format_inject_global_side_data(@s); + + /// Fill the provided buffer with a string containing a FourCC (four-character code) representation. 
+ /// a buffer with size in bytes of at least AV_FOURCC_MAX_STRING_SIZE + /// the fourcc to represent + /// the buffer in input + public static byte* av_fourcc_make_string(byte* @buf, uint @fourcc) => vectors.av_fourcc_make_string(@buf, @fourcc); + + /// Allocate an AVFrame and set its fields to default values. The resulting struct must be freed using av_frame_free(). + /// An AVFrame filled with default values or NULL on failure. + public static AVFrame* av_frame_alloc() => vectors.av_frame_alloc(); + + /// Crop the given video AVFrame according to its crop_left/crop_top/crop_right/ crop_bottom fields. If cropping is successful, the function will adjust the data pointers and the width/height fields, and set the crop fields to 0. + /// the frame which should be cropped + /// Some combination of AV_FRAME_CROP_* flags, or 0. + /// >= 0 on success, a negative AVERROR on error. If the cropping fields were invalid, AVERROR(ERANGE) is returned, and nothing is changed. + public static int av_frame_apply_cropping(AVFrame* @frame, int @flags) => vectors.av_frame_apply_cropping(@frame, @flags); + + /// Create a new frame that references the same data as src. + /// newly created AVFrame on success, NULL on error. + public static AVFrame* av_frame_clone(AVFrame* @src) => vectors.av_frame_clone(@src); + + /// Copy the frame data from src to dst. + /// >= 0 on success, a negative AVERROR on error. + public static int av_frame_copy(AVFrame* @dst, AVFrame* @src) => vectors.av_frame_copy(@dst, @src); + + /// Copy only "metadata" fields from src to dst. + public static int av_frame_copy_props(AVFrame* @dst, AVFrame* @src) => vectors.av_frame_copy_props(@dst, @src); + + /// Free the frame and any dynamically allocated objects in it, e.g. extended_data. If the frame is reference counted, it will be unreferenced first. + /// frame to be freed. The pointer will be set to NULL. 
+ public static void av_frame_free(AVFrame** @frame) => vectors.av_frame_free(@frame); + + /// Allocate new buffer(s) for audio or video data. + /// frame in which to store the new buffers. + /// Required buffer size alignment. If equal to 0, alignment will be chosen automatically for the current CPU. It is highly recommended to pass 0 here unless you know what you are doing. + /// 0 on success, a negative AVERROR on error. + public static int av_frame_get_buffer(AVFrame* @frame, int @align) => vectors.av_frame_get_buffer(@frame, @align); + + /// Get the buffer reference a given data plane is stored in. + /// index of the data plane of interest in frame->extended_data. + /// the buffer reference that contains the plane or NULL if the input frame is not valid. + public static AVBufferRef* av_frame_get_plane_buffer(AVFrame* @frame, int @plane) => vectors.av_frame_get_plane_buffer(@frame, @plane); + + /// Returns a pointer to the side data of a given type on success, NULL if there is no side data with such type in this frame. + /// a pointer to the side data of a given type on success, NULL if there is no side data with such type in this frame. + public static AVFrameSideData* av_frame_get_side_data(AVFrame* @frame, AVFrameSideDataType @type) => vectors.av_frame_get_side_data(@frame, @type); + + /// Check if the frame data is writable. + /// A positive value if the frame data is writable (which is true if and only if each of the underlying buffers has only one reference, namely the one stored in this frame). Return 0 otherwise. + public static int av_frame_is_writable(AVFrame* @frame) => vectors.av_frame_is_writable(@frame); + + /// Ensure that the frame data is writable, avoiding data copy if possible. + /// 0 on success, a negative AVERROR on error. + public static int av_frame_make_writable(AVFrame* @frame) => vectors.av_frame_make_writable(@frame); + + /// Move everything contained in src to dst and reset src. 
+ public static void av_frame_move_ref(AVFrame* @dst, AVFrame* @src) => vectors.av_frame_move_ref(@dst, @src); + + /// Add a new side data to a frame. + /// a frame to which the side data should be added + /// type of the added side data + /// size of the side data + /// newly added side data on success, NULL on error + public static AVFrameSideData* av_frame_new_side_data(AVFrame* @frame, AVFrameSideDataType @type, ulong @size) => vectors.av_frame_new_side_data(@frame, @type, @size); + + /// Add a new side data to a frame from an existing AVBufferRef + /// a frame to which the side data should be added + /// the type of the added side data + /// an AVBufferRef to add as side data. The ownership of the reference is transferred to the frame. + /// newly added side data on success, NULL on error. On failure the frame is unchanged and the AVBufferRef remains owned by the caller. + public static AVFrameSideData* av_frame_new_side_data_from_buf(AVFrame* @frame, AVFrameSideDataType @type, AVBufferRef* @buf) => vectors.av_frame_new_side_data_from_buf(@frame, @type, @buf); + + /// Set up a new reference to the data described by the source frame. + /// 0 on success, a negative AVERROR on error + public static int av_frame_ref(AVFrame* @dst, AVFrame* @src) => vectors.av_frame_ref(@dst, @src); + + /// Remove and free all side data instances of the given type. + public static void av_frame_remove_side_data(AVFrame* @frame, AVFrameSideDataType @type) => vectors.av_frame_remove_side_data(@frame, @type); + + /// Returns a string identifying the side data type + /// a string identifying the side data type + public static string av_frame_side_data_name(AVFrameSideDataType @type) => vectors.av_frame_side_data_name(@type); + + /// Unreference all the buffers referenced by frame and reset the frame fields. 
+ public static void av_frame_unref(AVFrame* @frame) => vectors.av_frame_unref(@frame); + + /// Free a memory block which has been allocated with a function of av_malloc() or av_realloc() family. + /// Pointer to the memory block which should be freed. + public static void av_free(void* @ptr) => vectors.av_free(@ptr); + + /// Free a memory block which has been allocated with a function of av_malloc() or av_realloc() family, and set the pointer pointing to it to `NULL`. + /// Pointer to the pointer to the memory block which should be freed + public static void av_freep(void* @ptr) => vectors.av_freep(@ptr); + + /// Compute the greatest common divisor of two integer operands. + /// GCD of a and b up to sign; if a >= 0 and b >= 0, return value is >= 0; if a == 0 and b == 0, returns 0. + public static long av_gcd(long @a, long @b) => vectors.av_gcd(@a, @b); + + /// Return the best rational so that a and b are multiple of it. If the resulting denominator is larger than max_den, return def. + public static AVRational av_gcd_q(AVRational @a, AVRational @b, int @max_den, AVRational @def) => vectors.av_gcd_q(@a, @b, @max_den, @def); + + /// Return the planar<->packed alternative form of the given sample format, or AV_SAMPLE_FMT_NONE on error. If the passed sample_fmt is already in the requested planar/packed format, the format returned is the same as the input. + public static AVSampleFormat av_get_alt_sample_fmt(AVSampleFormat @sample_fmt, int @planar) => vectors.av_get_alt_sample_fmt(@sample_fmt, @planar); + + /// Return audio frame duration. + /// codec context + /// size of the frame, or 0 if unknown + /// frame duration, in samples, if known. 0 if not able to determine. + public static int av_get_audio_frame_duration(AVCodecContext* @avctx, int @frame_bytes) => vectors.av_get_audio_frame_duration(@avctx, @frame_bytes); + + /// This function is the same as av_get_audio_frame_duration(), except it works with AVCodecParameters instead of an AVCodecContext. 
+ public static int av_get_audio_frame_duration2(AVCodecParameters* @par, int @frame_bytes) => vectors.av_get_audio_frame_duration2(@par, @frame_bytes); + + /// Return the number of bits per pixel used by the pixel format described by pixdesc. Note that this is not the same as the number of bits per sample. + public static int av_get_bits_per_pixel(AVPixFmtDescriptor* @pixdesc) => vectors.av_get_bits_per_pixel(@pixdesc); + + /// Return codec bits per sample. + /// the codec + /// Number of bits per sample or zero if unknown for the given codec. + public static int av_get_bits_per_sample(AVCodecID @codec_id) => vectors.av_get_bits_per_sample(@codec_id); + + /// Return number of bytes per sample. + /// the sample format + /// number of bytes per sample or zero if unknown for the given sample format + public static int av_get_bytes_per_sample(AVSampleFormat @sample_fmt) => vectors.av_get_bytes_per_sample(@sample_fmt); + + /// Get the description of a given channel. + /// a channel layout with a single channel + /// channel description on success, NULL on error + [Obsolete("use av_channel_description()")] + public static string av_get_channel_description(ulong @channel) => vectors.av_get_channel_description(@channel); + + /// Return a channel layout id that matches name, or 0 if no match is found. + [Obsolete("use av_channel_layout_from_string()")] + public static ulong av_get_channel_layout(string @name) => vectors.av_get_channel_layout(@name); + + /// Get the index of a channel in channel_layout. + /// a channel layout describing exactly one channel which must be present in channel_layout. + /// index of channel in channel_layout on success, a negative AVERROR on error. + [Obsolete("use av_channel_layout_index_from_channel()")] + public static int av_get_channel_layout_channel_index(ulong @channel_layout, ulong @channel) => vectors.av_get_channel_layout_channel_index(@channel_layout, @channel); + + /// Return the number of channels in the channel layout. 
+ [Obsolete("use AVChannelLayout.nb_channels")] + public static int av_get_channel_layout_nb_channels(ulong @channel_layout) => vectors.av_get_channel_layout_nb_channels(@channel_layout); + + /// Return a description of a channel layout. If nb_channels is <= 0, it is guessed from the channel_layout. + /// put here the string containing the channel layout + /// size in bytes of the buffer + [Obsolete("use av_channel_layout_describe()")] + public static void av_get_channel_layout_string(byte* @buf, int @buf_size, int @nb_channels, ulong @channel_layout) => vectors.av_get_channel_layout_string(@buf, @buf_size, @nb_channels, @channel_layout); + + /// Get the name of a given channel. + /// channel name on success, NULL on error. + [Obsolete("use av_channel_name()")] + public static string av_get_channel_name(ulong @channel) => vectors.av_get_channel_name(@channel); + + /// Get the name of a colorspace. + /// a static string identifying the colorspace; can be NULL. + [Obsolete("use av_color_space_name()")] + public static string av_get_colorspace_name(AVColorSpace @val) => vectors.av_get_colorspace_name(@val); + + /// Return the flags which specify extensions supported by the CPU. The returned value is affected by av_force_cpu_flags() if that was used before. So av_get_cpu_flags() can easily be used in an application to detect the enabled cpu flags. + public static int av_get_cpu_flags() => vectors.av_get_cpu_flags(); + + /// Return default channel layout for a given number of channels. + [Obsolete("use av_channel_layout_default()")] + public static long av_get_default_channel_layout(int @nb_channels) => vectors.av_get_default_channel_layout(@nb_channels); + + /// Return codec bits per sample. Only return non-zero if the bits per sample is exactly correct, not an approximation. + /// the codec + /// Number of bits per sample or zero if unknown for the given codec. 
+ public static int av_get_exact_bits_per_sample(AVCodecID @codec_id) => vectors.av_get_exact_bits_per_sample(@codec_id); + + /// Return a channel layout and the number of channels based on the specified name. + /// channel layout specification string + /// parsed channel layout (0 if unknown) + /// number of channels + /// 0 on success, AVERROR(EINVAL) if the parsing fails. + [Obsolete("use av_channel_layout_from_string()")] + public static int av_get_extended_channel_layout(string @name, ulong* @channel_layout, int* @nb_channels) => vectors.av_get_extended_channel_layout(@name, @channel_layout, @nb_channels); + + public static int av_get_frame_filename(byte* @buf, int @buf_size, string @path, int @number) => vectors.av_get_frame_filename(@buf, @buf_size, @path, @number); + + /// Return in 'buf' the path with '%d' replaced by a number. + /// destination buffer + /// destination buffer size + /// numbered sequence string + /// frame number + /// AV_FRAME_FILENAME_FLAGS_* + /// 0 if OK, -1 on format error + public static int av_get_frame_filename2(byte* @buf, int @buf_size, string @path, int @number, int @flags) => vectors.av_get_frame_filename2(@buf, @buf_size, @path, @number, @flags); + + /// Return a string describing the media_type enum, NULL if media_type is unknown. + public static string av_get_media_type_string(AVMediaType @media_type) => vectors.av_get_media_type_string(@media_type); + + /// Get timing information for the data currently output. The exact meaning of "currently output" depends on the format. It is mostly relevant for devices that have an internal buffer and/or work in real time. + /// media file handle + /// stream in the media file + /// DTS of the last packet output for the stream, in stream time_base units + /// absolute time when that packet was output, in microseconds + /// 0 if OK, AVERROR(ENOSYS) if the format does not support it Note: some formats or devices may not allow to measure dts and wall atomically. 
+ public static int av_get_output_timestamp(AVFormatContext* @s, int @stream, long* @dts, long* @wall) => vectors.av_get_output_timestamp(@s, @stream, @dts, @wall); + + /// Get the packed alternative form of the given sample format. + /// the packed alternative form of the given sample format or AV_SAMPLE_FMT_NONE on error. + public static AVSampleFormat av_get_packed_sample_fmt(AVSampleFormat @sample_fmt) => vectors.av_get_packed_sample_fmt(@sample_fmt); + + /// Allocate and read the payload of a packet and initialize its fields with default values. + /// associated IO context + /// packet + /// desired payload size + /// >0 (read size) if OK, AVERROR_xxx otherwise + public static int av_get_packet(AVIOContext* @s, AVPacket* @pkt, int @size) => vectors.av_get_packet(@s, @pkt, @size); + + /// Return the number of bits per pixel for the pixel format described by pixdesc, including any padding or unused bits. + public static int av_get_padded_bits_per_pixel(AVPixFmtDescriptor* @pixdesc) => vectors.av_get_padded_bits_per_pixel(@pixdesc); + + /// Return the PCM codec associated with a sample format. + /// endianness, 0 for little, 1 for big, -1 (or anything else) for native + /// AV_CODEC_ID_PCM_* or AV_CODEC_ID_NONE + public static AVCodecID av_get_pcm_codec(AVSampleFormat @fmt, int @be) => vectors.av_get_pcm_codec(@fmt, @be); + + /// Return a single letter to describe the given picture type pict_type. + /// the picture type + /// a single character representing the picture type, '?' if pict_type is unknown + public static byte av_get_picture_type_char(AVPictureType @pict_type) => vectors.av_get_picture_type_char(@pict_type); + + /// Return the pixel format corresponding to name. + public static AVPixelFormat av_get_pix_fmt(string @name) => vectors.av_get_pix_fmt(@name); + + /// Compute what kind of losses will occur when converting from one specific pixel format to another. When converting from one pixel format to another, information loss may occur. 
For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. These losses can involve loss of chroma, but also loss of resolution, loss of color depth, loss due to the color space conversion, loss of the alpha bits or loss due to color quantization. av_get_fix_fmt_loss() informs you about the various types of losses which will occur when converting from one pixel format to another. + /// destination pixel format + /// source pixel format + /// Whether the source pixel format alpha channel is used. + /// Combination of flags informing you what kind of losses will occur (maximum loss for an invalid dst_pix_fmt). + public static int av_get_pix_fmt_loss(AVPixelFormat @dst_pix_fmt, AVPixelFormat @src_pix_fmt, int @has_alpha) => vectors.av_get_pix_fmt_loss(@dst_pix_fmt, @src_pix_fmt, @has_alpha); + + /// Return the short name for a pixel format, NULL in case pix_fmt is unknown. + public static string av_get_pix_fmt_name(AVPixelFormat @pix_fmt) => vectors.av_get_pix_fmt_name(@pix_fmt); + + /// Print in buf the string corresponding to the pixel format with number pix_fmt, or a header if pix_fmt is negative. + /// the buffer where to write the string + /// the size of buf + /// the number of the pixel format to print the corresponding info string, or a negative value to print the corresponding header. + public static byte* av_get_pix_fmt_string(byte* @buf, int @buf_size, AVPixelFormat @pix_fmt) => vectors.av_get_pix_fmt_string(@buf, @buf_size, @pix_fmt); + + /// Get the planar alternative form of the given sample format. + /// the planar alternative form of the given sample format or AV_SAMPLE_FMT_NONE on error. + public static AVSampleFormat av_get_planar_sample_fmt(AVSampleFormat @sample_fmt) => vectors.av_get_planar_sample_fmt(@sample_fmt); + + /// Return a name for the specified profile, if available. 
+ /// the codec that is searched for the given profile + /// the profile value for which a name is requested + /// A name for the profile if found, NULL otherwise. + public static string av_get_profile_name(AVCodec* @codec, int @profile) => vectors.av_get_profile_name(@codec, @profile); + + /// Return a sample format corresponding to name, or AV_SAMPLE_FMT_NONE on error. + public static AVSampleFormat av_get_sample_fmt(string @name) => vectors.av_get_sample_fmt(@name); + + /// Return the name of sample_fmt, or NULL if sample_fmt is not recognized. + public static string av_get_sample_fmt_name(AVSampleFormat @sample_fmt) => vectors.av_get_sample_fmt_name(@sample_fmt); + + /// Generate a string corresponding to the sample format with sample_fmt, or a header if sample_fmt is negative. + /// the buffer where to write the string + /// the size of buf + /// the number of the sample format to print the corresponding info string, or a negative value to print the corresponding header. + /// the pointer to the filled buffer or NULL if sample_fmt is unknown or in case of other errors + public static byte* av_get_sample_fmt_string(byte* @buf, int @buf_size, AVSampleFormat @sample_fmt) => vectors.av_get_sample_fmt_string(@buf, @buf_size, @sample_fmt); + + /// Get the value and name of a standard channel layout. + /// index in an internal list, starting at 0 + /// channel layout mask + /// name of the layout + /// 0 if the layout exists, < 0 if index is beyond the limits + [Obsolete("use av_channel_layout_standard()")] + public static int av_get_standard_channel_layout(uint @index, ulong* @layout, byte** @name) => vectors.av_get_standard_channel_layout(@index, @layout, @name); + + /// Return the fractional representation of the internal time base. + public static AVRational av_get_time_base_q() => vectors.av_get_time_base_q(); + + /// Get the current time in microseconds. 
+ public static long av_gettime() => vectors.av_gettime(); + + /// Get the current time in microseconds since some unspecified starting point. On platforms that support it, the time comes from a monotonic clock This property makes this time source ideal for measuring relative time. The returned values may not be monotonic on platforms where a monotonic clock is not available. + public static long av_gettime_relative() => vectors.av_gettime_relative(); + + /// Indicates with a boolean result if the av_gettime_relative() time source is monotonic. + public static int av_gettime_relative_is_monotonic() => vectors.av_gettime_relative_is_monotonic(); + + /// Increase packet size, correctly zeroing padding + /// packet + /// number of bytes by which to increase the size of the packet + public static int av_grow_packet(AVPacket* @pkt, int @grow_by) => vectors.av_grow_packet(@pkt, @grow_by); + + /// Guess the codec ID based upon muxer and filename. + public static AVCodecID av_guess_codec(AVOutputFormat* @fmt, string @short_name, string @filename, string @mime_type, AVMediaType @type) => vectors.av_guess_codec(@fmt, @short_name, @filename, @mime_type, @type); + + /// Return the output format in the list of registered output formats which best matches the provided parameters, or return NULL if there is no match. + /// if non-NULL checks if short_name matches with the names of the registered formats + /// if non-NULL checks if filename terminates with the extensions of the registered formats + /// if non-NULL checks if mime_type matches with the MIME type of the registered formats + public static AVOutputFormat* av_guess_format(string @short_name, string @filename, string @mime_type) => vectors.av_guess_format(@short_name, @filename, @mime_type); + + /// Guess the frame rate, based on both the container and codec information. 
+ /// the format context which the stream is part of + /// the stream which the frame is part of + /// the frame for which the frame rate should be determined, may be NULL + /// the guessed (valid) frame rate, 0/1 if no idea + public static AVRational av_guess_frame_rate(AVFormatContext* @ctx, AVStream* @stream, AVFrame* @frame) => vectors.av_guess_frame_rate(@ctx, @stream, @frame); + + /// Guess the sample aspect ratio of a frame, based on both the stream and the frame aspect ratio. + /// the format context which the stream is part of + /// the stream which the frame is part of + /// the frame with the aspect ratio to be determined + /// the guessed (valid) sample_aspect_ratio, 0/1 if no idea + public static AVRational av_guess_sample_aspect_ratio(AVFormatContext* @format, AVStream* @stream, AVFrame* @frame) => vectors.av_guess_sample_aspect_ratio(@format, @stream, @frame); + + /// Send a nice hexadecimal dump of a buffer to the specified file stream. + /// The file stream pointer where the dump should be sent to. + /// buffer + /// buffer size + public static void av_hex_dump(_iobuf* @f, byte* @buf, int @size) => vectors.av_hex_dump(@f, @buf, @size); + + /// Send a nice hexadecimal dump of a buffer to the log. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message, lower values signifying higher importance. + /// buffer + /// buffer size + public static void av_hex_dump_log(void* @avcl, int @level, byte* @buf, int @size) => vectors.av_hex_dump_log(@avcl, @level, @buf, @size); + + /// Allocate an AVHWDeviceContext for a given hardware type. + /// the type of the hardware device to allocate. + /// a reference to the newly created AVHWDeviceContext on success or NULL on failure. + public static AVBufferRef* av_hwdevice_ctx_alloc(AVHWDeviceType @type) => vectors.av_hwdevice_ctx_alloc(@type); + + /// Open a device of the specified type and create an AVHWDeviceContext for it. 
+ /// On success, a reference to the newly-created device context will be written here. The reference is owned by the caller and must be released with av_buffer_unref() when no longer needed. On failure, NULL will be written to this pointer. + /// The type of the device to create. + /// A type-specific string identifying the device to open. + /// A dictionary of additional (type-specific) options to use in opening the device. The dictionary remains owned by the caller. + /// currently unused + /// 0 on success, a negative AVERROR code on failure. + public static int av_hwdevice_ctx_create(AVBufferRef** @device_ctx, AVHWDeviceType @type, string @device, AVDictionary* @opts, int @flags) => vectors.av_hwdevice_ctx_create(@device_ctx, @type, @device, @opts, @flags); + + /// Create a new device of the specified type from an existing device. + /// On success, a reference to the newly-created AVHWDeviceContext. + /// The type of the new device to create. + /// A reference to an existing AVHWDeviceContext which will be used to create the new device. + /// Currently unused; should be set to zero. + /// Zero on success, a negative AVERROR code on failure. + public static int av_hwdevice_ctx_create_derived(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, int @flags) => vectors.av_hwdevice_ctx_create_derived(@dst_ctx, @type, @src_ctx, @flags); + + /// Create a new device of the specified type from an existing device. + /// On success, a reference to the newly-created AVHWDeviceContext. + /// The type of the new device to create. + /// A reference to an existing AVHWDeviceContext which will be used to create the new device. + /// Options for the new device to create, same format as in av_hwdevice_ctx_create. + /// Currently unused; should be set to zero. + /// Zero on success, a negative AVERROR code on failure. 
+ public static int av_hwdevice_ctx_create_derived_opts(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, AVDictionary* @options, int @flags) => vectors.av_hwdevice_ctx_create_derived_opts(@dst_ctx, @type, @src_ctx, @options, @flags); + + /// Finalize the device context before use. This function must be called after the context is filled with all the required information and before it is used in any way. + /// a reference to the AVHWDeviceContext + /// 0 on success, a negative AVERROR code on failure + public static int av_hwdevice_ctx_init(AVBufferRef* @ref) => vectors.av_hwdevice_ctx_init(@ref); + + /// Look up an AVHWDeviceType by name. + /// String name of the device type (case-insensitive). + /// The type from enum AVHWDeviceType, or AV_HWDEVICE_TYPE_NONE if not found. + public static AVHWDeviceType av_hwdevice_find_type_by_name(string @name) => vectors.av_hwdevice_find_type_by_name(@name); + + /// Get the constraints on HW frames given a device and the HW-specific configuration to be used with that device. If no HW-specific configuration is provided, returns the maximum possible capabilities of the device. + /// a reference to the associated AVHWDeviceContext. + /// a filled HW-specific configuration structure, or NULL to return the maximum possible capabilities of the device. + /// AVHWFramesConstraints structure describing the constraints on the device, or NULL if not available. + public static AVHWFramesConstraints* av_hwdevice_get_hwframe_constraints(AVBufferRef* @ref, void* @hwconfig) => vectors.av_hwdevice_get_hwframe_constraints(@ref, @hwconfig); + + /// Get the string name of an AVHWDeviceType. + /// Type from enum AVHWDeviceType. + /// Pointer to a static string containing the name, or NULL if the type is not valid. + public static string av_hwdevice_get_type_name(AVHWDeviceType @type) => vectors.av_hwdevice_get_type_name(@type); + + /// Allocate a HW-specific configuration structure for a given HW device. 
After use, the user must free all members as required by the specific hardware structure being used, then free the structure itself with av_free(). + /// a reference to the associated AVHWDeviceContext. + /// The newly created HW-specific configuration structure on success or NULL on failure. + public static void* av_hwdevice_hwconfig_alloc(AVBufferRef* @device_ctx) => vectors.av_hwdevice_hwconfig_alloc(@device_ctx); + + /// Iterate over supported device types. + /// The next usable device type from enum AVHWDeviceType, or AV_HWDEVICE_TYPE_NONE if there are no more. + public static AVHWDeviceType av_hwdevice_iterate_types(AVHWDeviceType @prev) => vectors.av_hwdevice_iterate_types(@prev); + + /// Free an AVHWFrameConstraints structure. + /// The (filled or unfilled) AVHWFrameConstraints structure. + public static void av_hwframe_constraints_free(AVHWFramesConstraints** @constraints) => vectors.av_hwframe_constraints_free(@constraints); + + /// Allocate an AVHWFramesContext tied to a given device context. + /// a reference to a AVHWDeviceContext. This function will make a new reference for internal use, the one passed to the function remains owned by the caller. + /// a reference to the newly created AVHWFramesContext on success or NULL on failure. + public static AVBufferRef* av_hwframe_ctx_alloc(AVBufferRef* @device_ctx) => vectors.av_hwframe_ctx_alloc(@device_ctx); + + /// Create and initialise an AVHWFramesContext as a mapping of another existing AVHWFramesContext on a different device. + /// On success, a reference to the newly created AVHWFramesContext. + /// A reference to the device to create the new AVHWFramesContext on. + /// A reference to an existing AVHWFramesContext which will be mapped to the derived context. + /// Some combination of AV_HWFRAME_MAP_* flags, defining the mapping parameters to apply to frames which are allocated in the derived device. + /// Zero on success, negative AVERROR code on failure. 
+ public static int av_hwframe_ctx_create_derived(AVBufferRef** @derived_frame_ctx, AVPixelFormat @format, AVBufferRef* @derived_device_ctx, AVBufferRef* @source_frame_ctx, int @flags) => vectors.av_hwframe_ctx_create_derived(@derived_frame_ctx, @format, @derived_device_ctx, @source_frame_ctx, @flags); + + /// Finalize the context before use. This function must be called after the context is filled with all the required information and before it is attached to any frames. + /// a reference to the AVHWFramesContext + /// 0 on success, a negative AVERROR code on failure + public static int av_hwframe_ctx_init(AVBufferRef* @ref) => vectors.av_hwframe_ctx_init(@ref); + + /// Allocate a new frame attached to the given AVHWFramesContext. + /// a reference to an AVHWFramesContext + /// an empty (freshly allocated or unreffed) frame to be filled with newly allocated buffers. + /// currently unused, should be set to zero + /// 0 on success, a negative AVERROR code on failure + public static int av_hwframe_get_buffer(AVBufferRef* @hwframe_ctx, AVFrame* @frame, int @flags) => vectors.av_hwframe_get_buffer(@hwframe_ctx, @frame, @flags); + + /// Map a hardware frame. + /// Destination frame, to contain the mapping. + /// Source frame, to be mapped. + /// Some combination of AV_HWFRAME_MAP_* flags. + /// Zero on success, negative AVERROR code on failure. + public static int av_hwframe_map(AVFrame* @dst, AVFrame* @src, int @flags) => vectors.av_hwframe_map(@dst, @src, @flags); + + /// Copy data to or from a hw surface. At least one of dst/src must have an AVHWFramesContext attached. + /// the destination frame. dst is not touched on failure. + /// the source frame. + /// currently unused, should be set to zero + /// 0 on success, a negative AVERROR error code on failure. 
+ public static int av_hwframe_transfer_data(AVFrame* @dst, AVFrame* @src, int @flags) => vectors.av_hwframe_transfer_data(@dst, @src, @flags); + + /// Get a list of possible source or target formats usable in av_hwframe_transfer_data(). + /// the frame context to obtain the information for + /// the direction of the transfer + /// the pointer to the output format list will be written here. The list is terminated with AV_PIX_FMT_NONE and must be freed by the caller when no longer needed using av_free(). If this function returns successfully, the format list will have at least one item (not counting the terminator). On failure, the contents of this pointer are unspecified. + /// currently unused, should be set to zero + /// 0 on success, a negative AVERROR code on failure. + public static int av_hwframe_transfer_get_formats(AVBufferRef* @hwframe_ctx, AVHWFrameTransferDirection @dir, AVPixelFormat** @formats, int @flags) => vectors.av_hwframe_transfer_get_formats(@hwframe_ctx, @dir, @formats, @flags); + + /// Allocate an image with size w and h and pixel format pix_fmt, and fill pointers and linesizes accordingly. The allocated image buffer has to be freed by using av_freep(&pointers[0]). + /// the value to use for buffer size alignment + /// the size in bytes required for the image buffer, a negative error code in case of failure + public static int av_image_alloc(ref byte_ptr4 @pointers, ref int4 @linesizes, int @w, int @h, AVPixelFormat @pix_fmt, int @align) => vectors.av_image_alloc(ref @pointers, ref @linesizes, @w, @h, @pix_fmt, @align); + + /// Check if the given sample aspect ratio of an image is valid. 
+ /// width of the image + /// height of the image + /// sample aspect ratio of the image + /// 0 if valid, a negative AVERROR code otherwise + public static int av_image_check_sar(uint @w, uint @h, AVRational @sar) => vectors.av_image_check_sar(@w, @h, @sar); + + /// Check if the given dimension of an image is valid, meaning that all bytes of the image can be addressed with a signed int. + /// the width of the picture + /// the height of the picture + /// the offset to sum to the log level for logging with log_ctx + /// the parent logging context, it may be NULL + /// >= 0 if valid, a negative error code otherwise + public static int av_image_check_size(uint @w, uint @h, int @log_offset, void* @log_ctx) => vectors.av_image_check_size(@w, @h, @log_offset, @log_ctx); + + /// Check if the given dimension of an image is valid, meaning that all bytes of a plane of an image with the specified pix_fmt can be addressed with a signed int. + /// the width of the picture + /// the height of the picture + /// the maximum number of pixels the user wants to accept + /// the pixel format, can be AV_PIX_FMT_NONE if unknown. + /// the offset to sum to the log level for logging with log_ctx + /// the parent logging context, it may be NULL + /// >= 0 if valid, a negative error code otherwise + public static int av_image_check_size2(uint @w, uint @h, long @max_pixels, AVPixelFormat @pix_fmt, int @log_offset, void* @log_ctx) => vectors.av_image_check_size2(@w, @h, @max_pixels, @pix_fmt, @log_offset, @log_ctx); + + /// Copy image in src_data to dst_data. + /// linesizes for the image in dst_data + /// linesizes for the image in src_data + public static void av_image_copy(ref byte_ptr4 @dst_data, ref int4 @dst_linesizes, in byte_ptr4 @src_data, in int4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height) => vectors.av_image_copy(ref @dst_data, ref @dst_linesizes, @src_data, @src_linesizes, @pix_fmt, @width, @height); + + /// Copy image plane from src to dst. 
That is, copy "height" number of lines of "bytewidth" bytes each. The first byte of each successive line is separated by *_linesize bytes. + /// linesize for the image plane in dst + /// linesize for the image plane in src + public static void av_image_copy_plane(byte* @dst, int @dst_linesize, byte* @src, int @src_linesize, int @bytewidth, int @height) => vectors.av_image_copy_plane(@dst, @dst_linesize, @src, @src_linesize, @bytewidth, @height); + + /// Copy image data located in uncacheable (e.g. GPU mapped) memory. Where available, this function will use special functionality for reading from such memory, which may result in greatly improved performance compared to plain av_image_copy_plane(). + public static void av_image_copy_plane_uc_from(byte* @dst, long @dst_linesize, byte* @src, long @src_linesize, long @bytewidth, int @height) => vectors.av_image_copy_plane_uc_from(@dst, @dst_linesize, @src, @src_linesize, @bytewidth, @height); + + /// Copy image data from an image into a buffer. + /// a buffer into which picture data will be copied + /// the size in bytes of dst + /// pointers containing the source image data + /// linesizes for the image in src_data + /// the pixel format of the source image + /// the width of the source image in pixels + /// the height of the source image in pixels + /// the assumed linesize alignment for dst + /// the number of bytes written to dst, or a negative value (error code) on error + public static int av_image_copy_to_buffer(byte* @dst, int @dst_size, in byte_ptr4 @src_data, in int4 @src_linesize, AVPixelFormat @pix_fmt, int @width, int @height, int @align) => vectors.av_image_copy_to_buffer(@dst, @dst_size, @src_data, @src_linesize, @pix_fmt, @width, @height, @align); + + /// Copy image data located in uncacheable (e.g. GPU mapped) memory. Where available, this function will use special functionality for reading from such memory, which may result in greatly improved performance compared to plain av_image_copy(). 
+ public static void av_image_copy_uc_from(ref byte_ptr4 @dst_data, in long4 @dst_linesizes, in byte_ptr4 @src_data, in long4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height) => vectors.av_image_copy_uc_from(ref @dst_data, @dst_linesizes, @src_data, @src_linesizes, @pix_fmt, @width, @height); + + /// Setup the data pointers and linesizes based on the specified image parameters and the provided array. + /// data pointers to be filled in + /// linesizes for the image in dst_data to be filled in + /// buffer which will contain or contains the actual image data, can be NULL + /// the pixel format of the image + /// the width of the image in pixels + /// the height of the image in pixels + /// the value used in src for linesize alignment + /// the size in bytes required for src, a negative error code in case of failure + public static int av_image_fill_arrays(ref byte_ptr4 @dst_data, ref int4 @dst_linesize, byte* @src, AVPixelFormat @pix_fmt, int @width, int @height, int @align) => vectors.av_image_fill_arrays(ref @dst_data, ref @dst_linesize, @src, @pix_fmt, @width, @height, @align); + + /// Overwrite the image data with black. This is suitable for filling a sub-rectangle of an image, meaning the padding between the right most pixel and the left most pixel on the next line will not be overwritten. For some formats, the image size might be rounded up due to inherent alignment. 
+ /// data pointers to destination image + /// linesizes for the destination image + /// the pixel format of the image + /// the color range of the image (important for colorspaces such as YUV) + /// the width of the image in pixels + /// the height of the image in pixels + /// 0 if the image data was cleared, a negative AVERROR code otherwise + public static int av_image_fill_black(ref byte_ptr4 @dst_data, in long4 @dst_linesize, AVPixelFormat @pix_fmt, AVColorRange @range, int @width, int @height) => vectors.av_image_fill_black(ref @dst_data, @dst_linesize, @pix_fmt, @range, @width, @height); + + /// Fill plane linesizes for an image with pixel format pix_fmt and width width. + /// array to be filled with the linesize for each plane + /// >= 0 in case of success, a negative error code otherwise + public static int av_image_fill_linesizes(ref int4 @linesizes, AVPixelFormat @pix_fmt, int @width) => vectors.av_image_fill_linesizes(ref @linesizes, @pix_fmt, @width); + + /// Compute the max pixel step for each plane of an image with a format described by pixdesc. + /// an array which is filled with the max pixel step for each plane. Since a plane may contain different pixel components, the computed max_pixsteps[plane] is relative to the component in the plane with the max pixel step. + /// an array which is filled with the component for each plane which has the max pixel step. May be NULL. + public static void av_image_fill_max_pixsteps(ref int4 @max_pixsteps, ref int4 @max_pixstep_comps, AVPixFmtDescriptor* @pixdesc) => vectors.av_image_fill_max_pixsteps(ref @max_pixsteps, ref @max_pixstep_comps, @pixdesc); + + /// Fill plane sizes for an image with pixel format pix_fmt and height height. 
+ /// the array to be filled with the size of each image plane + /// the array containing the linesize for each plane, should be filled by av_image_fill_linesizes() + /// >= 0 in case of success, a negative error code otherwise + public static int av_image_fill_plane_sizes(ref ulong4 @size, AVPixelFormat @pix_fmt, int @height, in long4 @linesizes) => vectors.av_image_fill_plane_sizes(ref @size, @pix_fmt, @height, @linesizes); + + /// Fill plane data pointers for an image with pixel format pix_fmt and height height. + /// pointers array to be filled with the pointer for each image plane + /// the pointer to a buffer which will contain the image + /// the array containing the linesize for each plane, should be filled by av_image_fill_linesizes() + /// the size in bytes required for the image buffer, a negative error code in case of failure + public static int av_image_fill_pointers(ref byte_ptr4 @data, AVPixelFormat @pix_fmt, int @height, byte* @ptr, in int4 @linesizes) => vectors.av_image_fill_pointers(ref @data, @pix_fmt, @height, @ptr, @linesizes); + + /// Return the size in bytes of the amount of data required to store an image with the given parameters. + /// the pixel format of the image + /// the width of the image in pixels + /// the height of the image in pixels + /// the assumed linesize alignment + /// the buffer size in bytes, a negative error code in case of failure + public static int av_image_get_buffer_size(AVPixelFormat @pix_fmt, int @width, int @height, int @align) => vectors.av_image_get_buffer_size(@pix_fmt, @width, @height, @align); + + /// Compute the size of an image line with format pix_fmt and width width for the plane plane. + /// the computed size in bytes + public static int av_image_get_linesize(AVPixelFormat @pix_fmt, int @width, int @plane) => vectors.av_image_get_linesize(@pix_fmt, @width, @plane); + + /// Get the index for a specific timestamp. 
+ /// stream that the timestamp belongs to + /// timestamp to retrieve the index for + /// if AVSEEK_FLAG_BACKWARD then the returned index will correspond to the timestamp which is < = the requested one, if backward is 0, then it will be >= if AVSEEK_FLAG_ANY seek to any frame, only keyframes otherwise + /// < 0 if no such timestamp could be found + public static int av_index_search_timestamp(AVStream* @st, long @timestamp, int @flags) => vectors.av_index_search_timestamp(@st, @timestamp, @flags); + + /// Initialize optional fields of a packet with default values. + /// packet + [Obsolete("This function is deprecated. Once it's removed, sizeof(AVPacket) will not be a part of the ABI anymore.")] + public static void av_init_packet(AVPacket* @pkt) => vectors.av_init_packet(@pkt); + + /// Audio input devices iterator. + public static AVInputFormat* av_input_audio_device_next(AVInputFormat* @d) => vectors.av_input_audio_device_next(@d); + + /// Video input devices iterator. + public static AVInputFormat* av_input_video_device_next(AVInputFormat* @d) => vectors.av_input_video_device_next(@d); + + /// Compute the length of an integer list. + /// size in bytes of each list element (only 1, 2, 4 or 8) + /// pointer to the list + /// list terminator (usually 0 or -1) + /// length of the list, in elements, not counting the terminator + public static uint av_int_list_length_for_size(uint @elsize, void* @list, ulong @term) => vectors.av_int_list_length_for_size(@elsize, @list, @term); + + /// Write a packet to an output media file ensuring correct interleaving. + /// media file handle + /// The packet containing the data to be written. If the packet is reference-counted, this function will take ownership of this reference and unreference it later when it sees fit. If the packet is not reference-counted, libavformat will make a copy. The returned packet will be blank (as if returned from av_packet_alloc()), even on error. 
This parameter can be NULL (at any time, not just at the end), to flush the interleaving queues. Packet's "stream_index" field must be set to the index of the corresponding stream in "s->streams". The timestamps ( "pts", "dts") must be set to correct values in the stream's timebase (unless the output format is flagged with the AVFMT_NOTIMESTAMPS flag, then they can be set to AV_NOPTS_VALUE). The dts for subsequent packets in one stream must be strictly increasing (unless the output format is flagged with the AVFMT_TS_NONSTRICT, then they merely have to be nondecreasing). "duration" should also be set if known. + /// 0 on success, a negative AVERROR on error. + public static int av_interleaved_write_frame(AVFormatContext* @s, AVPacket* @pkt) => vectors.av_interleaved_write_frame(@s, @pkt); + + /// Write an uncoded frame to an output media file. + /// >=0 for success, a negative code on error + public static int av_interleaved_write_uncoded_frame(AVFormatContext* @s, int @stream_index, AVFrame* @frame) => vectors.av_interleaved_write_uncoded_frame(@s, @stream_index, @frame); + + /// Send the specified message to the log if the level is less than or equal to the current av_log_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct or NULL if general log. + /// The importance level of the message expressed using a "Logging Constant". + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. + public static void av_log(void* @avcl, int @level, string @fmt) => vectors.av_log(@avcl, @level, @fmt); + + /// Default logging callback + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message expressed using a "Logging Constant". 
+ /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. + /// The arguments referenced by the format string. + public static void av_log_default_callback(void* @avcl, int @level, string @fmt, byte* @vl) => vectors.av_log_default_callback(@avcl, @level, @fmt, @vl); + + /// Format a line of log the same way as the default callback. + /// buffer to receive the formatted line + /// size of the buffer + /// used to store whether the prefix must be printed; must point to a persistent integer initially set to 1 + public static void av_log_format_line(void* @ptr, int @level, string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix) => vectors.av_log_format_line(@ptr, @level, @fmt, @vl, @line, @line_size, @print_prefix); + + /// Format a line of log the same way as the default callback. + /// buffer to receive the formatted line; may be NULL if line_size is 0 + /// size of the buffer; at most line_size-1 characters will be written to the buffer, plus one null terminator + /// used to store whether the prefix must be printed; must point to a persistent integer initially set to 1 + /// Returns a negative value if an error occurred, otherwise returns the number of characters that would have been written for a sufficiently large buffer, not including the terminating null character. If the return value is not less than line_size, it means that the log message was truncated to fit the buffer. 
+ public static int av_log_format_line2(void* @ptr, int @level, string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix) => vectors.av_log_format_line2(@ptr, @level, @fmt, @vl, @line, @line_size, @print_prefix); + + public static int av_log_get_flags() => vectors.av_log_get_flags(); + + /// Get the current log level + /// Current log level + public static int av_log_get_level() => vectors.av_log_get_level(); + + /// Send the specified message to the log once with the initial_level and then with the subsequent_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct or NULL if general log. + /// importance level of the message expressed using a "Logging Constant" for the first occurance. + /// importance level of the message expressed using a "Logging Constant" after the first occurance. + /// a variable to keep trak of if a message has already been printed this must be initialized to 0 before the first use. The same state must not be accessed by 2 Threads simultaneously. + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. + public static void av_log_once(void* @avcl, int @initial_level, int @subsequent_level, int* @state, string @fmt) => vectors.av_log_once(@avcl, @initial_level, @subsequent_level, @state, @fmt); + + /// Set the logging callback + /// A logging function with a compatible signature. 
+ public static void av_log_set_callback(av_log_set_callback_callback_func @callback) => vectors.av_log_set_callback(@callback); + + public static void av_log_set_flags(int @arg) => vectors.av_log_set_flags(@arg); + + /// Set the log level + /// Logging level + public static void av_log_set_level(int @level) => vectors.av_log_set_level(@level); + + public static int av_log2(uint @v) => vectors.av_log2(@v); + + public static int av_log2_16bit(uint @v) => vectors.av_log2_16bit(@v); + + /// Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU). + /// Size in bytes for the memory block to be allocated + /// Pointer to the allocated block, or `NULL` if the block cannot be allocated + public static void* av_malloc(ulong @size) => vectors.av_malloc(@size); + + /// Allocate a memory block for an array with av_malloc(). + /// Number of element + /// Size of a single element + /// Pointer to the allocated block, or `NULL` if the block cannot be allocated + public static void* av_malloc_array(ulong @nmemb, ulong @size) => vectors.av_malloc_array(@nmemb, @size); + + /// Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU) and zero all the bytes of the block. + /// Size in bytes for the memory block to be allocated + /// Pointer to the allocated block, or `NULL` if it cannot be allocated + public static void* av_mallocz(ulong @size) => vectors.av_mallocz(@size); + + [Obsolete("use av_calloc()")] + public static void* av_mallocz_array(ulong @nmemb, ulong @size) => vectors.av_mallocz_array(@nmemb, @size); + + /// Allocate an AVMasteringDisplayMetadata structure and set its fields to default values. The resulting struct can be freed using av_freep(). + /// An AVMasteringDisplayMetadata filled with default values or NULL on failure. 
+ public static AVMasteringDisplayMetadata* av_mastering_display_metadata_alloc() => vectors.av_mastering_display_metadata_alloc(); + + /// Allocate a complete AVMasteringDisplayMetadata and add it to the frame. + /// The frame which side data is added to. + /// The AVMasteringDisplayMetadata structure to be filled by caller. + public static AVMasteringDisplayMetadata* av_mastering_display_metadata_create_side_data(AVFrame* @frame) => vectors.av_mastering_display_metadata_create_side_data(@frame); + + /// Return a positive value if the given filename has one of the given extensions, 0 otherwise. + /// file name to check against the given extensions + /// a comma-separated list of filename extensions + public static int av_match_ext(string @filename, string @extensions) => vectors.av_match_ext(@filename, @extensions); + + /// Set the maximum size that may be allocated in one block. + /// Value to be set as the new maximum size + public static void av_max_alloc(ulong @max) => vectors.av_max_alloc(@max); + + /// Overlapping memcpy() implementation. + /// Destination buffer + /// Number of bytes back to start copying (i.e. the initial size of the overlapping window); must be > 0 + /// Number of bytes to copy; must be >= 0 + public static void av_memcpy_backptr(byte* @dst, int @back, int @cnt) => vectors.av_memcpy_backptr(@dst, @back, @cnt); + + /// Duplicate a buffer with av_malloc(). + /// Buffer to be duplicated + /// Size in bytes of the buffer copied + /// Pointer to a newly allocated buffer containing a copy of `p` or `NULL` if the buffer cannot be allocated + public static void* av_memdup(void* @p, ulong @size) => vectors.av_memdup(@p, @size); + + /// Multiply two rationals. + /// First rational + /// Second rational + /// b*c + public static AVRational av_mul_q(AVRational @b, AVRational @c) => vectors.av_mul_q(@b, @c); + + /// Iterate over all registered muxers. + /// a pointer where libavformat will store the iteration state. 
Must point to NULL to start the iteration. + /// the next registered muxer or NULL when the iteration is finished + public static AVOutputFormat* av_muxer_iterate(void** @opaque) => vectors.av_muxer_iterate(@opaque); + + /// Find which of the two rationals is closer to another rational. + /// Rational to be compared against + /// One of the following values: - 1 if `q1` is nearer to `q` than `q2` - -1 if `q2` is nearer to `q` than `q1` - 0 if they have the same distance + public static int av_nearer_q(AVRational @q, AVRational @q1, AVRational @q2) => vectors.av_nearer_q(@q, @q1, @q2); + + /// Allocate the payload of a packet and initialize its fields with default values. + /// packet + /// wanted payload size + /// 0 if OK, AVERROR_xxx otherwise + public static int av_new_packet(AVPacket* @pkt, int @size) => vectors.av_new_packet(@pkt, @size); + + public static AVProgram* av_new_program(AVFormatContext* @s, int @id) => vectors.av_new_program(@s, @id); + + /// Iterate over potential AVOptions-enabled children of parent. + /// a pointer where iteration state is stored. + /// AVClass corresponding to next potential child or NULL + public static AVClass* av_opt_child_class_iterate(AVClass* @parent, void** @iter) => vectors.av_opt_child_class_iterate(@parent, @iter); + + /// Iterate over AVOptions-enabled children of obj. + /// result of a previous call to this function or NULL + /// next AVOptions-enabled child or NULL + public static void* av_opt_child_next(void* @obj, void* @prev) => vectors.av_opt_child_next(@obj, @prev); + + /// Copy options from src object into dest object. 
+ /// Object to copy from + /// Object to copy into + /// 0 on success, negative on error + public static int av_opt_copy(void* @dest, void* @src) => vectors.av_opt_copy(@dest, @src); + + public static int av_opt_eval_double(void* @obj, AVOption* @o, string @val, double* @double_out) => vectors.av_opt_eval_double(@obj, @o, @val, @double_out); + + /// @{ This group of functions can be used to evaluate option strings and get numbers out of them. They do the same thing as av_opt_set(), except the result is written into the caller-supplied pointer. + /// a struct whose first element is a pointer to AVClass. + /// an option for which the string is to be evaluated. + /// string to be evaluated. + /// 0 on success, a negative number on failure. + public static int av_opt_eval_flags(void* @obj, AVOption* @o, string @val, int* @flags_out) => vectors.av_opt_eval_flags(@obj, @o, @val, @flags_out); + + public static int av_opt_eval_float(void* @obj, AVOption* @o, string @val, float* @float_out) => vectors.av_opt_eval_float(@obj, @o, @val, @float_out); + + public static int av_opt_eval_int(void* @obj, AVOption* @o, string @val, int* @int_out) => vectors.av_opt_eval_int(@obj, @o, @val, @int_out); + + public static int av_opt_eval_int64(void* @obj, AVOption* @o, string @val, long* @int64_out) => vectors.av_opt_eval_int64(@obj, @o, @val, @int64_out); + + public static int av_opt_eval_q(void* @obj, AVOption* @o, string @val, AVRational* @q_out) => vectors.av_opt_eval_q(@obj, @o, @val, @q_out); + + /// Look for an option in an object. Consider only options which have all the specified flags set. + /// A pointer to a struct whose first element is a pointer to an AVClass. Alternatively a double pointer to an AVClass, if AV_OPT_SEARCH_FAKE_OBJ search flag is set. + /// The name of the option to look for. + /// When searching for named constants, name of the unit it belongs to. + /// Find only options with all the specified flags set (AV_OPT_FLAG). 
+ /// A combination of AV_OPT_SEARCH_*. + /// A pointer to the option found, or NULL if no option was found. + public static AVOption* av_opt_find(void* @obj, string @name, string @unit, int @opt_flags, int @search_flags) => vectors.av_opt_find(@obj, @name, @unit, @opt_flags, @search_flags); + + /// Look for an option in an object. Consider only options which have all the specified flags set. + /// A pointer to a struct whose first element is a pointer to an AVClass. Alternatively a double pointer to an AVClass, if AV_OPT_SEARCH_FAKE_OBJ search flag is set. + /// The name of the option to look for. + /// When searching for named constants, name of the unit it belongs to. + /// Find only options with all the specified flags set (AV_OPT_FLAG). + /// A combination of AV_OPT_SEARCH_*. + /// if non-NULL, an object to which the option belongs will be written here. It may be different from obj if AV_OPT_SEARCH_CHILDREN is present in search_flags. This parameter is ignored if search_flags contain AV_OPT_SEARCH_FAKE_OBJ. + /// A pointer to the option found, or NULL if no option was found. + public static AVOption* av_opt_find2(void* @obj, string @name, string @unit, int @opt_flags, int @search_flags, void** @target_obj) => vectors.av_opt_find2(@obj, @name, @unit, @opt_flags, @search_flags, @target_obj); + + /// Check whether a particular flag is set in a flags field. + /// the name of the flag field option + /// the name of the flag to check + /// non-zero if the flag is set, zero if the flag isn't set, isn't of the right type, or the flags field doesn't exist. + public static int av_opt_flag_is_set(void* @obj, string @field_name, string @flag_name) => vectors.av_opt_flag_is_set(@obj, @field_name, @flag_name); + + /// Free all allocated objects in obj. + public static void av_opt_free(void* @obj) => vectors.av_opt_free(@obj); + + /// Free an AVOptionRanges struct and set it to NULL. 
+ public static void av_opt_freep_ranges(AVOptionRanges** @ranges) => vectors.av_opt_freep_ranges(@ranges); + + /// @{ Those functions get a value of the option with the given name from an object. + /// a struct whose first element is a pointer to an AVClass. + /// name of the option to get. + /// flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN is passed here, then the option may be found in a child of obj. + /// value of the option will be written here + /// >=0 on success, a negative error code otherwise + public static int av_opt_get(void* @obj, string @name, int @search_flags, byte** @out_val) => vectors.av_opt_get(@obj, @name, @search_flags, @out_val); + + [Obsolete()] + public static int av_opt_get_channel_layout(void* @obj, string @name, int @search_flags, long* @ch_layout) => vectors.av_opt_get_channel_layout(@obj, @name, @search_flags, @ch_layout); + + public static int av_opt_get_chlayout(void* @obj, string @name, int @search_flags, AVChannelLayout* @layout) => vectors.av_opt_get_chlayout(@obj, @name, @search_flags, @layout); + + /// The returned dictionary is a copy of the actual value and must be freed with av_dict_free() by the caller + public static int av_opt_get_dict_val(void* @obj, string @name, int @search_flags, AVDictionary** @out_val) => vectors.av_opt_get_dict_val(@obj, @name, @search_flags, @out_val); + + public static int av_opt_get_double(void* @obj, string @name, int @search_flags, double* @out_val) => vectors.av_opt_get_double(@obj, @name, @search_flags, @out_val); + + public static int av_opt_get_image_size(void* @obj, string @name, int @search_flags, int* @w_out, int* @h_out) => vectors.av_opt_get_image_size(@obj, @name, @search_flags, @w_out, @h_out); + + public static int av_opt_get_int(void* @obj, string @name, int @search_flags, long* @out_val) => vectors.av_opt_get_int(@obj, @name, @search_flags, @out_val); + + /// Extract a key-value pair from the beginning of a string. 
+ /// pointer to the options string, will be updated to point to the rest of the string (one of the pairs_sep or the final NUL) + /// a 0-terminated list of characters used to separate key from value, for example '=' + /// a 0-terminated list of characters used to separate two pairs from each other, for example ':' or ',' + /// flags; see the AV_OPT_FLAG_* values below + /// parsed key; must be freed using av_free() + /// parsed value; must be freed using av_free() + /// >=0 for success, or a negative value corresponding to an AVERROR code in case of error; in particular: AVERROR(EINVAL) if no key is present + public static int av_opt_get_key_value(byte** @ropts, string @key_val_sep, string @pairs_sep, uint @flags, byte** @rkey, byte** @rval) => vectors.av_opt_get_key_value(@ropts, @key_val_sep, @pairs_sep, @flags, @rkey, @rval); + + public static int av_opt_get_pixel_fmt(void* @obj, string @name, int @search_flags, AVPixelFormat* @out_fmt) => vectors.av_opt_get_pixel_fmt(@obj, @name, @search_flags, @out_fmt); + + public static int av_opt_get_q(void* @obj, string @name, int @search_flags, AVRational* @out_val) => vectors.av_opt_get_q(@obj, @name, @search_flags, @out_val); + + public static int av_opt_get_sample_fmt(void* @obj, string @name, int @search_flags, AVSampleFormat* @out_fmt) => vectors.av_opt_get_sample_fmt(@obj, @name, @search_flags, @out_fmt); + + public static int av_opt_get_video_rate(void* @obj, string @name, int @search_flags, AVRational* @out_val) => vectors.av_opt_get_video_rate(@obj, @name, @search_flags, @out_val); + + /// Check if given option is set to its default value. + /// AVClass object to check option on + /// option to be checked + /// >0 when option is set to its default, 0 when option is not set its default, < 0 on error + public static int av_opt_is_set_to_default(void* @obj, AVOption* @o) => vectors.av_opt_is_set_to_default(@obj, @o); + + /// Check if given option is set to its default value. 
+ /// AVClass object to check option on + /// option name + /// combination of AV_OPT_SEARCH_* + /// >0 when option is set to its default, 0 when option is not set its default, < 0 on error + public static int av_opt_is_set_to_default_by_name(void* @obj, string @name, int @search_flags) => vectors.av_opt_is_set_to_default_by_name(@obj, @name, @search_flags); + + /// Iterate over all AVOptions belonging to obj. + /// an AVOptions-enabled struct or a double pointer to an AVClass describing it. + /// result of the previous call to av_opt_next() on this object or NULL + /// next AVOption or NULL + public static AVOption* av_opt_next(void* @obj, AVOption* @prev) => vectors.av_opt_next(@obj, @prev); + + /// @} + public static void* av_opt_ptr(AVClass* @avclass, void* @obj, string @name) => vectors.av_opt_ptr(@avclass, @obj, @name); + + /// Get a list of allowed ranges for the given option. + /// is a bitmask of flags, undefined flags should not be set and should be ignored AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to a AVClass instead of a full instance AV_OPT_MULTI_COMPONENT_RANGE indicates that function may return more than one component, + /// number of compontents returned on success, a negative errro code otherwise + public static int av_opt_query_ranges(AVOptionRanges** @p0, void* @obj, string @key, int @flags) => vectors.av_opt_query_ranges(@p0, @obj, @key, @flags); + + /// Get a default list of allowed ranges for the given option. 
+ /// is a bitmask of flags, undefined flags should not be set and should be ignored AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to a AVClass instead of a full instance AV_OPT_MULTI_COMPONENT_RANGE indicates that function may return more than one component, + /// number of compontents returned on success, a negative errro code otherwise + public static int av_opt_query_ranges_default(AVOptionRanges** @p0, void* @obj, string @key, int @flags) => vectors.av_opt_query_ranges_default(@p0, @obj, @key, @flags); + + /// Serialize object's options. + /// AVClass object to serialize + /// serialize options with all the specified flags set (AV_OPT_FLAG) + /// combination of AV_OPT_SERIALIZE_* flags + /// Pointer to buffer that will be allocated with string containg serialized options. Buffer must be freed by the caller when is no longer needed. + /// character used to separate key from value + /// character used to separate two pairs from each other + /// >= 0 on success, negative on error + public static int av_opt_serialize(void* @obj, int @opt_flags, int @flags, byte** @buffer, byte @key_val_sep, byte @pairs_sep) => vectors.av_opt_serialize(@obj, @opt_flags, @flags, @buffer, @key_val_sep, @pairs_sep); + + /// @{ Those functions set the field of obj with the given name to value. + /// A struct whose first element is a pointer to an AVClass. + /// the name of the field to set + /// The value to set. In case of av_opt_set() if the field is not of a string type, then the given string is parsed. SI postfixes and some named scalars are supported. If the field is of a numeric type, it has to be a numeric or named scalar. Behavior with more than one scalar and +- infix operators is undefined. If the field is of a flags type, it has to be a sequence of numeric scalars or named flags separated by '+' or '-'. Prefixing a flag with '+' causes it to be set without affecting the other flags; similarly, '-' unsets a flag. 
If the field is of a dictionary type, it has to be a ':' separated list of key=value parameters. Values containing ':' special characters must be escaped. + /// flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN is passed here, then the option may be set on a child of obj. + /// 0 if the value has been set, or an AVERROR code in case of error: AVERROR_OPTION_NOT_FOUND if no matching option exists AVERROR(ERANGE) if the value is out of range AVERROR(EINVAL) if the value is not valid + public static int av_opt_set(void* @obj, string @name, string @val, int @search_flags) => vectors.av_opt_set(@obj, @name, @val, @search_flags); + + public static int av_opt_set_bin(void* @obj, string @name, byte* @val, int @size, int @search_flags) => vectors.av_opt_set_bin(@obj, @name, @val, @size, @search_flags); + + [Obsolete()] + public static int av_opt_set_channel_layout(void* @obj, string @name, long @ch_layout, int @search_flags) => vectors.av_opt_set_channel_layout(@obj, @name, @ch_layout, @search_flags); + + public static int av_opt_set_chlayout(void* @obj, string @name, AVChannelLayout* @layout, int @search_flags) => vectors.av_opt_set_chlayout(@obj, @name, @layout, @search_flags); + + /// Set the values of all AVOption fields to their default values. + /// an AVOption-enabled struct (its first member must be a pointer to AVClass) + public static void av_opt_set_defaults(void* @s) => vectors.av_opt_set_defaults(@s); + + /// Set the values of all AVOption fields to their default values. Only these AVOption fields for which (opt->flags & mask) == flags will have their default applied to s. + /// an AVOption-enabled struct (its first member must be a pointer to AVClass) + /// combination of AV_OPT_FLAG_* + /// combination of AV_OPT_FLAG_* + public static void av_opt_set_defaults2(void* @s, int @mask, int @flags) => vectors.av_opt_set_defaults2(@s, @mask, @flags); + + /// Set all the options from a given dictionary on an object. 
+ /// a struct whose first element is a pointer to AVClass + /// options to process. This dictionary will be freed and replaced by a new one containing all options not found in obj. Of course this new dictionary needs to be freed by caller with av_dict_free(). + /// 0 on success, a negative AVERROR if some option was found in obj, but could not be set. + public static int av_opt_set_dict(void* @obj, AVDictionary** @options) => vectors.av_opt_set_dict(@obj, @options); + + public static int av_opt_set_dict_val(void* @obj, string @name, AVDictionary* @val, int @search_flags) => vectors.av_opt_set_dict_val(@obj, @name, @val, @search_flags); + + /// Set all the options from a given dictionary on an object. + /// a struct whose first element is a pointer to AVClass + /// options to process. This dictionary will be freed and replaced by a new one containing all options not found in obj. Of course this new dictionary needs to be freed by caller with av_dict_free(). + /// A combination of AV_OPT_SEARCH_*. + /// 0 on success, a negative AVERROR if some option was found in obj, but could not be set. + public static int av_opt_set_dict2(void* @obj, AVDictionary** @options, int @search_flags) => vectors.av_opt_set_dict2(@obj, @options, @search_flags); + + public static int av_opt_set_double(void* @obj, string @name, double @val, int @search_flags) => vectors.av_opt_set_double(@obj, @name, @val, @search_flags); + + /// Parse the key-value pairs list in opts. For each key=value pair found, set the value of the corresponding option in ctx. 
+ /// the AVClass object to set options on + /// the options string, key-value pairs separated by a delimiter + /// a NULL-terminated array of options names for shorthand notation: if the first field in opts has no key part, the key is taken from the first element of shorthand; then again for the second, etc., until either opts is finished, shorthand is finished or a named option is found; after that, all options must be named + /// a 0-terminated list of characters used to separate key from value, for example '=' + /// a 0-terminated list of characters used to separate two pairs from each other, for example ':' or ',' + /// the number of successfully set key=value pairs, or a negative value corresponding to an AVERROR code in case of error: AVERROR(EINVAL) if opts cannot be parsed, the error code issued by av_set_string3() if a key/value pair cannot be set + public static int av_opt_set_from_string(void* @ctx, string @opts, byte** @shorthand, string @key_val_sep, string @pairs_sep) => vectors.av_opt_set_from_string(@ctx, @opts, @shorthand, @key_val_sep, @pairs_sep); + + public static int av_opt_set_image_size(void* @obj, string @name, int @w, int @h, int @search_flags) => vectors.av_opt_set_image_size(@obj, @name, @w, @h, @search_flags); + + public static int av_opt_set_int(void* @obj, string @name, long @val, int @search_flags) => vectors.av_opt_set_int(@obj, @name, @val, @search_flags); + + public static int av_opt_set_pixel_fmt(void* @obj, string @name, AVPixelFormat @fmt, int @search_flags) => vectors.av_opt_set_pixel_fmt(@obj, @name, @fmt, @search_flags); + + public static int av_opt_set_q(void* @obj, string @name, AVRational @val, int @search_flags) => vectors.av_opt_set_q(@obj, @name, @val, @search_flags); + + public static int av_opt_set_sample_fmt(void* @obj, string @name, AVSampleFormat @fmt, int @search_flags) => vectors.av_opt_set_sample_fmt(@obj, @name, @fmt, @search_flags); + + public static int av_opt_set_video_rate(void* @obj, string @name, 
AVRational @val, int @search_flags) => vectors.av_opt_set_video_rate(@obj, @name, @val, @search_flags); + + /// Show the obj options. + /// log context to use for showing the options + /// requested flags for the options to show. Show only the options for which it is opt->flags & req_flags. + /// rejected flags for the options to show. Show only the options for which it is !(opt->flags & req_flags). + public static int av_opt_show2(void* @obj, void* @av_log_obj, int @req_flags, int @rej_flags) => vectors.av_opt_show2(@obj, @av_log_obj, @req_flags, @rej_flags); + + /// Audio output devices iterator. + public static AVOutputFormat* av_output_audio_device_next(AVOutputFormat* @d) => vectors.av_output_audio_device_next(@d); + + /// Video output devices iterator. + public static AVOutputFormat* av_output_video_device_next(AVOutputFormat* @d) => vectors.av_output_video_device_next(@d); + + /// Wrap an existing array as a packet side data. + /// packet + /// side information type + /// the side data array. It must be allocated with the av_malloc() family of functions. The ownership of the data is transferred to pkt. + /// side information size + /// a non-negative number on success, a negative AVERROR code on failure. On failure, the packet is unchanged and the data remains owned by the caller. + public static int av_packet_add_side_data(AVPacket* @pkt, AVPacketSideDataType @type, byte* @data, ulong @size) => vectors.av_packet_add_side_data(@pkt, @type, @data, @size); + + /// Allocate an AVPacket and set its fields to default values. The resulting struct must be freed using av_packet_free(). + /// An AVPacket filled with default values or NULL on failure. + public static AVPacket* av_packet_alloc() => vectors.av_packet_alloc(); + + /// Create a new packet that references the same data as src. + /// newly created AVPacket on success, NULL on error. 
+ public static AVPacket* av_packet_clone(AVPacket* @src) => vectors.av_packet_clone(@src); + + /// Copy only "properties" fields from src to dst. + /// Destination packet + /// Source packet + /// 0 on success AVERROR on failure. + public static int av_packet_copy_props(AVPacket* @dst, AVPacket* @src) => vectors.av_packet_copy_props(@dst, @src); + + /// Free the packet, if the packet is reference counted, it will be unreferenced first. + /// packet to be freed. The pointer will be set to NULL. + public static void av_packet_free(AVPacket** @pkt) => vectors.av_packet_free(@pkt); + + /// Convenience function to free all the side data stored. All the other fields stay untouched. + /// packet + public static void av_packet_free_side_data(AVPacket* @pkt) => vectors.av_packet_free_side_data(@pkt); + + /// Initialize a reference-counted packet from av_malloc()ed data. + /// packet to be initialized. This function will set the data, size, and buf fields, all others are left untouched. + /// Data allocated by av_malloc() to be used as packet data. If this function returns successfully, the data is owned by the underlying AVBuffer. The caller may not access the data through other means. + /// size of data in bytes, without the padding. I.e. the full buffer size is assumed to be size + AV_INPUT_BUFFER_PADDING_SIZE. + /// 0 on success, a negative AVERROR on error + public static int av_packet_from_data(AVPacket* @pkt, byte* @data, int @size) => vectors.av_packet_from_data(@pkt, @data, @size); + + /// Get side information from packet. + /// packet + /// desired side information type + /// If supplied, *size will be set to the size of the side data or to zero if the desired side data is not present. 
+ /// pointer to data if present or NULL otherwise + public static byte* av_packet_get_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong* @size) => vectors.av_packet_get_side_data(@pkt, @type, @size); + + /// Ensure the data described by a given packet is reference counted. + /// packet whose data should be made reference counted. + /// 0 on success, a negative AVERROR on error. On failure, the packet is unchanged. + public static int av_packet_make_refcounted(AVPacket* @pkt) => vectors.av_packet_make_refcounted(@pkt); + + /// Create a writable reference for the data described by a given packet, avoiding data copy if possible. + /// Packet whose data should be made writable. + /// 0 on success, a negative AVERROR on failure. On failure, the packet is unchanged. + public static int av_packet_make_writable(AVPacket* @pkt) => vectors.av_packet_make_writable(@pkt); + + /// Move every field in src to dst and reset src. + /// Destination packet + /// Source packet, will be reset + public static void av_packet_move_ref(AVPacket* @dst, AVPacket* @src) => vectors.av_packet_move_ref(@dst, @src); + + /// Allocate new information of a packet. + /// packet + /// side information type + /// side information size + /// pointer to fresh allocated data or NULL otherwise + public static byte* av_packet_new_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size) => vectors.av_packet_new_side_data(@pkt, @type, @size); + + /// Pack a dictionary for use in side_data. + /// The dictionary to pack. + /// pointer to store the size of the returned data + /// pointer to data if successful, NULL otherwise + public static byte* av_packet_pack_dictionary(AVDictionary* @dict, ulong* @size) => vectors.av_packet_pack_dictionary(@dict, @size); + + /// Setup a new reference to the data described by a given packet + /// Destination packet. Will be completely overwritten. + /// Source packet + /// 0 on success, a negative AVERROR on error. 
On error, dst will be blank (as if returned by av_packet_alloc()). + public static int av_packet_ref(AVPacket* @dst, AVPacket* @src) => vectors.av_packet_ref(@dst, @src); + + /// Convert valid timing fields (timestamps / durations) in a packet from one timebase to another. Timestamps with unknown values (AV_NOPTS_VALUE) will be ignored. + /// packet on which the conversion will be performed + /// source timebase, in which the timing fields in pkt are expressed + /// destination timebase, to which the timing fields will be converted + public static void av_packet_rescale_ts(AVPacket* @pkt, AVRational @tb_src, AVRational @tb_dst) => vectors.av_packet_rescale_ts(@pkt, @tb_src, @tb_dst); + + /// Shrink the already allocated side data buffer + /// packet + /// side information type + /// new side information size + /// 0 on success, < 0 on failure + public static int av_packet_shrink_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size) => vectors.av_packet_shrink_side_data(@pkt, @type, @size); + + public static string av_packet_side_data_name(AVPacketSideDataType @type) => vectors.av_packet_side_data_name(@type); + + /// Unpack a dictionary from side_data. + /// data from side_data + /// size of the data + /// the metadata storage dictionary + /// 0 on success, < 0 on failure + public static int av_packet_unpack_dictionary(byte* @data, ulong @size, AVDictionary** @dict) => vectors.av_packet_unpack_dictionary(@data, @size, @dict); + + /// Wipe the packet. + /// The packet to be unreferenced. + public static void av_packet_unref(AVPacket* @pkt) => vectors.av_packet_unref(@pkt); + + /// Parse CPU caps from a string and update the given AV_CPU_* flags based on that. + /// negative on error. 
+ public static int av_parse_cpu_caps(uint* @flags, string @s) => vectors.av_parse_cpu_caps(@flags, @s); + + public static void av_parser_close(AVCodecParserContext* @s) => vectors.av_parser_close(@s); + + public static AVCodecParserContext* av_parser_init(int @codec_id) => vectors.av_parser_init(@codec_id); + + /// Iterate over all registered codec parsers. + /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. + /// the next registered codec parser or NULL when the iteration is finished + public static AVCodecParser* av_parser_iterate(void** @opaque) => vectors.av_parser_iterate(@opaque); + + /// Parse a packet. + /// parser context. + /// codec context. + /// set to pointer to parsed buffer or NULL if not yet finished. + /// set to size of parsed buffer or zero if not yet finished. + /// input buffer. + /// buffer size in bytes without the padding. I.e. the full buffer size is assumed to be buf_size + AV_INPUT_BUFFER_PADDING_SIZE. To signal EOF, this should be 0 (so that the last frame can be output). + /// input presentation timestamp. + /// input decoding timestamp. + /// input byte position in stream. + /// the number of bytes of the input bitstream used. + public static int av_parser_parse2(AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size, long @pts, long @dts, long @pos) => vectors.av_parser_parse2(@s, @avctx, @poutbuf, @poutbuf_size, @buf, @buf_size, @pts, @dts, @pos); + + /// Returns number of planes in pix_fmt, a negative AVERROR if pix_fmt is not a valid pixel format. + /// number of planes in pix_fmt, a negative AVERROR if pix_fmt is not a valid pixel format. + public static int av_pix_fmt_count_planes(AVPixelFormat @pix_fmt) => vectors.av_pix_fmt_count_planes(@pix_fmt); + + /// Returns a pixel format descriptor for provided pixel format or NULL if this pixel format is unknown. 
+ /// a pixel format descriptor for provided pixel format or NULL if this pixel format is unknown. + public static AVPixFmtDescriptor* av_pix_fmt_desc_get(AVPixelFormat @pix_fmt) => vectors.av_pix_fmt_desc_get(@pix_fmt); + + /// Returns an AVPixelFormat id described by desc, or AV_PIX_FMT_NONE if desc is not a valid pointer to a pixel format descriptor. + /// an AVPixelFormat id described by desc, or AV_PIX_FMT_NONE if desc is not a valid pointer to a pixel format descriptor. + public static AVPixelFormat av_pix_fmt_desc_get_id(AVPixFmtDescriptor* @desc) => vectors.av_pix_fmt_desc_get_id(@desc); + + /// Iterate over all pixel format descriptors known to libavutil. + /// previous descriptor. NULL to get the first descriptor. + /// next descriptor or NULL after the last descriptor + public static AVPixFmtDescriptor* av_pix_fmt_desc_next(AVPixFmtDescriptor* @prev) => vectors.av_pix_fmt_desc_next(@prev); + + /// Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor. + /// the pixel format + /// store log2_chroma_w (horizontal/width shift) + /// store log2_chroma_h (vertical/height shift) + /// 0 on success, AVERROR(ENOSYS) on invalid or unknown pixel format + public static int av_pix_fmt_get_chroma_sub_sample(AVPixelFormat @pix_fmt, int* @h_shift, int* @v_shift) => vectors.av_pix_fmt_get_chroma_sub_sample(@pix_fmt, @h_shift, @v_shift); + + /// Utility function to swap the endianness of a pixel format. + /// the pixel format + /// pixel format with swapped endianness if it exists, otherwise AV_PIX_FMT_NONE + public static AVPixelFormat av_pix_fmt_swap_endianness(AVPixelFormat @pix_fmt) => vectors.av_pix_fmt_swap_endianness(@pix_fmt); + + /// Send a nice dump of a packet to the log. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message, lower values signifying higher importance. 
+    /// packet to dump
+    /// True if the payload must be displayed, too.
+    /// AVStream that the packet belongs to
+    public static void av_pkt_dump_log2(void* @avcl, int @level, AVPacket* @pkt, int @dump_payload, AVStream* @st) => vectors.av_pkt_dump_log2(@avcl, @level, @pkt, @dump_payload, @st);
+
+    /// Send a nice dump of a packet to the specified file stream.
+    /// The file stream pointer where the dump should be sent to.
+    /// packet to dump
+    /// True if the payload must be displayed, too.
+    /// AVStream that the packet belongs to
+    public static void av_pkt_dump2(_iobuf* @f, AVPacket* @pkt, int @dump_payload, AVStream* @st) => vectors.av_pkt_dump2(@f, @pkt, @dump_payload, @st);
+
+    /// Like av_probe_input_buffer2() but returns 0 on success
+    public static int av_probe_input_buffer(AVIOContext* @pb, AVInputFormat** @fmt, string @url, void* @logctx, uint @offset, uint @max_probe_size) => vectors.av_probe_input_buffer(@pb, @fmt, @url, @logctx, @offset, @max_probe_size);
+
+    /// Probe a bytestream to determine the input format. Each time a probe returns with a score that is too low, the probe buffer size is increased and another attempt is made. When the maximum probe size is reached, the input format with the highest score is returned.
+    /// the bytestream to probe
+    /// the input format is put here
+    /// the url of the stream
+    /// the log context
+    /// the offset within the bytestream to probe from
+    /// the maximum probe buffer size (zero for default)
+    /// the score in case of success, a negative value corresponding to an AVERROR code otherwise; the maximal score is AVPROBE_SCORE_MAX
+    public static int av_probe_input_buffer2(AVIOContext* @pb, AVInputFormat** @fmt, string @url, void* @logctx, uint @offset, uint @max_probe_size) => vectors.av_probe_input_buffer2(@pb, @fmt, @url, @logctx, @offset, @max_probe_size);
+
+    /// Guess the file format.
+    /// data to be probed
+    /// Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed.
+    public static AVInputFormat* av_probe_input_format(AVProbeData* @pd, int @is_opened) => vectors.av_probe_input_format(@pd, @is_opened);
+
+    /// Guess the file format.
+    /// data to be probed
+    /// Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed.
+    /// A probe score larger than this is required to accept a detection, the variable is set to the actual detection score afterwards. If the score is <= AVPROBE_SCORE_MAX / 4 it is recommended to retry with a larger probe buffer.
+    public static AVInputFormat* av_probe_input_format2(AVProbeData* @pd, int @is_opened, int* @score_max) => vectors.av_probe_input_format2(@pd, @is_opened, @score_max);
+
+    /// Guess the file format.
+    /// Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed.
+    /// The score of the best detection.
+    public static AVInputFormat* av_probe_input_format3(AVProbeData* @pd, int @is_opened, int* @score_ret) => vectors.av_probe_input_format3(@pd, @is_opened, @score_ret);
+
+    public static void av_program_add_stream_index(AVFormatContext* @ac, int @progid, uint @idx) => vectors.av_program_add_stream_index(@ac, @progid, @idx);
+
+    /// Convert an AVRational to an IEEE 32-bit `float` expressed in fixed-point format.
+    /// Rational to be converted
+    /// Equivalent floating-point value, expressed as an unsigned 32-bit integer.
+    public static uint av_q2intfloat(AVRational @q) => vectors.av_q2intfloat(@q);
+
+    /// Return the next frame of a stream. This function returns what is stored in the file, and does not validate that what is there are valid frames for the decoder. It will split what is stored in the file into frames and return one for each call. It will not omit invalid data between valid frames so as to give the decoder the maximum information possible for decoding.
+ /// 0 if OK, < 0 on error or end of file. On error, pkt will be blank (as if it came from av_packet_alloc()). + public static int av_read_frame(AVFormatContext* @s, AVPacket* @pkt) => vectors.av_read_frame(@s, @pkt); + + public static void av_read_image_line(ushort* @dst, in byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component) => vectors.av_read_image_line(@dst, @data, @linesize, @desc, @x, @y, @c, @w, @read_pal_component); + + /// Read a line from an image, and write the values of the pixel format component c to dst. + /// the array containing the pointers to the planes of the image + /// the array containing the linesizes of the image + /// the pixel format descriptor for the image + /// the horizontal coordinate of the first pixel to read + /// the vertical coordinate of the first pixel to read + /// the width of the line to read, that is the number of values to write to dst + /// if not zero and the format is a paletted format writes the values corresponding to the palette component c in data[1] to dst, rather than the palette indexes in data[0]. The behavior is undefined if the format is not paletted. + /// size of elements in dst array (2 or 4 byte) + public static void av_read_image_line2(void* @dst, in byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component, int @dst_element_size) => vectors.av_read_image_line2(@dst, @data, @linesize, @desc, @x, @y, @c, @w, @read_pal_component, @dst_element_size); + + /// Pause a network-based stream (e.g. RTSP stream). + public static int av_read_pause(AVFormatContext* @s) => vectors.av_read_pause(@s); + + /// Start playing a network-based stream (e.g. RTSP stream) at the current position. + public static int av_read_play(AVFormatContext* @s) => vectors.av_read_play(@s); + + /// Allocate, reallocate, or free a block of memory. 
+ /// Pointer to a memory block already allocated with av_realloc() or `NULL` + /// Size in bytes of the memory block to be allocated or reallocated + /// Pointer to a newly-reallocated block or `NULL` if the block cannot be reallocated + public static void* av_realloc(void* @ptr, ulong @size) => vectors.av_realloc(@ptr, @size); + + /// Allocate, reallocate, or free an array. + /// Pointer to a memory block already allocated with av_realloc() or `NULL` + /// Number of elements in the array + /// Size of the single element of the array + /// Pointer to a newly-reallocated block or NULL if the block cannot be reallocated + public static void* av_realloc_array(void* @ptr, ulong @nmemb, ulong @size) => vectors.av_realloc_array(@ptr, @nmemb, @size); + + /// Allocate, reallocate, or free a block of memory. + public static void* av_realloc_f(void* @ptr, ulong @nelem, ulong @elsize) => vectors.av_realloc_f(@ptr, @nelem, @elsize); + + /// Allocate, reallocate, or free a block of memory through a pointer to a pointer. + /// Pointer to a pointer to a memory block already allocated with av_realloc(), or a pointer to `NULL`. The pointer is updated on success, or freed on failure. + /// Size in bytes for the memory block to be allocated or reallocated + /// Zero on success, an AVERROR error code on failure + public static int av_reallocp(void* @ptr, ulong @size) => vectors.av_reallocp(@ptr, @size); + + /// Allocate, reallocate an array through a pointer to a pointer. + /// Pointer to a pointer to a memory block already allocated with av_realloc(), or a pointer to `NULL`. The pointer is updated on success, or freed on failure. + /// Number of elements + /// Size of the single element + /// Zero on success, an AVERROR error code on failure + public static int av_reallocp_array(void* @ptr, ulong @nmemb, ulong @size) => vectors.av_reallocp_array(@ptr, @nmemb, @size); + + /// Reduce a fraction. 
+ /// Destination numerator + /// Destination denominator + /// Source numerator + /// Source denominator + /// Maximum allowed values for `dst_num` & `dst_den` + /// 1 if the operation is exact, 0 otherwise + public static int av_reduce(int* @dst_num, int* @dst_den, long @num, long @den, long @max) => vectors.av_reduce(@dst_num, @dst_den, @num, @den, @max); + + /// Rescale a 64-bit integer with rounding to nearest. + public static long av_rescale(long @a, long @b, long @c) => vectors.av_rescale(@a, @b, @c); + + /// Rescale a timestamp while preserving known durations. + /// Input time base + /// Input timestamp + /// Duration time base; typically this is finer-grained (greater) than `in_tb` and `out_tb` + /// Duration till the next call to this function (i.e. duration of the current packet/frame) + /// Pointer to a timestamp expressed in terms of `fs_tb`, acting as a state variable + /// Output timebase + /// Timestamp expressed in terms of `out_tb` + public static long av_rescale_delta(AVRational @in_tb, long @in_ts, AVRational @fs_tb, int @duration, long* @last, AVRational @out_tb) => vectors.av_rescale_delta(@in_tb, @in_ts, @fs_tb, @duration, @last, @out_tb); + + /// Rescale a 64-bit integer by 2 rational numbers. + public static long av_rescale_q(long @a, AVRational @bq, AVRational @cq) => vectors.av_rescale_q(@a, @bq, @cq); + + /// Rescale a 64-bit integer by 2 rational numbers with specified rounding. + public static long av_rescale_q_rnd(long @a, AVRational @bq, AVRational @cq, AVRounding @rnd) => vectors.av_rescale_q_rnd(@a, @bq, @cq, @rnd); + + /// Rescale a 64-bit integer with specified rounding. + public static long av_rescale_rnd(long @a, long @b, long @c, AVRounding @rnd) => vectors.av_rescale_rnd(@a, @b, @c, @rnd); + + /// Check if the sample format is planar. 
+ /// the sample format to inspect + /// 1 if the sample format is planar, 0 if it is interleaved + public static int av_sample_fmt_is_planar(AVSampleFormat @sample_fmt) => vectors.av_sample_fmt_is_planar(@sample_fmt); + + /// Allocate a samples buffer for nb_samples samples, and fill data pointers and linesize accordingly. The allocated samples buffer can be freed by using av_freep(&audio_data[0]) Allocated data will be initialized to silence. + /// array to be filled with the pointer for each channel + /// aligned size for audio buffer(s), may be NULL + /// number of audio channels + /// number of samples per channel + /// buffer size alignment (0 = default, 1 = no alignment) + /// >=0 on success or a negative error code on failure + public static int av_samples_alloc(byte** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => vectors.av_samples_alloc(@audio_data, @linesize, @nb_channels, @nb_samples, @sample_fmt, @align); + + /// Allocate a data pointers array, samples buffer for nb_samples samples, and fill data pointers and linesize accordingly. + public static int av_samples_alloc_array_and_samples(byte*** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => vectors.av_samples_alloc_array_and_samples(@audio_data, @linesize, @nb_channels, @nb_samples, @sample_fmt, @align); + + /// Copy samples from src to dst. 
+ /// destination array of pointers to data planes + /// source array of pointers to data planes + /// offset in samples at which the data will be written to dst + /// offset in samples at which the data will be read from src + /// number of samples to be copied + /// number of audio channels + /// audio sample format + public static int av_samples_copy(byte** @dst, byte** @src, int @dst_offset, int @src_offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt) => vectors.av_samples_copy(@dst, @src, @dst_offset, @src_offset, @nb_samples, @nb_channels, @sample_fmt); + + /// Fill plane data pointers and linesize for samples with sample format sample_fmt. + /// array to be filled with the pointer for each channel + /// calculated linesize, may be NULL + /// the pointer to a buffer containing the samples + /// the number of channels + /// the number of samples in a single channel + /// the sample format + /// buffer size alignment (0 = default, 1 = no alignment) + /// minimum size in bytes required for the buffer on success, or a negative error code on failure + public static int av_samples_fill_arrays(byte** @audio_data, int* @linesize, byte* @buf, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => vectors.av_samples_fill_arrays(@audio_data, @linesize, @buf, @nb_channels, @nb_samples, @sample_fmt, @align); + + /// Get the required buffer size for the given audio parameters. + /// calculated linesize, may be NULL + /// the number of channels + /// the number of samples in a single channel + /// the sample format + /// buffer size alignment (0 = default, 1 = no alignment) + /// required buffer size, or negative error code on failure + public static int av_samples_get_buffer_size(int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => vectors.av_samples_get_buffer_size(@linesize, @nb_channels, @nb_samples, @sample_fmt, @align); + + /// Fill an audio buffer with silence. 
+ /// array of pointers to data planes + /// offset in samples at which to start filling + /// number of samples to fill + /// number of audio channels + /// audio sample format + public static int av_samples_set_silence(byte** @audio_data, int @offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt) => vectors.av_samples_set_silence(@audio_data, @offset, @nb_samples, @nb_channels, @sample_fmt); + + /// Generate an SDP for an RTP session. + /// array of AVFormatContexts describing the RTP streams. If the array is composed by only one context, such context can contain multiple AVStreams (one AVStream per RTP stream). Otherwise, all the contexts in the array (an AVCodecContext per RTP stream) must contain only one AVStream. + /// number of AVCodecContexts contained in ac + /// buffer where the SDP will be stored (must be allocated by the caller) + /// the size of the buffer + /// 0 if OK, AVERROR_xxx on error + public static int av_sdp_create(AVFormatContext** @ac, int @n_files, byte* @buf, int @size) => vectors.av_sdp_create(@ac, @n_files, @buf, @size); + + /// Seek to the keyframe at timestamp. 'timestamp' in 'stream_index'. + /// media file handle + /// If stream_index is (-1), a default stream is selected, and timestamp is automatically converted from AV_TIME_BASE units to the stream specific time_base. + /// Timestamp in AVStream.time_base units or, if no stream is specified, in AV_TIME_BASE units. + /// flags which select direction and seeking mode + /// >= 0 on success + public static int av_seek_frame(AVFormatContext* @s, int @stream_index, long @timestamp, int @flags) => vectors.av_seek_frame(@s, @stream_index, @timestamp, @flags); + + /// Parse the key/value pairs list in opts. For each key/value pair found, stores the value in the field in ctx that is named like the key. ctx must be an AVClass context, storing is done using AVOptions. 
+ /// options string to parse, may be NULL + /// a 0-terminated list of characters used to separate key from value + /// a 0-terminated list of characters used to separate two pairs from each other + /// the number of successfully set key/value pairs, or a negative value corresponding to an AVERROR code in case of error: AVERROR(EINVAL) if opts cannot be parsed, the error code issued by av_opt_set() if a key/value pair cannot be set + public static int av_set_options_string(void* @ctx, string @opts, string @key_val_sep, string @pairs_sep) => vectors.av_set_options_string(@ctx, @opts, @key_val_sep, @pairs_sep); + + /// Reduce packet size, correctly zeroing padding + /// packet + /// new size + public static void av_shrink_packet(AVPacket* @pkt, int @size) => vectors.av_shrink_packet(@pkt, @size); + + /// Multiply two `size_t` values checking for overflow. + /// Pointer to the result of the operation + /// 0 on success, AVERROR(EINVAL) on overflow + public static int av_size_mult(ulong @a, ulong @b, ulong* @r) => vectors.av_size_mult(@a, @b, @r); + + /// Duplicate a string. + /// String to be duplicated + /// Pointer to a newly-allocated string containing a copy of `s` or `NULL` if the string cannot be allocated + public static byte* av_strdup(string @s) => vectors.av_strdup(@s); + + /// Wrap an existing array as stream side data. + /// stream + /// side information type + /// the side data array. It must be allocated with the av_malloc() family of functions. The ownership of the data is transferred to st. + /// side information size + /// zero on success, a negative AVERROR code on failure. On failure, the stream is unchanged and the data remains owned by the caller. + public static int av_stream_add_side_data(AVStream* @st, AVPacketSideDataType @type, byte* @data, ulong @size) => vectors.av_stream_add_side_data(@st, @type, @data, @size); + + /// Get the AVClass for AVStream. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. 
+ public static AVClass* av_stream_get_class() => vectors.av_stream_get_class(); + + /// Get the internal codec timebase from a stream. + /// input stream to extract the timebase from + public static AVRational av_stream_get_codec_timebase(AVStream* @st) => vectors.av_stream_get_codec_timebase(@st); + + /// Returns the pts of the last muxed packet + its duration + public static long av_stream_get_end_pts(AVStream* @st) => vectors.av_stream_get_end_pts(@st); + + public static AVCodecParserContext* av_stream_get_parser(AVStream* @s) => vectors.av_stream_get_parser(@s); + + /// Get side information from stream. + /// stream + /// desired side information type + /// If supplied, *size will be set to the size of the side data or to zero if the desired side data is not present. + /// pointer to data if present or NULL otherwise + public static byte* av_stream_get_side_data(AVStream* @stream, AVPacketSideDataType @type, ulong* @size) => vectors.av_stream_get_side_data(@stream, @type, @size); + + /// Allocate new information from stream. + /// stream + /// desired side information type + /// side information size + /// pointer to fresh allocated data or NULL otherwise + public static byte* av_stream_new_side_data(AVStream* @stream, AVPacketSideDataType @type, ulong @size) => vectors.av_stream_new_side_data(@stream, @type, @size); + + /// Put a description of the AVERROR code errnum in errbuf. In case of failure the global variable errno is set to indicate the error. Even in case of failure av_strerror() will print a generic error message indicating the errnum provided to errbuf. + /// error code to describe + /// buffer to which description is written + /// the size in bytes of errbuf + /// 0 on success, a negative value if a description for errnum cannot be found + public static int av_strerror(int @errnum, byte* @errbuf, ulong @errbuf_size) => vectors.av_strerror(@errnum, @errbuf, @errbuf_size); + + /// Duplicate a substring of a string. 
+ /// String to be duplicated + /// Maximum length of the resulting string (not counting the terminating byte) + /// Pointer to a newly-allocated string containing a substring of `s` or `NULL` if the string cannot be allocated + public static byte* av_strndup(string @s, ulong @len) => vectors.av_strndup(@s, @len); + + /// Subtract one rational from another. + /// First rational + /// Second rational + /// b-c + public static AVRational av_sub_q(AVRational @b, AVRational @c) => vectors.av_sub_q(@b, @c); + + /// Wrapper to work around the lack of mkstemp() on mingw. Also, tries to create file in /tmp first, if possible. *prefix can be a character constant; *filename will be allocated internally. + /// file descriptor of opened file (or negative value corresponding to an AVERROR code on error) and opened file name in **filename. + [Obsolete("as fd numbers cannot be passed saftely between libs on some platforms")] + public static int av_tempfile(string @prefix, byte** @filename, int @log_offset, void* @log_ctx) => vectors.av_tempfile(@prefix, @filename, @log_offset, @log_ctx); + + /// Adjust frame number for NTSC drop frame time code. + /// frame number to adjust + /// frame per second, multiples of 30 + /// adjusted frame number + public static int av_timecode_adjust_ntsc_framenum2(int @framenum, int @fps) => vectors.av_timecode_adjust_ntsc_framenum2(@framenum, @fps); + + /// Check if the timecode feature is available for the given frame rate + /// 0 if supported, < 0 otherwise + public static int av_timecode_check_frame_rate(AVRational @rate) => vectors.av_timecode_check_frame_rate(@rate); + + /// Convert sei info to SMPTE 12M binary representation. 
+ /// frame rate in rational form + /// drop flag + /// hour + /// minute + /// second + /// frame number + /// the SMPTE binary representation + public static uint av_timecode_get_smpte(AVRational @rate, int @drop, int @hh, int @mm, int @ss, int @ff) => vectors.av_timecode_get_smpte(@rate, @drop, @hh, @mm, @ss, @ff); + + /// Convert frame number to SMPTE 12M binary representation. + /// timecode data correctly initialized + /// frame number + /// the SMPTE binary representation + public static uint av_timecode_get_smpte_from_framenum(AVTimecode* @tc, int @framenum) => vectors.av_timecode_get_smpte_from_framenum(@tc, @framenum); + + /// Init a timecode struct with the passed parameters. + /// pointer to an allocated AVTimecode + /// frame rate in rational form + /// miscellaneous flags such as drop frame, +24 hours, ... (see AVTimecodeFlag) + /// the first frame number + /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log) + /// 0 on success, AVERROR otherwise + public static int av_timecode_init(AVTimecode* @tc, AVRational @rate, int @flags, int @frame_start, void* @log_ctx) => vectors.av_timecode_init(@tc, @rate, @flags, @frame_start, @log_ctx); + + /// Init a timecode struct from the passed timecode components. + /// pointer to an allocated AVTimecode + /// frame rate in rational form + /// miscellaneous flags such as drop frame, +24 hours, ... (see AVTimecodeFlag) + /// hours + /// minutes + /// seconds + /// frames + /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log) + /// 0 on success, AVERROR otherwise + public static int av_timecode_init_from_components(AVTimecode* @tc, AVRational @rate, int @flags, int @hh, int @mm, int @ss, int @ff, void* @log_ctx) => vectors.av_timecode_init_from_components(@tc, @rate, @flags, @hh, @mm, @ss, @ff, @log_ctx); + + /// Parse timecode representation (hh:mm:ss[:;.]ff). 
+ /// pointer to an allocated AVTimecode + /// frame rate in rational form + /// timecode string which will determine the frame start + /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log). + /// 0 on success, AVERROR otherwise + public static int av_timecode_init_from_string(AVTimecode* @tc, AVRational @rate, string @str, void* @log_ctx) => vectors.av_timecode_init_from_string(@tc, @rate, @str, @log_ctx); + + /// Get the timecode string from the 25-bit timecode format (MPEG GOP format). + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// the 25-bits timecode + /// the buf parameter + public static byte* av_timecode_make_mpeg_tc_string(byte* @buf, uint @tc25bit) => vectors.av_timecode_make_mpeg_tc_string(@buf, @tc25bit); + + /// Get the timecode string from the SMPTE timecode format. + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// the 32-bit SMPTE timecode + /// prevent the use of a drop flag when it is known the DF bit is arbitrary + /// the buf parameter + public static byte* av_timecode_make_smpte_tc_string(byte* @buf, uint @tcsmpte, int @prevent_df) => vectors.av_timecode_make_smpte_tc_string(@buf, @tcsmpte, @prevent_df); + + /// Get the timecode string from the SMPTE timecode format. + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// frame rate of the timecode + /// the 32-bit SMPTE timecode + /// prevent the use of a drop flag when it is known the DF bit is arbitrary + /// prevent the use of a field flag when it is known the field bit is arbitrary (e.g. because it is used as PC flag) + /// the buf parameter + public static byte* av_timecode_make_smpte_tc_string2(byte* @buf, AVRational @rate, uint @tcsmpte, int @prevent_df, int @skip_field) => vectors.av_timecode_make_smpte_tc_string2(@buf, @rate, @tcsmpte, @prevent_df, @skip_field); + + /// Load timecode string in buf. 
+ /// timecode data correctly initialized + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// frame number + /// the buf parameter + public static byte* av_timecode_make_string(AVTimecode* @tc, byte* @buf, int @framenum) => vectors.av_timecode_make_string(@tc, @buf, @framenum); + + public static void av_tree_destroy(AVTreeNode* @t) => vectors.av_tree_destroy(@t); + + /// Apply enu(opaque, &elem) to all the elements in the tree in a given range. + /// a comparison function that returns < 0 for an element below the range, > 0 for an element above the range and == 0 for an element inside the range + public static void av_tree_enumerate(AVTreeNode* @t, void* @opaque, av_tree_enumerate_cmp_func @cmp, av_tree_enumerate_enu_func @enu) => vectors.av_tree_enumerate(@t, @opaque, @cmp, @enu); + + /// Find an element. + /// a pointer to the root node of the tree + /// compare function used to compare elements in the tree, API identical to that of Standard C's qsort It is guaranteed that the first and only the first argument to cmp() will be the key parameter to av_tree_find(), thus it could if the user wants, be a different type (like an opaque context). + /// If next is not NULL, then next[0] will contain the previous element and next[1] the next element. If either does not exist, then the corresponding entry in next is unchanged. + /// An element with cmp(key, elem) == 0 or NULL if no such element exists in the tree. + public static void* av_tree_find(AVTreeNode* @root, void* @key, av_tree_find_cmp_func @cmp, ref void_ptr2 @next) => vectors.av_tree_find(@root, @key, @cmp, ref @next); + + /// Insert or remove an element. + /// A pointer to a pointer to the root node of the tree; note that the root node can change during insertions, this is required to keep the tree balanced. 
+ /// pointer to the element key to insert in the tree + /// compare function used to compare elements in the tree, API identical to that of Standard C's qsort + /// Used to allocate and free AVTreeNodes. For insertion the user must set it to an allocated and zeroed object of at least av_tree_node_size bytes size. av_tree_insert() will set it to NULL if it has been consumed. For deleting elements *next is set to NULL by the user and av_tree_insert() will set it to the AVTreeNode which was used for the removed element. This allows the use of flat arrays, which have lower overhead compared to many malloced elements. You might want to define a function like: + /// If no insertion happened, the found element; if an insertion or removal happened, then either key or NULL will be returned. Which one it is depends on the tree state and the implementation. You should make no assumptions that it's one or the other in the code. + public static void* av_tree_insert(AVTreeNode** @rootp, void* @key, av_tree_insert_cmp_func @cmp, AVTreeNode** @next) => vectors.av_tree_insert(@rootp, @key, @cmp, @next); + + /// Allocate an AVTreeNode. + public static AVTreeNode* av_tree_node_alloc() => vectors.av_tree_node_alloc(); + + /// Split a URL string into components. 
+ /// the buffer for the protocol + /// the size of the proto buffer + /// the buffer for the authorization + /// the size of the authorization buffer + /// the buffer for the host name + /// the size of the hostname buffer + /// a pointer to store the port number in + /// the buffer for the path + /// the size of the path buffer + /// the URL to split + public static void av_url_split(byte* @proto, int @proto_size, byte* @authorization, int @authorization_size, byte* @hostname, int @hostname_size, int* @port_ptr, byte* @path, int @path_size, string @url) => vectors.av_url_split(@proto, @proto_size, @authorization, @authorization_size, @hostname, @hostname_size, @port_ptr, @path, @path_size, @url); + + /// Sleep for a period of time. Although the duration is expressed in microseconds, the actual delay may be rounded to the precision of the system timer. + /// Number of microseconds to sleep. + /// zero on success or (negative) error code. + public static int av_usleep(uint @usec) => vectors.av_usleep(@usec); + + /// Return an informative version string. This usually is the actual release version number or a git commit description. This string has no fixed format and can change any time. It should never be parsed by code. + public static string av_version_info() => vectors.av_version_info(); + + /// Send the specified message to the log if the level is less than or equal to the current av_log_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message expressed using a "Logging Constant". + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. + /// The arguments referenced by the format string. 
+ public static void av_vlog(void* @avcl, int @level, string @fmt, byte* @vl) => vectors.av_vlog(@avcl, @level, @fmt, @vl); + + /// Write a packet to an output media file. + /// media file handle + /// The packet containing the data to be written. Note that unlike av_interleaved_write_frame(), this function does not take ownership of the packet passed to it (though some muxers may make an internal reference to the input packet). This parameter can be NULL (at any time, not just at the end), in order to immediately flush data buffered within the muxer, for muxers that buffer up data internally before writing it to the output. Packet's "stream_index" field must be set to the index of the corresponding stream in "s->streams". The timestamps ( "pts", "dts") must be set to correct values in the stream's timebase (unless the output format is flagged with the AVFMT_NOTIMESTAMPS flag, then they can be set to AV_NOPTS_VALUE). The dts for subsequent packets passed to this function must be strictly increasing when compared in their respective timebases (unless the output format is flagged with the AVFMT_TS_NONSTRICT, then they merely have to be nondecreasing). "duration") should also be set if known. + /// < 0 on error, = 0 if OK, 1 if flushed and there is no more data to flush + public static int av_write_frame(AVFormatContext* @s, AVPacket* @pkt) => vectors.av_write_frame(@s, @pkt); + + public static void av_write_image_line(ushort* @src, ref byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w) => vectors.av_write_image_line(@src, ref @data, @linesize, @desc, @x, @y, @c, @w); + + /// Write the values from src to the pixel format component c of an image line. + /// array containing the values to write + /// the array containing the pointers to the planes of the image to write into. It is supposed to be zeroed. 
+ /// the array containing the linesizes of the image + /// the pixel format descriptor for the image + /// the horizontal coordinate of the first pixel to write + /// the vertical coordinate of the first pixel to write + /// the width of the line to write, that is the number of values to write to the image line + /// size of elements in src array (2 or 4 byte) + public static void av_write_image_line2(void* @src, ref byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @src_element_size) => vectors.av_write_image_line2(@src, ref @data, @linesize, @desc, @x, @y, @c, @w, @src_element_size); + + /// Write the stream trailer to an output media file and free the file private data. + /// media file handle + /// 0 if OK, AVERROR_xxx on error + public static int av_write_trailer(AVFormatContext* @s) => vectors.av_write_trailer(@s); + + /// Write an uncoded frame to an output media file. + public static int av_write_uncoded_frame(AVFormatContext* @s, int @stream_index, AVFrame* @frame) => vectors.av_write_uncoded_frame(@s, @stream_index, @frame); + + /// Test whether a muxer supports uncoded frame. + /// >=0 if an uncoded frame can be written to that muxer and stream, < 0 if not + public static int av_write_uncoded_frame_query(AVFormatContext* @s, int @stream_index) => vectors.av_write_uncoded_frame_query(@s, @stream_index); + + /// Encode extradata length to a buffer. Used by xiph codecs. + /// buffer to write to; must be at least (v/255+1) bytes long + /// size of extradata in bytes + /// number of bytes written to the buffer. + public static uint av_xiphlacing(byte* @s, uint @v) => vectors.av_xiphlacing(@s, @v); + + /// Modify width and height values so that they will result in a memory buffer that is acceptable for the codec if you do not use any horizontal padding. 
+ public static void avcodec_align_dimensions(AVCodecContext* @s, int* @width, int* @height) => vectors.avcodec_align_dimensions(@s, @width, @height); + + /// Modify width and height values so that they will result in a memory buffer that is acceptable for the codec if you also ensure that all line sizes are a multiple of the respective linesize_align[i]. + public static void avcodec_align_dimensions2(AVCodecContext* @s, int* @width, int* @height, ref int8 @linesize_align) => vectors.avcodec_align_dimensions2(@s, @width, @height, ref @linesize_align); + + /// Allocate an AVCodecContext and set its fields to default values. The resulting struct should be freed with avcodec_free_context(). + /// if non-NULL, allocate private data and initialize defaults for the given codec. It is illegal to then call avcodec_open2() with a different codec. If NULL, then the codec-specific defaults won't be initialized, which may result in suboptimal default settings (this is important mainly for encoders, e.g. libx264). + /// An AVCodecContext filled with default values or NULL on failure. + public static AVCodecContext* avcodec_alloc_context3(AVCodec* @codec) => vectors.avcodec_alloc_context3(@codec); + + /// Converts swscale x/y chroma position to AVChromaLocation. + /// horizontal chroma sample position + /// vertical chroma sample position + public static AVChromaLocation avcodec_chroma_pos_to_enum(int @xpos, int @ypos) => vectors.avcodec_chroma_pos_to_enum(@xpos, @ypos); + + /// Close a given AVCodecContext and free all the data associated with it (but not the AVCodecContext itself). + public static int avcodec_close(AVCodecContext* @avctx) => vectors.avcodec_close(@avctx); + + /// Return the libavcodec build-time configuration. + public static string avcodec_configuration() => vectors.avcodec_configuration(); + + /// Decode a subtitle message. Return a negative value on error, otherwise return the number of bytes used. If no subtitle could be decompressed, got_sub_ptr is zero. 
Otherwise, the subtitle is stored in *sub. Note that AV_CODEC_CAP_DR1 is not available for subtitle codecs. This is for simplicity, because the performance difference is expected to be negligible and reusing a get_buffer written for video codecs would probably perform badly due to a potentially very different allocation pattern. + /// the codec context + /// The preallocated AVSubtitle in which the decoded subtitle will be stored, must be freed with avsubtitle_free if *got_sub_ptr is set. + /// Zero if no subtitle could be decompressed, otherwise, it is nonzero. + /// The input AVPacket containing the input buffer. + public static int avcodec_decode_subtitle2(AVCodecContext* @avctx, AVSubtitle* @sub, int* @got_sub_ptr, AVPacket* @avpkt) => vectors.avcodec_decode_subtitle2(@avctx, @sub, @got_sub_ptr, @avpkt); + + public static int avcodec_default_execute(AVCodecContext* @c, avcodec_default_execute_func_func @func, void* @arg, int* @ret, int @count, int @size) => vectors.avcodec_default_execute(@c, @func, @arg, @ret, @count, @size); + + public static int avcodec_default_execute2(AVCodecContext* @c, avcodec_default_execute2_func_func @func, void* @arg, int* @ret, int @count) => vectors.avcodec_default_execute2(@c, @func, @arg, @ret, @count); + + /// The default callback for AVCodecContext.get_buffer2(). It is made public so it can be called by custom get_buffer2() implementations for decoders without AV_CODEC_CAP_DR1 set. + public static int avcodec_default_get_buffer2(AVCodecContext* @s, AVFrame* @frame, int @flags) => vectors.avcodec_default_get_buffer2(@s, @frame, @flags); + + /// The default callback for AVCodecContext.get_encode_buffer(). It is made public so it can be called by custom get_encode_buffer() implementations for encoders without AV_CODEC_CAP_DR1 set. 
+ public static int avcodec_default_get_encode_buffer(AVCodecContext* @s, AVPacket* @pkt, int @flags) => vectors.avcodec_default_get_encode_buffer(@s, @pkt, @flags); + + public static AVPixelFormat avcodec_default_get_format(AVCodecContext* @s, AVPixelFormat* @fmt) => vectors.avcodec_default_get_format(@s, @fmt); + + /// Returns descriptor for given codec ID or NULL if no descriptor exists. + /// descriptor for given codec ID or NULL if no descriptor exists. + public static AVCodecDescriptor* avcodec_descriptor_get(AVCodecID @id) => vectors.avcodec_descriptor_get(@id); + + /// Returns codec descriptor with the given name or NULL if no such descriptor exists. + /// codec descriptor with the given name or NULL if no such descriptor exists. + public static AVCodecDescriptor* avcodec_descriptor_get_by_name(string @name) => vectors.avcodec_descriptor_get_by_name(@name); + + /// Iterate over all codec descriptors known to libavcodec. + /// previous descriptor. NULL to get the first descriptor. + /// next descriptor or NULL after the last descriptor + public static AVCodecDescriptor* avcodec_descriptor_next(AVCodecDescriptor* @prev) => vectors.avcodec_descriptor_next(@prev); + + /// @{ + public static int avcodec_encode_subtitle(AVCodecContext* @avctx, byte* @buf, int @buf_size, AVSubtitle* @sub) => vectors.avcodec_encode_subtitle(@avctx, @buf, @buf_size, @sub); + + /// Converts AVChromaLocation to swscale x/y chroma position. + /// horizontal chroma sample position + /// vertical chroma sample position + public static int avcodec_enum_to_chroma_pos(int* @xpos, int* @ypos, AVChromaLocation @pos) => vectors.avcodec_enum_to_chroma_pos(@xpos, @ypos, @pos); + + /// Fill AVFrame audio data and linesize pointers. + /// the AVFrame frame->nb_samples must be set prior to calling the function. This function fills in frame->data, frame->extended_data, frame->linesize[0]. 
+ /// channel count + /// sample format + /// buffer to use for frame data + /// size of buffer + /// plane size sample alignment (0 = default) + /// >=0 on success, negative error code on failure + public static int avcodec_fill_audio_frame(AVFrame* @frame, int @nb_channels, AVSampleFormat @sample_fmt, byte* @buf, int @buf_size, int @align) => vectors.avcodec_fill_audio_frame(@frame, @nb_channels, @sample_fmt, @buf, @buf_size, @align); + + /// Find the best pixel format to convert to given a certain source pixel format. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. avcodec_find_best_pix_fmt_of_2() searches which of the given pixel formats should be used to suffer the least amount of loss. The pixel formats from which it chooses one, are determined by the pix_fmt_list parameter. + /// AV_PIX_FMT_NONE terminated array of pixel formats to choose from + /// source pixel format + /// Whether the source pixel format alpha channel is used. + /// Combination of flags informing you what kind of losses will occur. + /// The best pixel format to convert to or -1 if none was found. + public static AVPixelFormat avcodec_find_best_pix_fmt_of_list(AVPixelFormat* @pix_fmt_list, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr) => vectors.avcodec_find_best_pix_fmt_of_list(@pix_fmt_list, @src_pix_fmt, @has_alpha, @loss_ptr); + + /// Find a registered decoder with a matching codec ID. + /// AVCodecID of the requested decoder + /// A decoder if one was found, NULL otherwise. + public static AVCodec* avcodec_find_decoder(AVCodecID @id) => vectors.avcodec_find_decoder(@id); + + /// Find a registered decoder with the specified name. + /// name of the requested decoder + /// A decoder if one was found, NULL otherwise. 
+ public static AVCodec* avcodec_find_decoder_by_name(string @name) => vectors.avcodec_find_decoder_by_name(@name); + + /// Find a registered encoder with a matching codec ID. + /// AVCodecID of the requested encoder + /// An encoder if one was found, NULL otherwise. + public static AVCodec* avcodec_find_encoder(AVCodecID @id) => vectors.avcodec_find_encoder(@id); + + /// Find a registered encoder with the specified name. + /// name of the requested encoder + /// An encoder if one was found, NULL otherwise. + public static AVCodec* avcodec_find_encoder_by_name(string @name) => vectors.avcodec_find_encoder_by_name(@name); + + /// Reset the internal codec state / flush internal buffers. Should be called e.g. when seeking or when switching to a different stream. + public static void avcodec_flush_buffers(AVCodecContext* @avctx) => vectors.avcodec_flush_buffers(@avctx); + + /// Free the codec context and everything associated with it and write NULL to the provided pointer. + public static void avcodec_free_context(AVCodecContext** @avctx) => vectors.avcodec_free_context(@avctx); + + /// Get the AVClass for AVCodecContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + public static AVClass* avcodec_get_class() => vectors.avcodec_get_class(); + + [Obsolete("This function should not be used.")] + public static AVClass* avcodec_get_frame_class() => vectors.avcodec_get_frame_class(); + + /// Retrieve supported hardware configurations for a codec. + public static AVCodecHWConfig* avcodec_get_hw_config(AVCodec* @codec, int @index) => vectors.avcodec_get_hw_config(@codec, @index); + + /// Create and return a AVHWFramesContext with values adequate for hardware decoding. This is meant to get called from the get_format callback, and is a helper for preparing a AVHWFramesContext for AVCodecContext.hw_frames_ctx. This API is for decoding with certain hardware acceleration modes/APIs only. 
+ /// The context which is currently calling get_format, and which implicitly contains all state needed for filling the returned AVHWFramesContext properly. + /// A reference to the AVHWDeviceContext describing the device which will be used by the hardware decoder. + /// The hwaccel format you are going to return from get_format. + /// On success, set to a reference to an _uninitialized_ AVHWFramesContext, created from the given device_ref. Fields will be set to values required for decoding. Not changed if an error is returned. + /// zero on success, a negative value on error. The following error codes have special semantics: AVERROR(ENOENT): the decoder does not support this functionality. Setup is always manual, or it is a decoder which does not support setting AVCodecContext.hw_frames_ctx at all, or it is a software format. AVERROR(EINVAL): it is known that hardware decoding is not supported for this configuration, or the device_ref is not supported for the hwaccel referenced by hw_pix_fmt. + public static int avcodec_get_hw_frames_parameters(AVCodecContext* @avctx, AVBufferRef* @device_ref, AVPixelFormat @hw_pix_fmt, AVBufferRef** @out_frames_ref) => vectors.avcodec_get_hw_frames_parameters(@avctx, @device_ref, @hw_pix_fmt, @out_frames_ref); + + /// Get the name of a codec. + /// a static string identifying the codec; never NULL + public static string avcodec_get_name(AVCodecID @id) => vectors.avcodec_get_name(@id); + + /// Get the AVClass for AVSubtitleRect. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + public static AVClass* avcodec_get_subtitle_rect_class() => vectors.avcodec_get_subtitle_rect_class(); + + /// Get the type of the given codec. + public static AVMediaType avcodec_get_type(AVCodecID @codec_id) => vectors.avcodec_get_type(@codec_id); + + /// Returns a positive value if s is open (i.e. avcodec_open2() was called on it with no corresponding avcodec_close()), 0 otherwise. 
+ /// a positive value if s is open (i.e. avcodec_open2() was called on it with no corresponding avcodec_close()), 0 otherwise. + public static int avcodec_is_open(AVCodecContext* @s) => vectors.avcodec_is_open(@s); + + /// Return the libavcodec license. + public static string avcodec_license() => vectors.avcodec_license(); + + /// Initialize the AVCodecContext to use the given AVCodec. Prior to using this function the context has to be allocated with avcodec_alloc_context3(). + /// The context to initialize. + /// The codec to open this context for. If a non-NULL codec has been previously passed to avcodec_alloc_context3() or for this context, then this parameter MUST be either NULL or equal to the previously passed codec. + /// A dictionary filled with AVCodecContext and codec-private options. On return this object will be filled with options that were not found. + /// zero on success, a negative value on error + public static int avcodec_open2(AVCodecContext* @avctx, AVCodec* @codec, AVDictionary** @options) => vectors.avcodec_open2(@avctx, @codec, @options); + + /// Allocate a new AVCodecParameters and set its fields to default values (unknown/invalid/0). The returned struct must be freed with avcodec_parameters_free(). + public static AVCodecParameters* avcodec_parameters_alloc() => vectors.avcodec_parameters_alloc(); + + /// Copy the contents of src to dst. Any allocated fields in dst are freed and replaced with newly allocated duplicates of the corresponding fields in src. + /// >= 0 on success, a negative AVERROR code on failure. + public static int avcodec_parameters_copy(AVCodecParameters* @dst, AVCodecParameters* @src) => vectors.avcodec_parameters_copy(@dst, @src); + + /// Free an AVCodecParameters instance and everything associated with it and write NULL to the supplied pointer. 
+ public static void avcodec_parameters_free(AVCodecParameters** @par) => vectors.avcodec_parameters_free(@par); + + /// Fill the parameters struct based on the values from the supplied codec context. Any allocated fields in par are freed and replaced with duplicates of the corresponding fields in codec. + /// >= 0 on success, a negative AVERROR code on failure + public static int avcodec_parameters_from_context(AVCodecParameters* @par, AVCodecContext* @codec) => vectors.avcodec_parameters_from_context(@par, @codec); + + /// Fill the codec context based on the values from the supplied codec parameters. Any allocated fields in codec that have a corresponding field in par are freed and replaced with duplicates of the corresponding field in par. Fields in codec that do not have a counterpart in par are not touched. + /// >= 0 on success, a negative AVERROR code on failure. + public static int avcodec_parameters_to_context(AVCodecContext* @codec, AVCodecParameters* @par) => vectors.avcodec_parameters_to_context(@codec, @par); + + /// Return a value representing the fourCC code associated to the pixel format pix_fmt, or 0 if no associated fourCC code can be found. + public static uint avcodec_pix_fmt_to_codec_tag(AVPixelFormat @pix_fmt) => vectors.avcodec_pix_fmt_to_codec_tag(@pix_fmt); + + /// Return a name for the specified profile, if available. + /// the ID of the codec to which the requested profile belongs + /// the profile value for which a name is requested + /// A name for the profile if found, NULL otherwise. + public static string avcodec_profile_name(AVCodecID @codec_id, int @profile) => vectors.avcodec_profile_name(@codec_id, @profile); + + /// Return decoded output data from a decoder. + /// codec context + /// This will be set to a reference-counted video or audio frame (depending on the decoder type) allocated by the decoder. Note that the function will always call av_frame_unref(frame) before doing anything else. 
+ /// 0: success, a frame was returned AVERROR(EAGAIN): output is not available in this state - user must try to send new input AVERROR_EOF: the decoder has been fully flushed, and there will be no more output frames AVERROR(EINVAL): codec not opened, or it is an encoder AVERROR_INPUT_CHANGED: current decoded frame has changed parameters with respect to first decoded frame. Applicable when flag AV_CODEC_FLAG_DROPCHANGED is set. other negative values: legitimate decoding errors + public static int avcodec_receive_frame(AVCodecContext* @avctx, AVFrame* @frame) => vectors.avcodec_receive_frame(@avctx, @frame); + + /// Read encoded data from the encoder. + /// codec context + /// This will be set to a reference-counted packet allocated by the encoder. Note that the function will always call av_packet_unref(avpkt) before doing anything else. + /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): output is not available in the current state - user must try to send input AVERROR_EOF: the encoder has been fully flushed, and there will be no more output packets AVERROR(EINVAL): codec not opened, or it is a decoder other errors: legitimate encoding errors + public static int avcodec_receive_packet(AVCodecContext* @avctx, AVPacket* @avpkt) => vectors.avcodec_receive_packet(@avctx, @avpkt); + + /// Supply a raw video or audio frame to the encoder. Use avcodec_receive_packet() to retrieve buffered output packets. + /// codec context + /// AVFrame containing the raw audio or video frame to be encoded. Ownership of the frame remains with the caller, and the encoder will not write to the frame. The encoder may create a reference to the frame data (or copy it if the frame is not reference-counted). It can be NULL, in which case it is considered a flush packet. This signals the end of the stream. If the encoder still has packets buffered, it will return them after this call. 
Once flushing mode has been entered, additional flush packets are ignored, and sending frames will return AVERROR_EOF. + /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): input is not accepted in the current state - user must read output with avcodec_receive_packet() (once all output is read, the packet should be resent, and the call will not fail with EAGAIN). AVERROR_EOF: the encoder has been flushed, and no new frames can be sent to it AVERROR(EINVAL): codec not opened, it is a decoder, or requires flush AVERROR(ENOMEM): failed to add packet to internal queue, or similar other errors: legitimate encoding errors + public static int avcodec_send_frame(AVCodecContext* @avctx, AVFrame* @frame) => vectors.avcodec_send_frame(@avctx, @frame); + + /// Supply raw packet data as input to a decoder. + /// codec context + /// The input AVPacket. Usually, this will be a single video frame, or several complete audio frames. Ownership of the packet remains with the caller, and the decoder will not write to the packet. The decoder may create a reference to the packet data (or copy it if the packet is not reference-counted). Unlike with older APIs, the packet is always fully consumed, and if it contains multiple frames (e.g. some audio codecs), will require you to call avcodec_receive_frame() multiple times afterwards before you can send a new packet. It can be NULL (or an AVPacket with data set to NULL and size set to 0); in this case, it is considered a flush packet, which signals the end of the stream. Sending the first flush packet will return success. Subsequent ones are unnecessary and will return AVERROR_EOF. If the decoder still has frames buffered, it will return them after sending a flush packet. + /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): input is not accepted in the current state - user must read output with avcodec_receive_frame() (once all output is read, the packet should be resent, and the call will not fail with EAGAIN). 
AVERROR_EOF: the decoder has been flushed, and no new packets can be sent to it (also returned if more than 1 flush packet is sent) AVERROR(EINVAL): codec not opened, it is an encoder, or requires flush AVERROR(ENOMEM): failed to add packet to internal queue, or similar other errors: legitimate decoding errors + public static int avcodec_send_packet(AVCodecContext* @avctx, AVPacket* @avpkt) => vectors.avcodec_send_packet(@avctx, @avpkt); + + /// @} + public static void avcodec_string(byte* @buf, int @buf_size, AVCodecContext* @enc, int @encode) => vectors.avcodec_string(@buf, @buf_size, @enc, @encode); + + /// Return the LIBAVCODEC_VERSION_INT constant. + public static uint avcodec_version() => vectors.avcodec_version(); + + /// Send control message from application to device. + /// device context. + /// message type. + /// message data. Exact type depends on message type. + /// size of message data. + /// >= 0 on success, negative on error. AVERROR(ENOSYS) when device doesn't implement handler of the message. + public static int avdevice_app_to_dev_control_message(AVFormatContext* @s, AVAppToDevMessageType @type, void* @data, ulong @data_size) => vectors.avdevice_app_to_dev_control_message(@s, @type, @data, @data_size); + + /// Initialize capabilities probing API based on AVOption API. + /// Device capabilities data. Pointer to a NULL pointer must be passed. + /// Context of the device. + /// An AVDictionary filled with device-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. The same options must be passed later to avformat_write_header() for output devices or avformat_open_input() for input devices, or at any other place that affects device-private options. + /// >= 0 on success, negative otherwise. 
+ [Obsolete()] + public static int avdevice_capabilities_create(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s, AVDictionary** @device_options) => vectors.avdevice_capabilities_create(@caps, @s, @device_options); + + /// Free resources created by avdevice_capabilities_create() + /// Device capabilities data to be freed. + /// Context of the device. + [Obsolete()] + public static void avdevice_capabilities_free(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s) => vectors.avdevice_capabilities_free(@caps, @s); + + /// Return the libavdevice build-time configuration. + public static string avdevice_configuration() => vectors.avdevice_configuration(); + + /// Send control message from device to application. + /// device context. + /// message type. + /// message data. Can be NULL. + /// size of message data. + /// >= 0 on success, negative on error. AVERROR(ENOSYS) when application doesn't implement handler of the message. + public static int avdevice_dev_to_app_control_message(AVFormatContext* @s, AVDevToAppMessageType @type, void* @data, ulong @data_size) => vectors.avdevice_dev_to_app_control_message(@s, @type, @data, @data_size); + + /// Convenient function to free result of avdevice_list_devices(). + public static void avdevice_free_list_devices(AVDeviceInfoList** @device_list) => vectors.avdevice_free_list_devices(@device_list); + + /// Return the libavdevice license. + public static string avdevice_license() => vectors.avdevice_license(); + + /// List devices. + /// device context. + /// list of autodetected devices. + /// count of autodetected devices, negative on error. + public static int avdevice_list_devices(AVFormatContext* @s, AVDeviceInfoList** @device_list) => vectors.avdevice_list_devices(@s, @device_list); + + /// List devices. + /// device format. May be NULL if device name is set. + /// device name. May be NULL if device format is set. + /// An AVDictionary filled with device-private options. May be NULL. 
The same options must be passed later to avformat_write_header() for output devices or avformat_open_input() for input devices, or at any other place that affects device-private options. + /// list of autodetected devices + /// count of autodetected devices, negative on error. + public static int avdevice_list_input_sources(AVInputFormat* @device, string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list) => vectors.avdevice_list_input_sources(@device, @device_name, @device_options, @device_list); + + public static int avdevice_list_output_sinks(AVOutputFormat* @device, string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list) => vectors.avdevice_list_output_sinks(@device, @device_name, @device_options, @device_list); + + /// Initialize libavdevice and register all the input and output devices. + public static void avdevice_register_all() => vectors.avdevice_register_all(); + + /// Return the LIBAVDEVICE_VERSION_INT constant. + public static uint avdevice_version() => vectors.avdevice_version(); + + /// Negotiate the media format, dimensions, etc of all inputs to a filter. + /// the filter to negotiate the properties for its inputs + /// zero on successful negotiation + public static int avfilter_config_links(AVFilterContext* @filter) => vectors.avfilter_config_links(@filter); + + /// Return the libavfilter build-time configuration. + public static string avfilter_configuration() => vectors.avfilter_configuration(); + + /// Get the number of elements in an AVFilter's inputs or outputs array. + public static uint avfilter_filter_pad_count(AVFilter* @filter, int @is_output) => vectors.avfilter_filter_pad_count(@filter, @is_output); + + /// Free a filter context. This will also remove the filter from its filtergraph's list of filters. + /// the filter to free + public static void avfilter_free(AVFilterContext* @filter) => vectors.avfilter_free(@filter); + + /// Get a filter definition matching the given name. 
+ /// the filter name to find + /// the filter definition, if any matching one is registered. NULL if none found. + public static AVFilter* avfilter_get_by_name(string @name) => vectors.avfilter_get_by_name(@name); + + /// Returns AVClass for AVFilterContext. + /// AVClass for AVFilterContext. + public static AVClass* avfilter_get_class() => vectors.avfilter_get_class(); + + /// Allocate a filter graph. + /// the allocated filter graph on success or NULL. + public static AVFilterGraph* avfilter_graph_alloc() => vectors.avfilter_graph_alloc(); + + /// Create a new filter instance in a filter graph. + /// graph in which the new filter will be used + /// the filter to create an instance of + /// Name to give to the new instance (will be copied to AVFilterContext.name). This may be used by the caller to identify different filters, libavfilter itself assigns no semantics to this parameter. May be NULL. + /// the context of the newly created filter instance (note that it is also retrievable directly through AVFilterGraph.filters or with avfilter_graph_get_filter()) on success or NULL on failure. + public static AVFilterContext* avfilter_graph_alloc_filter(AVFilterGraph* @graph, AVFilter* @filter, string @name) => vectors.avfilter_graph_alloc_filter(@graph, @filter, @name); + + /// Check validity and configure all the links and formats in the graph. + /// the filter graph + /// context used for logging + /// >= 0 in case of success, a negative AVERROR code otherwise + public static int avfilter_graph_config(AVFilterGraph* @graphctx, void* @log_ctx) => vectors.avfilter_graph_config(@graphctx, @log_ctx); + + /// Create and add a filter instance into an existing graph. The filter instance is created from the filter filt and inited with the parameter args. opaque is currently ignored. 
+ /// the instance name to give to the created filter instance + /// the filter graph + /// a negative AVERROR error code in case of failure, a non negative value otherwise + public static int avfilter_graph_create_filter(AVFilterContext** @filt_ctx, AVFilter* @filt, string @name, string @args, void* @opaque, AVFilterGraph* @graph_ctx) => vectors.avfilter_graph_create_filter(@filt_ctx, @filt, @name, @args, @opaque, @graph_ctx); + + /// Dump a graph into a human-readable string representation. + /// the graph to dump + /// formatting options; currently ignored + /// a string, or NULL in case of memory allocation failure; the string must be freed using av_free + public static byte* avfilter_graph_dump(AVFilterGraph* @graph, string @options) => vectors.avfilter_graph_dump(@graph, @options); + + /// Free a graph, destroy its links, and set *graph to NULL. If *graph is NULL, do nothing. + public static void avfilter_graph_free(AVFilterGraph** @graph) => vectors.avfilter_graph_free(@graph); + + /// Get a filter instance identified by instance name from graph. + /// filter graph to search through. + /// filter instance name (should be unique in the graph). + /// the pointer to the found filter instance or NULL if it cannot be found. + public static AVFilterContext* avfilter_graph_get_filter(AVFilterGraph* @graph, string @name) => vectors.avfilter_graph_get_filter(@graph, @name); + + /// Add a graph described by a string to a graph. + /// the filter graph where to link the parsed graph context + /// string to be parsed + /// linked list to the inputs of the graph + /// linked list to the outputs of the graph + /// zero on success, a negative AVERROR code on error + public static int avfilter_graph_parse(AVFilterGraph* @graph, string @filters, AVFilterInOut* @inputs, AVFilterInOut* @outputs, void* @log_ctx) => vectors.avfilter_graph_parse(@graph, @filters, @inputs, @outputs, @log_ctx); + + /// Add a graph described by a string to a graph. 
+ /// the filter graph where to link the parsed graph context + /// string to be parsed + /// pointer to a linked list to the inputs of the graph, may be NULL. If non-NULL, *inputs is updated to contain the list of open inputs after the parsing, should be freed with avfilter_inout_free(). + /// pointer to a linked list to the outputs of the graph, may be NULL. If non-NULL, *outputs is updated to contain the list of open outputs after the parsing, should be freed with avfilter_inout_free(). + /// non negative on success, a negative AVERROR code on error + public static int avfilter_graph_parse_ptr(AVFilterGraph* @graph, string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs, void* @log_ctx) => vectors.avfilter_graph_parse_ptr(@graph, @filters, @inputs, @outputs, @log_ctx); + + /// Add a graph described by a string to a graph. + /// the filter graph where to link the parsed graph context + /// string to be parsed + /// a linked list of all free (unlinked) inputs of the parsed graph will be returned here. It is to be freed by the caller using avfilter_inout_free(). + /// a linked list of all free (unlinked) outputs of the parsed graph will be returned here. It is to be freed by the caller using avfilter_inout_free(). + /// zero on success, a negative AVERROR code on error + public static int avfilter_graph_parse2(AVFilterGraph* @graph, string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs) => vectors.avfilter_graph_parse2(@graph, @filters, @inputs, @outputs); + + /// Queue a command for one or more filter instances. + /// the filter graph + /// the filter(s) to which the command should be sent "all" sends to all filters otherwise it can be a filter or filter instance name which will send the command to all matching filters. 
+ /// the command to send, for handling simplicity all commands must be alphanumeric only + /// the argument for the command + /// time at which the command should be sent to the filter + public static int avfilter_graph_queue_command(AVFilterGraph* @graph, string @target, string @cmd, string @arg, int @flags, double @ts) => vectors.avfilter_graph_queue_command(@graph, @target, @cmd, @arg, @flags, @ts); + + /// Request a frame on the oldest sink link. + /// the return value of ff_request_frame(), or AVERROR_EOF if all links returned AVERROR_EOF + public static int avfilter_graph_request_oldest(AVFilterGraph* @graph) => vectors.avfilter_graph_request_oldest(@graph); + + /// Send a command to one or more filter instances. + /// the filter graph + /// the filter(s) to which the command should be sent "all" sends to all filters otherwise it can be a filter or filter instance name which will send the command to all matching filters. + /// the command to send, for handling simplicity all commands must be alphanumeric only + /// the argument for the command + /// a buffer with size res_size where the filter(s) can return a response. + public static int avfilter_graph_send_command(AVFilterGraph* @graph, string @target, string @cmd, string @arg, byte* @res, int @res_len, int @flags) => vectors.avfilter_graph_send_command(@graph, @target, @cmd, @arg, @res, @res_len, @flags); + + /// Enable or disable automatic format conversion inside the graph. + /// any of the AVFILTER_AUTO_CONVERT_* constants + public static void avfilter_graph_set_auto_convert(AVFilterGraph* @graph, uint @flags) => vectors.avfilter_graph_set_auto_convert(@graph, @flags); + + /// Initialize a filter with the supplied dictionary of options. + /// uninitialized filter context to initialize + /// An AVDictionary filled with options for this filter. On return this parameter will be destroyed and replaced with a dict containing options that were not found. This dictionary must be freed by the caller.
May be NULL, then this function is equivalent to avfilter_init_str() with the second parameter set to NULL. + /// 0 on success, a negative AVERROR on failure + public static int avfilter_init_dict(AVFilterContext* @ctx, AVDictionary** @options) => vectors.avfilter_init_dict(@ctx, @options); + + /// Initialize a filter with the supplied parameters. + /// uninitialized filter context to initialize + /// Options to initialize the filter with. This must be a ':'-separated list of options in the 'key=value' form. May be NULL if the options have been set directly using the AVOptions API or there are no options that need to be set. + /// 0 on success, a negative AVERROR on failure + public static int avfilter_init_str(AVFilterContext* @ctx, string @args) => vectors.avfilter_init_str(@ctx, @args); + + /// Allocate a single AVFilterInOut entry. Must be freed with avfilter_inout_free(). + /// allocated AVFilterInOut on success, NULL on failure. + public static AVFilterInOut* avfilter_inout_alloc() => vectors.avfilter_inout_alloc(); + + /// Free the supplied list of AVFilterInOut and set *inout to NULL. If *inout is NULL, do nothing. + public static void avfilter_inout_free(AVFilterInOut** @inout) => vectors.avfilter_inout_free(@inout); + + /// Insert a filter in the middle of an existing link. + /// the link into which the filter should be inserted + /// the filter to be inserted + /// the input pad on the filter to connect + /// the output pad on the filter to connect + /// zero on success + public static int avfilter_insert_filter(AVFilterLink* @link, AVFilterContext* @filt, uint @filt_srcpad_idx, uint @filt_dstpad_idx) => vectors.avfilter_insert_filter(@link, @filt, @filt_srcpad_idx, @filt_dstpad_idx); + + /// Return the libavfilter license. + public static string avfilter_license() => vectors.avfilter_license(); + + /// Link two filters together. 
+ /// the source filter + /// index of the output pad on the source filter + /// the destination filter + /// index of the input pad on the destination filter + /// zero on success + public static int avfilter_link(AVFilterContext* @src, uint @srcpad, AVFilterContext* @dst, uint @dstpad) => vectors.avfilter_link(@src, @srcpad, @dst, @dstpad); + + /// Free the link in *link, and set its pointer to NULL. + public static void avfilter_link_free(AVFilterLink** @link) => vectors.avfilter_link_free(@link); + + /// Get the number of elements in an AVFilter's inputs or outputs array. + [Obsolete("Use avfilter_filter_pad_count() instead.")] + public static int avfilter_pad_count(AVFilterPad* @pads) => vectors.avfilter_pad_count(@pads); + + /// Get the name of an AVFilterPad. + /// an array of AVFilterPads + /// index of the pad in the array; it is the caller's responsibility to ensure the index is valid + /// name of the pad_idx'th pad in pads + public static string avfilter_pad_get_name(AVFilterPad* @pads, int @pad_idx) => vectors.avfilter_pad_get_name(@pads, @pad_idx); + + /// Get the type of an AVFilterPad. + /// an array of AVFilterPads + /// index of the pad in the array; it is the caller's responsibility to ensure the index is valid + /// type of the pad_idx'th pad in pads + public static AVMediaType avfilter_pad_get_type(AVFilterPad* @pads, int @pad_idx) => vectors.avfilter_pad_get_type(@pads, @pad_idx); + + /// Make the filter instance process a command. It is recommended to use avfilter_graph_send_command(). + public static int avfilter_process_command(AVFilterContext* @filter, string @cmd, string @arg, byte* @res, int @res_len, int @flags) => vectors.avfilter_process_command(@filter, @cmd, @arg, @res, @res_len, @flags); + + /// Return the LIBAVFILTER_VERSION_INT constant. + public static uint avfilter_version() => vectors.avfilter_version(); + + /// Allocate an AVFormatContext. 
avformat_free_context() can be used to free the context and everything allocated by the framework within it. + public static AVFormatContext* avformat_alloc_context() => vectors.avformat_alloc_context(); + + /// Allocate an AVFormatContext for an output format. avformat_free_context() can be used to free the context and everything allocated by the framework within it. + /// format to use for allocating the context, if NULL format_name and filename are used instead + /// the name of output format to use for allocating the context, if NULL filename is used instead + /// the name of the filename to use for allocating the context, may be NULL + /// >= 0 in case of success, a negative AVERROR code in case of failure + public static int avformat_alloc_output_context2(AVFormatContext** @ctx, AVOutputFormat* @oformat, string @format_name, string @filename) => vectors.avformat_alloc_output_context2(@ctx, @oformat, @format_name, @filename); + + /// Close an opened input AVFormatContext. Free it and all its contents and set *s to NULL. + public static void avformat_close_input(AVFormatContext** @s) => vectors.avformat_close_input(@s); + + /// Return the libavformat build-time configuration. + public static string avformat_configuration() => vectors.avformat_configuration(); + + /// Read packets of a media file to get stream information. This is useful for file formats with no headers such as MPEG. This function also computes the real framerate in case of MPEG-2 repeat frame mode. The logical file position is not changed by this function; examined packets may be buffered for later processing. + /// media file handle + /// If non-NULL, an ic.nb_streams long array of pointers to dictionaries, where i-th member contains options for codec corresponding to i-th stream. On return each dictionary will be filled with options that were not found. 
+ /// >=0 if OK, AVERROR_xxx on error + public static int avformat_find_stream_info(AVFormatContext* @ic, AVDictionary** @options) => vectors.avformat_find_stream_info(@ic, @options); + + /// Discard all internally buffered data. This can be useful when dealing with discontinuities in the byte stream. Generally works only with formats that can resync. This includes headerless formats like MPEG-TS/TS but should also work with NUT, Ogg and in a limited way AVI for example. + /// media file handle + /// >=0 on success, error code otherwise + public static int avformat_flush(AVFormatContext* @s) => vectors.avformat_flush(@s); + + /// Free an AVFormatContext and all its streams. + /// context to free + public static void avformat_free_context(AVFormatContext* @s) => vectors.avformat_free_context(@s); + + /// Get the AVClass for AVFormatContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + public static AVClass* avformat_get_class() => vectors.avformat_get_class(); + + /// Returns the table mapping MOV FourCCs for audio to AVCodecID. + /// the table mapping MOV FourCCs for audio to AVCodecID. + public static AVCodecTag* avformat_get_mov_audio_tags() => vectors.avformat_get_mov_audio_tags(); + + /// Returns the table mapping MOV FourCCs for video to libavcodec AVCodecID. + /// the table mapping MOV FourCCs for video to libavcodec AVCodecID. + public static AVCodecTag* avformat_get_mov_video_tags() => vectors.avformat_get_mov_video_tags(); + + /// Returns the table mapping RIFF FourCCs for audio to AVCodecID. + /// the table mapping RIFF FourCCs for audio to AVCodecID. + public static AVCodecTag* avformat_get_riff_audio_tags() => vectors.avformat_get_riff_audio_tags(); + + /// @{ Get the tables mapping RIFF FourCCs to libavcodec AVCodecIDs. The tables are meant to be passed to av_codec_get_id()/av_codec_get_tag() as in the following code: + /// the table mapping RIFF FourCCs for video to libavcodec AVCodecID. 
+ public static AVCodecTag* avformat_get_riff_video_tags() => vectors.avformat_get_riff_video_tags(); + + /// Get the index entry count for the given AVStream. + /// stream + /// the number of index entries in the stream + public static int avformat_index_get_entries_count(AVStream* @st) => vectors.avformat_index_get_entries_count(@st); + + /// Get the AVIndexEntry corresponding to the given index. + /// Stream containing the requested AVIndexEntry. + /// The desired index. + /// A pointer to the requested AVIndexEntry if it exists, NULL otherwise. + public static AVIndexEntry* avformat_index_get_entry(AVStream* @st, int @idx) => vectors.avformat_index_get_entry(@st, @idx); + + /// Get the AVIndexEntry corresponding to the given timestamp. + /// Stream containing the requested AVIndexEntry. + /// If AVSEEK_FLAG_BACKWARD then the returned entry will correspond to the timestamp which is < = the requested one, if backward is 0, then it will be >= if AVSEEK_FLAG_ANY seek to any frame, only keyframes otherwise. + /// A pointer to the requested AVIndexEntry if it exists, NULL otherwise. + public static AVIndexEntry* avformat_index_get_entry_from_timestamp(AVStream* @st, long @wanted_timestamp, int @flags) => vectors.avformat_index_get_entry_from_timestamp(@st, @wanted_timestamp, @flags); + + /// Allocate the stream private data and initialize the codec, but do not write the header. May optionally be used before avformat_write_header to initialize stream parameters before actually writing the header. If using this function, do not pass the same options to avformat_write_header. + /// Media file handle, must be allocated with avformat_alloc_context(). Its oformat field must be set to the desired output format; Its pb field must be set to an already opened AVIOContext. + /// An AVDictionary filled with AVFormatContext and muxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. 
+ /// AVSTREAM_INIT_IN_WRITE_HEADER on success if the codec requires avformat_write_header to fully initialize, AVSTREAM_INIT_IN_INIT_OUTPUT on success if the codec has been fully initialized, negative AVERROR on failure. + public static int avformat_init_output(AVFormatContext* @s, AVDictionary** @options) => vectors.avformat_init_output(@s, @options); + + /// Return the libavformat license. + public static string avformat_license() => vectors.avformat_license(); + + /// Check if the stream st contained in s is matched by the stream specifier spec. + /// >0 if st is matched by spec; 0 if st is not matched by spec; AVERROR code if spec is invalid + public static int avformat_match_stream_specifier(AVFormatContext* @s, AVStream* @st, string @spec) => vectors.avformat_match_stream_specifier(@s, @st, @spec); + + /// Undo the initialization done by avformat_network_init. Call it only once for each time you called avformat_network_init. + public static int avformat_network_deinit() => vectors.avformat_network_deinit(); + + /// Do global initialization of network libraries. This is optional, and not recommended anymore. + public static int avformat_network_init() => vectors.avformat_network_init(); + + /// Add a new stream to a media file. + /// media file handle + /// unused, does nothing + /// newly created stream or NULL on error. + public static AVStream* avformat_new_stream(AVFormatContext* @s, AVCodec* @c) => vectors.avformat_new_stream(@s, @c); + + /// Open an input stream and read the header. The codecs are not opened. The stream must be closed with avformat_close_input(). + /// Pointer to user-supplied AVFormatContext (allocated by avformat_alloc_context). May be a pointer to NULL, in which case an AVFormatContext is allocated by this function and written into ps. Note that a user-supplied AVFormatContext will be freed on failure. + /// URL of the stream to open. + /// If non-NULL, this parameter forces a specific input format. 
Otherwise the format is autodetected. + /// A dictionary filled with AVFormatContext and demuxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. + /// 0 on success, a negative AVERROR on failure. + public static int avformat_open_input(AVFormatContext** @ps, string @url, AVInputFormat* @fmt, AVDictionary** @options) => vectors.avformat_open_input(@ps, @url, @fmt, @options); + + /// Test if the given container can store a codec. + /// container to check for compatibility + /// codec to potentially store in container + /// standards compliance level, one of FF_COMPLIANCE_* + /// 1 if codec with ID codec_id can be stored in ofmt, 0 if it cannot. A negative number if this information is not available. + public static int avformat_query_codec(AVOutputFormat* @ofmt, AVCodecID @codec_id, int @std_compliance) => vectors.avformat_query_codec(@ofmt, @codec_id, @std_compliance); + + public static int avformat_queue_attached_pictures(AVFormatContext* @s) => vectors.avformat_queue_attached_pictures(@s); + + /// Seek to timestamp ts. Seeking will be done so that the point from which all active streams can be presented successfully will be closest to ts and within min/max_ts. Active streams are all streams that have AVStream.discard < AVDISCARD_ALL. + /// media file handle + /// index of the stream which is used as time base reference + /// smallest acceptable timestamp + /// target timestamp + /// largest acceptable timestamp + /// flags + /// >=0 on success, error code otherwise + public static int avformat_seek_file(AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags) => vectors.avformat_seek_file(@s, @stream_index, @min_ts, @ts, @max_ts, @flags); + + /// Transfer internal timing information from one stream to another. 
+ /// target output format for ost + /// output stream which needs timings copy and adjustments + /// reference input stream to copy timings from + /// define from where the stream codec timebase needs to be imported + public static int avformat_transfer_internal_stream_timing_info(AVOutputFormat* @ofmt, AVStream* @ost, AVStream* @ist, AVTimebaseSource @copy_tb) => vectors.avformat_transfer_internal_stream_timing_info(@ofmt, @ost, @ist, @copy_tb); + + /// Return the LIBAVFORMAT_VERSION_INT constant. + public static uint avformat_version() => vectors.avformat_version(); + + /// Allocate the stream private data and write the stream header to an output media file. + /// Media file handle, must be allocated with avformat_alloc_context(). Its oformat field must be set to the desired output format; Its pb field must be set to an already opened AVIOContext. + /// An AVDictionary filled with AVFormatContext and muxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. + /// AVSTREAM_INIT_IN_WRITE_HEADER on success if the codec had not already been fully initialized in avformat_init, AVSTREAM_INIT_IN_INIT_OUTPUT on success if the codec had already been fully initialized in avformat_init, negative AVERROR on failure. + public static int avformat_write_header(AVFormatContext* @s, AVDictionary** @options) => vectors.avformat_write_header(@s, @options); + + /// Accept and allocate a client context on a server context. + /// the server context + /// the client context, must be unallocated + /// >= 0 on success or a negative value corresponding to an AVERROR on failure + public static int avio_accept(AVIOContext* @s, AVIOContext** @c) => vectors.avio_accept(@s, @c); + + /// Allocate and initialize an AVIOContext for buffered I/O. It must be later freed with avio_context_free(). + /// Memory block for input/output operations via AVIOContext. 
The buffer must be allocated with av_malloc() and friends. It may be freed and replaced with a new buffer by libavformat. AVIOContext.buffer holds the buffer currently in use, which must be later freed with av_free(). + /// The buffer size is very important for performance. For protocols with fixed blocksize it should be set to this blocksize. For others a typical size is a cache page, e.g. 4kb. + /// Set to 1 if the buffer should be writable, 0 otherwise. + /// An opaque pointer to user-specific data. + /// A function for refilling the buffer, may be NULL. For stream protocols, must never return 0 but rather a proper AVERROR code. + /// A function for writing the buffer contents, may be NULL. The function may not change the input buffers content. + /// A function for seeking to specified byte position, may be NULL. + /// Allocated AVIOContext or NULL on failure. + public static AVIOContext* avio_alloc_context(byte* @buffer, int @buffer_size, int @write_flag, void* @opaque, avio_alloc_context_read_packet_func @read_packet, avio_alloc_context_write_packet_func @write_packet, avio_alloc_context_seek_func @seek) => vectors.avio_alloc_context(@buffer, @buffer_size, @write_flag, @opaque, @read_packet, @write_packet, @seek); + + /// Return AVIO_FLAG_* access flags corresponding to the access permissions of the resource in url, or a negative value corresponding to an AVERROR code in case of failure. The returned access flags are masked by the value in flags. + public static int avio_check(string @url, int @flags) => vectors.avio_check(@url, @flags); + + /// Close the resource accessed by the AVIOContext s and free it. This function can only be used if s was opened by avio_open(). + /// 0 on success, an AVERROR < 0 on error. + public static int avio_close(AVIOContext* @s) => vectors.avio_close(@s); + + /// Close directory. + /// directory read context. + /// >=0 on success or negative on error. 
+ public static int avio_close_dir(AVIODirContext** @s) => vectors.avio_close_dir(@s); + + /// Return the written size and a pointer to the buffer. The buffer must be freed with av_free(). Padding of AV_INPUT_BUFFER_PADDING_SIZE is added to the buffer. + /// IO context + /// pointer to a byte buffer + /// the length of the byte buffer + public static int avio_close_dyn_buf(AVIOContext* @s, byte** @pbuffer) => vectors.avio_close_dyn_buf(@s, @pbuffer); + + /// Close the resource accessed by the AVIOContext *s, free it and set the pointer pointing to it to NULL. This function can only be used if s was opened by avio_open(). + /// 0 on success, an AVERROR < 0 on error. + public static int avio_closep(AVIOContext** @s) => vectors.avio_closep(@s); + + /// Free the supplied IO context and everything associated with it. + /// Double pointer to the IO context. This function will write NULL into s. + public static void avio_context_free(AVIOContext** @s) => vectors.avio_context_free(@s); + + /// Iterate through names of available protocols. + /// A private pointer representing current protocol. It must be a pointer to NULL on first iteration and will be updated by successive calls to avio_enum_protocols. + /// If set to 1, iterate over output protocols, otherwise over input protocols. + /// A static string containing the name of current protocol or NULL + public static string avio_enum_protocols(void** @opaque, int @output) => vectors.avio_enum_protocols(@opaque, @output); + + /// Similar to feof() but also returns nonzero on read errors. + /// non zero if and only if at end of file or a read error happened when reading. + public static int avio_feof(AVIOContext* @s) => vectors.avio_feof(@s); + + /// Return the name of the protocol that will handle the passed URL. + /// Name of the protocol or NULL. + public static string avio_find_protocol_name(string @url) => vectors.avio_find_protocol_name(@url); + + /// Force flushing of buffered data. 
+ public static void avio_flush(AVIOContext* @s) => vectors.avio_flush(@s); + + /// Free entry allocated by avio_read_dir(). + /// entry to be freed. + public static void avio_free_directory_entry(AVIODirEntry** @entry) => vectors.avio_free_directory_entry(@entry); + + /// Return the written size and a pointer to the buffer. The AVIOContext stream is left intact. The buffer must NOT be freed. No padding is added to the buffer. + /// IO context + /// pointer to a byte buffer + /// the length of the byte buffer + public static int avio_get_dyn_buf(AVIOContext* @s, byte** @pbuffer) => vectors.avio_get_dyn_buf(@s, @pbuffer); + + /// Read a string from pb into buf. The reading will terminate when either a NULL character was encountered, maxlen bytes have been read, or nothing more can be read from pb. The result is guaranteed to be NULL-terminated, it will be truncated if buf is too small. Note that the string is not interpreted or validated in any way, it might get truncated in the middle of a sequence for multi-byte encodings. + /// number of bytes read (is always < = maxlen). If reading ends on EOF or error, the return value will be one more than bytes actually read. + public static int avio_get_str(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => vectors.avio_get_str(@pb, @maxlen, @buf, @buflen); + + public static int avio_get_str16be(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => vectors.avio_get_str16be(@pb, @maxlen, @buf, @buflen); + + /// Read a UTF-16 string from pb and convert it to UTF-8. The reading will terminate when either a null or invalid character was encountered or maxlen bytes have been read. + /// number of bytes read (is always < = maxlen) + public static int avio_get_str16le(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => vectors.avio_get_str16le(@pb, @maxlen, @buf, @buflen); + + /// Perform one step of the protocol handshake to accept a new client. 
This function must be called on a client returned by avio_accept() before using it as a read/write context. It is separate from avio_accept() because it may block. A step of the handshake is defined by places where the application may decide to change the proceedings. For example, on a protocol with a request header and a reply header, each one can constitute a step because the application may use the parameters from the request to change parameters in the reply; or each individual chunk of the request can constitute a step. If the handshake is already finished, avio_handshake() does nothing and returns 0 immediately. + /// the client context to perform the handshake on + /// 0 on a complete and successful handshake > 0 if the handshake progressed, but is not complete < 0 for an AVERROR code + public static int avio_handshake(AVIOContext* @c) => vectors.avio_handshake(@c); + + /// Create and initialize a AVIOContext for accessing the resource indicated by url. + /// Used to return the pointer to the created AVIOContext. In case of failure the pointed to value is set to NULL. + /// resource to access + /// flags which control how the resource indicated by url is to be opened + /// >= 0 in case of success, a negative value corresponding to an AVERROR code in case of failure + public static int avio_open(AVIOContext** @s, string @url, int @flags) => vectors.avio_open(@s, @url, @flags); + + /// Open directory for reading. + /// directory read context. Pointer to a NULL pointer must be passed. + /// directory to be listed. + /// A dictionary filled with protocol-private options. On return this parameter will be destroyed and replaced with a dictionary containing options that were not found. May be NULL. + /// >=0 on success or negative on error. + public static int avio_open_dir(AVIODirContext** @s, string @url, AVDictionary** @options) => vectors.avio_open_dir(@s, @url, @options); + + /// Open a write only memory stream. + /// new IO context + /// zero if no error. 
+ public static int avio_open_dyn_buf(AVIOContext** @s) => vectors.avio_open_dyn_buf(@s); + + /// Create and initialize a AVIOContext for accessing the resource indicated by url. + /// Used to return the pointer to the created AVIOContext. In case of failure the pointed to value is set to NULL. + /// resource to access + /// flags which control how the resource indicated by url is to be opened + /// an interrupt callback to be used at the protocols level + /// A dictionary filled with protocol-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. + /// >= 0 in case of success, a negative value corresponding to an AVERROR code in case of failure + public static int avio_open2(AVIOContext** @s, string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options) => vectors.avio_open2(@s, @url, @flags, @int_cb, @options); + + /// Pause and resume playing - only meaningful if using a network streaming protocol (e.g. MMS). + /// IO context from which to call the read_pause function pointer + /// 1 for pause, 0 for resume + public static int avio_pause(AVIOContext* @h, int @pause) => vectors.avio_pause(@h, @pause); + + /// Write a NULL terminated array of strings to the context. Usually you don't need to use this function directly but its macro wrapper, avio_print. + public static void avio_print_string_array(AVIOContext* @s, byte*[] @strings) => vectors.avio_print_string_array(@s, @strings); + + /// Writes a formatted string to the context. + /// number of bytes written, < 0 on error. + public static int avio_printf(AVIOContext* @s, string @fmt) => vectors.avio_printf(@s, @fmt); + + /// Get AVClass by names of available protocols. + /// A AVClass of input protocol name or NULL + public static AVClass* avio_protocol_get_class(string @name) => vectors.avio_protocol_get_class(@name); + + /// Write a NULL-terminated string. + /// number of bytes written. 
+ public static int avio_put_str(AVIOContext* @s, string @str) => vectors.avio_put_str(@s, @str); + + /// Convert an UTF-8 string to UTF-16BE and write it. + /// the AVIOContext + /// NULL-terminated UTF-8 string + /// number of bytes written. + public static int avio_put_str16be(AVIOContext* @s, string @str) => vectors.avio_put_str16be(@s, @str); + + /// Convert an UTF-8 string to UTF-16LE and write it. + /// the AVIOContext + /// NULL-terminated UTF-8 string + /// number of bytes written. + public static int avio_put_str16le(AVIOContext* @s, string @str) => vectors.avio_put_str16le(@s, @str); + + /// @{ + public static int avio_r8(AVIOContext* @s) => vectors.avio_r8(@s); + + public static uint avio_rb16(AVIOContext* @s) => vectors.avio_rb16(@s); + + public static uint avio_rb24(AVIOContext* @s) => vectors.avio_rb24(@s); + + public static uint avio_rb32(AVIOContext* @s) => vectors.avio_rb32(@s); + + public static ulong avio_rb64(AVIOContext* @s) => vectors.avio_rb64(@s); + + /// Read size bytes from AVIOContext into buf. + /// number of bytes read or AVERROR + public static int avio_read(AVIOContext* @s, byte* @buf, int @size) => vectors.avio_read(@s, @buf, @size); + + /// Get next directory entry. + /// directory read context. + /// next entry or NULL when no more entries. + /// >=0 on success or negative on error. End of list is not considered an error. + public static int avio_read_dir(AVIODirContext* @s, AVIODirEntry** @next) => vectors.avio_read_dir(@s, @next); + + /// Read size bytes from AVIOContext into buf. Unlike avio_read(), this is allowed to read fewer bytes than requested. The missing bytes can be read in the next call. This always tries to read at least 1 byte. Useful to reduce latency in certain cases. + /// number of bytes read or AVERROR + public static int avio_read_partial(AVIOContext* @s, byte* @buf, int @size) => vectors.avio_read_partial(@s, @buf, @size); + + /// Read contents of h into print buffer, up to max_size bytes, or up to EOF. 
+ /// 0 for success (max_size bytes read or EOF reached), negative error code otherwise + public static int avio_read_to_bprint(AVIOContext* @h, AVBPrint* @pb, ulong @max_size) => vectors.avio_read_to_bprint(@h, @pb, @max_size); + + public static uint avio_rl16(AVIOContext* @s) => vectors.avio_rl16(@s); + + public static uint avio_rl24(AVIOContext* @s) => vectors.avio_rl24(@s); + + public static uint avio_rl32(AVIOContext* @s) => vectors.avio_rl32(@s); + + public static ulong avio_rl64(AVIOContext* @s) => vectors.avio_rl64(@s); + + /// fseek() equivalent for AVIOContext. + /// new position or AVERROR. + public static long avio_seek(AVIOContext* @s, long @offset, int @whence) => vectors.avio_seek(@s, @offset, @whence); + + /// Seek to a given timestamp relative to some component stream. Only meaningful if using a network streaming protocol (e.g. MMS.). + /// IO context from which to call the seek function pointers + /// The stream index that the timestamp is relative to. If stream_index is (-1) the timestamp should be in AV_TIME_BASE units from the beginning of the presentation. If a stream_index >= 0 is used and the protocol does not support seeking based on component streams, the call will fail. + /// timestamp in AVStream.time_base units or if there is no stream specified then in AV_TIME_BASE units. + /// Optional combination of AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE and AVSEEK_FLAG_ANY. The protocol may silently ignore AVSEEK_FLAG_BACKWARD and AVSEEK_FLAG_ANY, but AVSEEK_FLAG_BYTE will fail if used and not supported. + /// >= 0 on success + public static long avio_seek_time(AVIOContext* @h, int @stream_index, long @timestamp, int @flags) => vectors.avio_seek_time(@h, @stream_index, @timestamp, @flags); + + /// Get the filesize. + /// filesize or AVERROR + public static long avio_size(AVIOContext* @s) => vectors.avio_size(@s); + + /// Skip given number of bytes forward + /// new position or AVERROR. 
+ public static long avio_skip(AVIOContext* @s, long @offset) => vectors.avio_skip(@s, @offset); + + /// Writes a formatted string to the context taking a va_list. + /// number of bytes written, < 0 on error. + public static int avio_vprintf(AVIOContext* @s, string @fmt, byte* @ap) => vectors.avio_vprintf(@s, @fmt, @ap); + + public static void avio_w8(AVIOContext* @s, int @b) => vectors.avio_w8(@s, @b); + + public static void avio_wb16(AVIOContext* @s, uint @val) => vectors.avio_wb16(@s, @val); + + public static void avio_wb24(AVIOContext* @s, uint @val) => vectors.avio_wb24(@s, @val); + + public static void avio_wb32(AVIOContext* @s, uint @val) => vectors.avio_wb32(@s, @val); + + public static void avio_wb64(AVIOContext* @s, ulong @val) => vectors.avio_wb64(@s, @val); + + public static void avio_wl16(AVIOContext* @s, uint @val) => vectors.avio_wl16(@s, @val); + + public static void avio_wl24(AVIOContext* @s, uint @val) => vectors.avio_wl24(@s, @val); + + public static void avio_wl32(AVIOContext* @s, uint @val) => vectors.avio_wl32(@s, @val); + + public static void avio_wl64(AVIOContext* @s, ulong @val) => vectors.avio_wl64(@s, @val); + + public static void avio_write(AVIOContext* @s, byte* @buf, int @size) => vectors.avio_write(@s, @buf, @size); + + /// Mark the written bytestream as a specific type. + /// the stream time the current bytestream pos corresponds to (in AV_TIME_BASE units), or AV_NOPTS_VALUE if unknown or not applicable + /// the kind of data written starting at the current pos + public static void avio_write_marker(AVIOContext* @s, long @time, AVIODataMarkerType @type) => vectors.avio_write_marker(@s, @time, @type); + + /// Free all allocated data in the given subtitle struct. + /// AVSubtitle to free. + public static void avsubtitle_free(AVSubtitle* @sub) => vectors.avsubtitle_free(@sub); + + /// Return the libavutil build-time configuration. 
+ public static string avutil_configuration() => vectors.avutil_configuration(); + + /// Return the libavutil license. + public static string avutil_license() => vectors.avutil_license(); + + /// Return the LIBAVUTIL_VERSION_INT constant. + public static uint avutil_version() => vectors.avutil_version(); + + /// Return the libpostproc build-time configuration. + public static string postproc_configuration() => vectors.postproc_configuration(); + + /// Return the libpostproc license. + public static string postproc_license() => vectors.postproc_license(); + + /// Return the LIBPOSTPROC_VERSION_INT constant. + public static uint postproc_version() => vectors.postproc_version(); + + public static void pp_free_context(void* @ppContext) => vectors.pp_free_context(@ppContext); + + public static void pp_free_mode(void* @mode) => vectors.pp_free_mode(@mode); + + public static void* pp_get_context(int @width, int @height, int @flags) => vectors.pp_get_context(@width, @height, @flags); + + /// Return a pp_mode or NULL if an error occurred. + /// the string after "-pp" on the command line + /// a number from 0 to PP_QUALITY_MAX + public static void* pp_get_mode_by_name_and_quality(string @name, int @quality) => vectors.pp_get_mode_by_name_and_quality(@name, @quality); + + public static void pp_postprocess(in byte_ptr3 @src, in int3 @srcStride, ref byte_ptr3 @dst, in int3 @dstStride, int @horizontalSize, int @verticalSize, sbyte* @QP_store, int @QP_stride, void* @mode, void* @ppContext, int @pict_type) => vectors.pp_postprocess(@src, @srcStride, ref @dst, @dstStride, @horizontalSize, @verticalSize, @QP_store, @QP_stride, @mode, @ppContext, @pict_type); + + /// Allocate SwrContext. + /// NULL on error, allocated context otherwise + public static SwrContext* swr_alloc() => vectors.swr_alloc(); + + /// Allocate SwrContext if needed and set/reset common parameters. 
+ /// existing Swr context if available, or NULL if not + /// output channel layout (AV_CH_LAYOUT_*) + /// output sample format (AV_SAMPLE_FMT_*). + /// output sample rate (frequency in Hz) + /// input channel layout (AV_CH_LAYOUT_*) + /// input sample format (AV_SAMPLE_FMT_*). + /// input sample rate (frequency in Hz) + /// logging level offset + /// parent logging context, can be NULL + /// NULL on error, allocated context otherwise + [Obsolete("use ")] + public static SwrContext* swr_alloc_set_opts(SwrContext* @s, long @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, long @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx) => vectors.swr_alloc_set_opts(@s, @out_ch_layout, @out_sample_fmt, @out_sample_rate, @in_ch_layout, @in_sample_fmt, @in_sample_rate, @log_offset, @log_ctx); + + /// Allocate SwrContext if needed and set/reset common parameters. + /// Pointer to an existing Swr context if available, or to NULL if not. On success, *ps will be set to the allocated context. + /// output channel layout (e.g. AV_CHANNEL_LAYOUT_*) + /// output sample format (AV_SAMPLE_FMT_*). + /// output sample rate (frequency in Hz) + /// input channel layout (e.g. AV_CHANNEL_LAYOUT_*) + /// input sample format (AV_SAMPLE_FMT_*). + /// input sample rate (frequency in Hz) + /// logging level offset + /// parent logging context, can be NULL + /// 0 on success, a negative AVERROR code on error. On error, the Swr context is freed and *ps set to NULL. + public static int swr_alloc_set_opts2(SwrContext** @ps, AVChannelLayout* @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, AVChannelLayout* @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx) => vectors.swr_alloc_set_opts2(@ps, @out_ch_layout, @out_sample_fmt, @out_sample_rate, @in_ch_layout, @in_sample_fmt, @in_sample_rate, @log_offset, @log_ctx); + + /// Generate a channel mixing matrix. 
+ /// input channel layout + /// output channel layout + /// mix level for the center channel + /// mix level for the surround channel(s) + /// mix level for the low-frequency effects channel + /// if 1.0, coefficients will be normalized to prevent overflow. if INT_MAX, coefficients will not be normalized. + /// mixing coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o. + /// distance between adjacent input channels in the matrix array + /// matrixed stereo downmix mode (e.g. dplii) + /// parent logging context, can be NULL + /// 0 on success, negative AVERROR code on failure + [Obsolete("use ")] + public static int swr_build_matrix(ulong @in_layout, ulong @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @rematrix_maxval, double @rematrix_volume, double* @matrix, int @stride, AVMatrixEncoding @matrix_encoding, void* @log_ctx) => vectors.swr_build_matrix(@in_layout, @out_layout, @center_mix_level, @surround_mix_level, @lfe_mix_level, @rematrix_maxval, @rematrix_volume, @matrix, @stride, @matrix_encoding, @log_ctx); + + /// Generate a channel mixing matrix. + /// input channel layout + /// output channel layout + /// mix level for the center channel + /// mix level for the surround channel(s) + /// mix level for the low-frequency effects channel + /// mixing coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o. + /// distance between adjacent input channels in the matrix array + /// matrixed stereo downmix mode (e.g. 
dplii) + /// 0 on success, negative AVERROR code on failure + public static int swr_build_matrix2(AVChannelLayout* @in_layout, AVChannelLayout* @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @maxval, double @rematrix_volume, double* @matrix, long @stride, AVMatrixEncoding @matrix_encoding, void* @log_context) => vectors.swr_build_matrix2(@in_layout, @out_layout, @center_mix_level, @surround_mix_level, @lfe_mix_level, @maxval, @rematrix_volume, @matrix, @stride, @matrix_encoding, @log_context); + + /// Closes the context so that swr_is_initialized() returns 0. + /// Swr context to be closed + public static void swr_close(SwrContext* @s) => vectors.swr_close(@s); + + /// Configure or reconfigure the SwrContext using the information provided by the AVFrames. + /// audio resample context + /// 0 on success, AVERROR on failure. + public static int swr_config_frame(SwrContext* @swr, AVFrame* @out, AVFrame* @in) => vectors.swr_config_frame(@swr, @out, @in); + + /// Convert audio. + /// allocated Swr context, with parameters set + /// output buffers, only the first one need be set in case of packed audio + /// amount of space available for output in samples per channel + /// input buffers, only the first one need to be set in case of packed audio + /// number of input samples available in one channel + /// number of samples output per channel, negative value on error + public static int swr_convert(SwrContext* @s, byte** @out, int @out_count, byte** @in, int @in_count) => vectors.swr_convert(@s, @out, @out_count, @in, @in_count); + + /// Convert the samples in the input AVFrame and write them to the output AVFrame. + /// audio resample context + /// output AVFrame + /// input AVFrame + /// 0 on success, AVERROR on failure or nonmatching configuration. 
+ public static int swr_convert_frame(SwrContext* @swr, AVFrame* @output, AVFrame* @input) => vectors.swr_convert_frame(@swr, @output, @input); + + /// Drops the specified number of output samples. + /// allocated Swr context + /// number of samples to be dropped + /// >= 0 on success, or a negative AVERROR code on failure + public static int swr_drop_output(SwrContext* @s, int @count) => vectors.swr_drop_output(@s, @count); + + /// Free the given SwrContext and set the pointer to NULL. + /// a pointer to a pointer to Swr context + public static void swr_free(SwrContext** @s) => vectors.swr_free(@s); + + /// Get the AVClass for SwrContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + /// the AVClass of SwrContext + public static AVClass* swr_get_class() => vectors.swr_get_class(); + + /// Gets the delay the next input sample will experience relative to the next output sample. + /// swr context + /// timebase in which the returned delay will be: + public static long swr_get_delay(SwrContext* @s, long @base) => vectors.swr_get_delay(@s, @base); + + /// Find an upper bound on the number of samples that the next swr_convert call will output, if called with in_samples of input samples. This depends on the internal state, and anything changing the internal state (like further swr_convert() calls) will may change the number of samples swr_get_out_samples() returns for the same number of input samples. + /// number of input samples. + public static int swr_get_out_samples(SwrContext* @s, int @in_samples) => vectors.swr_get_out_samples(@s, @in_samples); + + /// Initialize context after user parameters have been set. + /// Swr context to initialize + /// AVERROR error code in case of failure. + public static int swr_init(SwrContext* @s) => vectors.swr_init(@s); + + /// Injects the specified number of silence samples. 
+ /// allocated Swr context + /// number of samples to be dropped + /// >= 0 on success, or a negative AVERROR code on failure + public static int swr_inject_silence(SwrContext* @s, int @count) => vectors.swr_inject_silence(@s, @count); + + /// Check whether an swr context has been initialized or not. + /// Swr context to check + /// positive if it has been initialized, 0 if not initialized + public static int swr_is_initialized(SwrContext* @s) => vectors.swr_is_initialized(@s); + + /// Convert the next timestamp from input to output timestamps are in 1/(in_sample_rate * out_sample_rate) units. + /// the output timestamp for the next output sample + public static long swr_next_pts(SwrContext* @s, long @pts) => vectors.swr_next_pts(@s, @pts); + + /// Set a customized input channel mapping. + /// allocated Swr context, not yet initialized + /// customized input channel mapping (array of channel indexes, -1 for a muted channel) + /// >= 0 on success, or AVERROR error code in case of failure. + public static int swr_set_channel_mapping(SwrContext* @s, int* @channel_map) => vectors.swr_set_channel_mapping(@s, @channel_map); + + /// Activate resampling compensation ("soft" compensation). This function is internally called when needed in swr_next_pts(). + /// allocated Swr context. If it is not initialized, or SWR_FLAG_RESAMPLE is not set, swr_init() is called with the flag set. + /// delta in PTS per sample + /// number of samples to compensate for + /// >= 0 on success, AVERROR error codes if: + public static int swr_set_compensation(SwrContext* @s, int @sample_delta, int @compensation_distance) => vectors.swr_set_compensation(@s, @sample_delta, @compensation_distance); + + /// Set a customized remix matrix. + /// allocated Swr context, not yet initialized + /// remix coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o + /// offset between lines of the matrix + /// >= 0 on success, or AVERROR error code in case of failure. 
+ public static int swr_set_matrix(SwrContext* @s, double* @matrix, int @stride) => vectors.swr_set_matrix(@s, @matrix, @stride); + + /// Return the swr build-time configuration. + public static string swresample_configuration() => vectors.swresample_configuration(); + + /// Return the swr license. + public static string swresample_license() => vectors.swresample_license(); + + /// Return the LIBSWRESAMPLE_VERSION_INT constant. + public static uint swresample_version() => vectors.swresample_version(); + + /// Allocate an empty SwsContext. This must be filled and passed to sws_init_context(). For filling see AVOptions, options.c and sws_setColorspaceDetails(). + public static SwsContext* sws_alloc_context() => vectors.sws_alloc_context(); + + /// Allocate and return an uninitialized vector with length coefficients. + public static SwsVector* sws_allocVec(int @length) => vectors.sws_allocVec(@length); + + /// Convert an 8-bit paletted frame into a frame with a color depth of 24 bits. + /// source frame buffer + /// destination frame buffer + /// number of pixels to convert + /// array with [256] entries, which must match color arrangement (RGB or BGR) of src + public static void sws_convertPalette8ToPacked24(byte* @src, byte* @dst, int @num_pixels, byte* @palette) => vectors.sws_convertPalette8ToPacked24(@src, @dst, @num_pixels, @palette); + + /// Convert an 8-bit paletted frame into a frame with a color depth of 32 bits. + /// source frame buffer + /// destination frame buffer + /// number of pixels to convert + /// array with [256] entries, which must match color arrangement (RGB or BGR) of src + public static void sws_convertPalette8ToPacked32(byte* @src, byte* @dst, int @num_pixels, byte* @palette) => vectors.sws_convertPalette8ToPacked32(@src, @dst, @num_pixels, @palette); + + /// Finish the scaling process for a pair of source/destination frames previously submitted with sws_frame_start(). 
Must be called after all sws_send_slice() and sws_receive_slice() calls are done, before any new sws_frame_start() calls. + public static void sws_frame_end(SwsContext* @c) => vectors.sws_frame_end(@c); + + /// Initialize the scaling process for a given pair of source/destination frames. Must be called before any calls to sws_send_slice() and sws_receive_slice(). + /// The destination frame. + /// The source frame. The data buffers must be allocated, but the frame data does not have to be ready at this point. Data availability is then signalled by sws_send_slice(). + /// 0 on success, a negative AVERROR code on failure + public static int sws_frame_start(SwsContext* @c, AVFrame* @dst, AVFrame* @src) => vectors.sws_frame_start(@c, @dst, @src); + + /// Free the swscaler context swsContext. If swsContext is NULL, then does nothing. + public static void sws_freeContext(SwsContext* @swsContext) => vectors.sws_freeContext(@swsContext); + + public static void sws_freeFilter(SwsFilter* @filter) => vectors.sws_freeFilter(@filter); + + public static void sws_freeVec(SwsVector* @a) => vectors.sws_freeVec(@a); + + /// Get the AVClass for swsContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + public static AVClass* sws_get_class() => vectors.sws_get_class(); + + /// Check if context can be reused, otherwise reallocate a new one. + public static SwsContext* sws_getCachedContext(SwsContext* @context, int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param) => vectors.sws_getCachedContext(@context, @srcW, @srcH, @srcFormat, @dstW, @dstH, @dstFormat, @flags, @srcFilter, @dstFilter, @param); + + /// Return a pointer to yuv<->rgb coefficients for the given colorspace suitable for sws_setColorspaceDetails(). + /// One of the SWS_CS_* macros. If invalid, SWS_CS_DEFAULT is used. 
+ public static int* sws_getCoefficients(int @colorspace) => vectors.sws_getCoefficients(@colorspace); + + /// #if LIBSWSCALE_VERSION_MAJOR > 6 + /// negative error code on error, non negative otherwise #else + public static int sws_getColorspaceDetails(SwsContext* @c, int** @inv_table, int* @srcRange, int** @table, int* @dstRange, int* @brightness, int* @contrast, int* @saturation) => vectors.sws_getColorspaceDetails(@c, @inv_table, @srcRange, @table, @dstRange, @brightness, @contrast, @saturation); + + /// Allocate and return an SwsContext. You need it to perform scaling/conversion operations using sws_scale(). + /// the width of the source image + /// the height of the source image + /// the source image format + /// the width of the destination image + /// the height of the destination image + /// the destination image format + /// specify which algorithm and options to use for rescaling + /// extra parameters to tune the used scaler For SWS_BICUBIC param[0] and [1] tune the shape of the basis function, param[0] tunes f(1) and param[1] f´(1) For SWS_GAUSS param[0] tunes the exponent and thus cutoff frequency For SWS_LANCZOS param[0] tunes the width of the window function + /// a pointer to an allocated context, or NULL in case of error + public static SwsContext* sws_getContext(int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param) => vectors.sws_getContext(@srcW, @srcH, @srcFormat, @dstW, @dstH, @dstFormat, @flags, @srcFilter, @dstFilter, @param); + + public static SwsFilter* sws_getDefaultFilter(float @lumaGBlur, float @chromaGBlur, float @lumaSharpen, float @chromaSharpen, float @chromaHShift, float @chromaVShift, int @verbose) => vectors.sws_getDefaultFilter(@lumaGBlur, @chromaGBlur, @lumaSharpen, @chromaSharpen, @chromaHShift, @chromaVShift, @verbose); + + /// Return a normalized Gaussian curve used to filter stuff quality = 3 is high quality, 
lower is lower quality. + public static SwsVector* sws_getGaussianVec(double @variance, double @quality) => vectors.sws_getGaussianVec(@variance, @quality); + + /// Initialize the swscaler context sws_context. + /// zero or positive value on success, a negative value on error + public static int sws_init_context(SwsContext* @sws_context, SwsFilter* @srcFilter, SwsFilter* @dstFilter) => vectors.sws_init_context(@sws_context, @srcFilter, @dstFilter); + + /// Returns a positive value if an endianness conversion for pix_fmt is supported, 0 otherwise. + /// the pixel format + /// a positive value if an endianness conversion for pix_fmt is supported, 0 otherwise. + public static int sws_isSupportedEndiannessConversion(AVPixelFormat @pix_fmt) => vectors.sws_isSupportedEndiannessConversion(@pix_fmt); + + /// Return a positive value if pix_fmt is a supported input format, 0 otherwise. + public static int sws_isSupportedInput(AVPixelFormat @pix_fmt) => vectors.sws_isSupportedInput(@pix_fmt); + + /// Return a positive value if pix_fmt is a supported output format, 0 otherwise. + public static int sws_isSupportedOutput(AVPixelFormat @pix_fmt) => vectors.sws_isSupportedOutput(@pix_fmt); + + /// Scale all the coefficients of a so that their sum equals height. + public static void sws_normalizeVec(SwsVector* @a, double @height) => vectors.sws_normalizeVec(@a, @height); + + /// Request a horizontal slice of the output data to be written into the frame previously provided to sws_frame_start(). + /// first row of the slice; must be a multiple of sws_receive_slice_alignment() + /// number of rows in the slice; must be a multiple of sws_receive_slice_alignment(), except for the last slice (i.e. 
when slice_start+slice_height is equal to output frame height) + /// a non-negative number if the data was successfully written into the output AVERROR(EAGAIN) if more input data needs to be provided before the output can be produced another negative AVERROR code on other kinds of scaling failure + public static int sws_receive_slice(SwsContext* @c, uint @slice_start, uint @slice_height) => vectors.sws_receive_slice(@c, @slice_start, @slice_height); + + /// Returns alignment required for output slices requested with sws_receive_slice(). Slice offsets and sizes passed to sws_receive_slice() must be multiples of the value returned from this function. + /// alignment required for output slices requested with sws_receive_slice(). Slice offsets and sizes passed to sws_receive_slice() must be multiples of the value returned from this function. + public static uint sws_receive_slice_alignment(SwsContext* @c) => vectors.sws_receive_slice_alignment(@c); + + /// Scale the image slice in srcSlice and put the resulting scaled slice in the image in dst. A slice is a sequence of consecutive rows in an image. 
+ /// the scaling context previously created with sws_getContext() + /// the array containing the pointers to the planes of the source slice + /// the array containing the strides for each plane of the source image + /// the position in the source image of the slice to process, that is the number (counted starting from zero) in the image of the first row of the slice + /// the height of the source slice, that is the number of rows in the slice + /// the array containing the pointers to the planes of the destination image + /// the array containing the strides for each plane of the destination image + /// the height of the output slice + public static int sws_scale(SwsContext* @c, byte*[] @srcSlice, int[] @srcStride, int @srcSliceY, int @srcSliceH, byte*[] @dst, int[] @dstStride) => vectors.sws_scale(@c, @srcSlice, @srcStride, @srcSliceY, @srcSliceH, @dst, @dstStride); + + /// Scale source data from src and write the output to dst. + /// The destination frame. See documentation for sws_frame_start() for more details. + /// The source frame. + /// 0 on success, a negative AVERROR code on failure + public static int sws_scale_frame(SwsContext* @c, AVFrame* @dst, AVFrame* @src) => vectors.sws_scale_frame(@c, @dst, @src); + + /// Scale all the coefficients of a by the scalar value. + public static void sws_scaleVec(SwsVector* @a, double @scalar) => vectors.sws_scaleVec(@a, @scalar); + + /// Indicate that a horizontal slice of input data is available in the source frame previously provided to sws_frame_start(). The slices may be provided in any order, but may not overlap. For vertically subsampled pixel formats, the slices must be aligned according to subsampling. + /// first row of the slice + /// number of rows in the slice + /// a non-negative number on success, a negative AVERROR code on failure. 
+ public static int sws_send_slice(SwsContext* @c, uint @slice_start, uint @slice_height) => vectors.sws_send_slice(@c, @slice_start, @slice_height); + + /// Returns negative error code on error, non negative otherwise #else Returns -1 if not supported #endif + /// the yuv2rgb coefficients describing the input yuv space, normally ff_yuv2rgb_coeffs[x] + /// flag indicating the while-black range of the input (1=jpeg / 0=mpeg) + /// the yuv2rgb coefficients describing the output yuv space, normally ff_yuv2rgb_coeffs[x] + /// flag indicating the while-black range of the output (1=jpeg / 0=mpeg) + /// 16.16 fixed point brightness correction + /// 16.16 fixed point contrast correction + /// 16.16 fixed point saturation correction #if LIBSWSCALE_VERSION_MAJOR > 6 + /// negative error code on error, non negative otherwise #else + public static int sws_setColorspaceDetails(SwsContext* @c, in int4 @inv_table, int @srcRange, in int4 @table, int @dstRange, int @brightness, int @contrast, int @saturation) => vectors.sws_setColorspaceDetails(@c, @inv_table, @srcRange, @table, @dstRange, @brightness, @contrast, @saturation); + + /// Return the libswscale build-time configuration. + public static string swscale_configuration() => vectors.swscale_configuration(); + + /// Return the libswscale license. + public static string swscale_license() => vectors.swscale_license(); + + /// Color conversion and scaling library. + public static uint swscale_version() => vectors.swscale_version(); + +} diff --git a/FFmpeg.AutoGen.Abstractions/generated/ffmpeg.functions.inline.g.cs b/FFmpeg.AutoGen.Abstractions/generated/ffmpeg.functions.inline.g.cs new file mode 100644 index 00000000..59838415 --- /dev/null +++ b/FFmpeg.AutoGen.Abstractions/generated/ffmpeg.functions.inline.g.cs @@ -0,0 +1,368 @@ +using System; + +namespace FFmpeg.AutoGen.Abstractions; + +public static unsafe partial class ffmpeg +{ + /// Compute ceil(log2(x)). 
+ /// value used to compute ceil(log2(x)) + /// computed ceiling of log2(x) + public static int av_ceil_log2_c(int @x) + { + return av_log2((uint)(x - 1U) << 1); + } + // original body hash: Y9QGw919/NB5ltczSPmZu5WZt+BfR1GGQ58ULgOxiNo= + + /// Clip a signed integer value into the amin-amax range. + /// value to clip + /// minimum value of the clip range + /// maximum value of the clip range + /// clipped value + public static int av_clip_c(int @a, int @amin, int @amax) + { + if (a < amin) + return amin; + else if (a > amax) + return amax; + else + return a; + } + // original body hash: FGSX8EvLhMgYqP9+0z1+Clej4HxjpENDPDX7uAYLx6k= + + /// Clip a signed integer value into the -32768,32767 range. + /// value to clip + /// clipped value + public static short av_clip_int16_c(int @a) + { + if (((a + 32768U) & ~65535) != 0) + return (short)((a >> 31) ^ 32767); + else + return (short)a; + } + // original body hash: l7ot2X+8YIG7Ze9ecaMTap87pGl9Q5kffGq1e9dS9Es= + + /// Clip a signed integer value into the -128,127 range. + /// value to clip + /// clipped value + public static sbyte av_clip_int8_c(int @a) + { + if (((a + 128U) & ~255) != 0) + return (sbyte)((a >> 31) ^ 127); + else + return (sbyte)a; + } + // original body hash: 959D6ojD8+Bo9o7pGvHcWTnCDg5Ax0o328RGYDIiUvo= + + /// Clip a signed integer into the -(2^p),(2^p-1) range. + /// value to clip + /// bit position to clip at + /// clipped value + public static int av_clip_intp2_c(int @a, int @p) + { + if ((((uint)a + (1 << p)) & ~((2 << p) - 1)) != 0) + return (a >> 31) ^ ((1 << p) - 1); + else + return a; + } + // original body hash: /qM73AkEE6w4/NOhpvKw1SVRZPxbN61+Yqc3i9L/2bM= + + /// Clip a signed integer value into the 0-65535 range. 
+ /// value to clip + /// clipped value + public static ushort av_clip_uint16_c(int @a) + { + if ((a & (~65535)) != 0) + return (ushort)((~a) >> 31); + else + return (ushort)a; + } + // original body hash: nI5Vkw30nAjS2NmNSdCSnHeAUcY47XT0lnrnsUK/bJ4= + + /// Clip a signed integer value into the 0-255 range. + /// value to clip + /// clipped value + public static byte av_clip_uint8_c(int @a) + { + if ((a & (~255)) != 0) + return (byte)((~a) >> 31); + else + return (byte)a; + } + // original body hash: 32OGGgXBFRL7EcU8DizK9KbIFfU356+5hgUEyAOjIUY= + + /// Clip a signed integer to an unsigned power of two range. + /// value to clip + /// bit position to clip at + /// clipped value + public static uint av_clip_uintp2_c(int @a, int @p) + { + if ((a & ~((1 << p) - 1)) != 0) + return (uint)((~a) >> 31 & ((1 << p) - 1)); + else + return (uint)a; + } + // original body hash: 01v+7HjG6Id/YAdTCeWBkPwvakfGiCosPM6u5MXI8pU= + + /// Clip a signed 64bit integer value into the amin-amax range. + /// value to clip + /// minimum value of the clip range + /// maximum value of the clip range + /// clipped value + public static long av_clip64_c(long @a, long @amin, long @amax) + { + if (a < amin) + return amin; + else if (a > amax) + return amax; + else + return a; + } + // original body hash: FGSX8EvLhMgYqP9+0z1+Clej4HxjpENDPDX7uAYLx6k= + + /// Clip a double value into the amin-amax range. If a is nan or -inf amin will be returned. If a is +inf amax will be returned. + /// value to clip + /// minimum value of the clip range + /// maximum value of the clip range + /// clipped value + public static double av_clipd_c(double @a, double @amin, double @amax) + { + return ((((a) > (amin) ? (a) : (amin))) > (amax) ? (amax) : (((a) > (amin) ? (a) : (amin)))); + } + // original body hash: 3g76qefPWCYqXraY2vYdxoH58/EKn5EeR9v7cGEBM6Y= + + /// Clip a float value into the amin-amax range. If a is nan or -inf amin will be returned. If a is +inf amax will be returned. 
+ /// value to clip + /// minimum value of the clip range + /// maximum value of the clip range + /// clipped value + public static float av_clipf_c(float @a, float @amin, float @amax) + { + return ((((a) > (amin) ? (a) : (amin))) > (amax) ? (amax) : (((a) > (amin) ? (a) : (amin)))); + } + // original body hash: 3g76qefPWCYqXraY2vYdxoH58/EKn5EeR9v7cGEBM6Y= + + /// Clip a signed 64-bit integer value into the -2147483648,2147483647 range. + /// value to clip + /// clipped value + public static int av_clipl_int32_c(long @a) + { + if ((((ulong)a + 2147483648UL) & ~(4294967295UL)) != 0) + return (int)((a >> 63) ^ 2147483647); + else + return (int)a; + } + // original body hash: 00dWv9FNYsEeRh1lPjYlSw3TQiOlthet3Kyi6z91Hbo= + + /// Compare two rationals. + /// First rational + /// Second rational + /// One of the following values: - 0 if `a == b` - 1 if `a > b` - -1 if `a < b` - `INT_MIN` if one of the values is of the form `0 / 0` + public static int av_cmp_q(AVRational @a, AVRational @b) + { + long tmp = a.num * (long)b.den - b.num * (long)a.den; + if (tmp != 0) + return (int)((tmp ^ a.den ^ b.den) >> 63) | 1; + else if (b.den != 0 && a.den != 0) + return 0; + else if (a.num != 0 && b.num != 0) + return (a.num >> 31) - (b.num >> 31); + else + return (-2147483647 - 1); + } + // original body hash: M+RGb5gXGdDjfY/gK5ZeCYeYrZAxjTXZA9+XVu0I66Q= + + /// Reinterpret a double as a 64-bit integer. + public static ulong av_double2int(double @f) + { + return (ulong)@f; + } + // original body hash: 2HuHK8WLchm3u+cK6H4QWhflx2JqfewtaSpj2Cwfi8M= + + /// Reinterpret a float as a 32-bit integer. + public static uint av_float2int(float @f) + { + return (uint)@f; + } + // original body hash: uBvsHd8EeFnxDvSdDE1+k5Um29kCuf0aEJhAvDy0wZk= + + /// Reinterpret a 64-bit integer as a double. + public static double av_int2double(ulong @i) + { + return (double)@i; + } + // original body hash: iFt3hVHTpF9jjqIGAAf/c7FrGfenOXGxdsyMjmrbwvw= + + /// Reinterpret a 32-bit integer as a float. 
+ public static float av_int2float(uint @i) + { + return (float)@i; + } + // original body hash: wLGFPpW+aIvxW79y6BVY1LKz/j7yc3BdiaJ7mD4oQmw= + + /// Invert a rational. + /// value + /// 1 / q + public static AVRational av_inv_q(AVRational @q) + { + var r = new AVRational { @num = q.den, @den = q.num }; + return r; + } + // original body hash: sXbO4D7vmayAx56EFqz9C0kakcSPSryJHdk0hr0MOFY= + + /// Fill the provided buffer with a string containing an error string corresponding to the AVERROR code errnum. + /// a buffer + /// size in bytes of errbuf + /// error code to describe + /// the buffer in input, filled with the error description + public static byte* av_make_error_string(byte* @errbuf, ulong @errbuf_size, int @errnum) + { + av_strerror(errnum, errbuf, errbuf_size); + return errbuf; + } + // original body hash: DRHQHyLQNo9pTxA+wRw4zVDrC7Md1u3JWawQX0BVkqE= + + /// Create an AVRational. + public static AVRational av_make_q(int @num, int @den) + { + var r = new AVRational { @num = num, @den = den }; + return r; + } + // original body hash: IAPYNNcg3GX0PGxINeLQhb41dH921lPVKcnqxCk7ERA= + + /// Clear high bits from an unsigned integer starting with specific bit position + /// value to clip + /// bit position to clip at + /// clipped value + public static uint av_mod_uintp2_c(uint @a, uint @p) + { + return a & (uint)((1 << (int)p) - 1); + } + // original body hash: ncn4Okxr9Nas1g/qCfpRHKtywuNmJuf3UED+o3wjadc= + + public static int av_parity_c(uint @v) + { + return av_popcount_c(v) & 1; + } + // original body hash: Hsrq5CWkNvuNTnqES92ZJYVYpKXFwosrZNja/oaUd0s= + + /// Count number of bits set to one in x + /// value to count bits of + /// the number of bits set to one in x + public static int av_popcount_c(uint @x) + { + x -= (x >> 1) & 1431655765; + x = (x & 858993459) + ((x >> 2) & 858993459); + x = (x + (x >> 4)) & 252645135; + x += x >> 8; + return (int)((x + (x >> 16)) & 63); + } + // original body hash: 6EqV8Ll7t/MGINV9Nh3TSEbNyUYeskm7HucpU0SAkgg= + + /// Count 
number of bits set to one in x + /// value to count bits of + /// the number of bits set to one in x + public static int av_popcount64_c(ulong @x) + { + return av_popcount_c((uint)x) + av_popcount_c((uint)(x >> 32)); + } + // original body hash: 4wjPAKU9R0yS6OI8Y9h3L6de+uXt/lBm+zX7t5Ch18k= + + /// Convert an AVRational to a `double`. + /// AVRational to convert + /// `a` in floating-point form + public static double av_q2d(AVRational @a) + { + return a.num / (double)a.den; + } + // original body hash: j4R2BS8nF6czcUDVk5kKi9nLEdlTI/NRDYtnc1KFeyE= + + /// Add two signed 32-bit values with saturation. + /// one value + /// another value + /// sum with signed saturation + public static int av_sat_add32_c(int @a, int @b) + { + return av_clipl_int32_c((long)a + b); + } + // original body hash: GAAy4GsS2n+9kJ/8hzuONPUOGIsiOj7PvXnLHUVrimY= + + /// Add two signed 64-bit values with saturation. + /// one value + /// another value + /// sum with signed saturation + public static long av_sat_add64_c(long @a, long @b) + { + try + { + return @a + @b; + } + catch (OverflowException) + { + return ((double)@a + (double)@b) > 0d ? long.MaxValue : long.MinValue; + } + } + // original body hash: qeup76rp1rjakhMYQJWWEYIkpgscUcDfzDIrjyqk5iM= + + /// Add a doubled value to another value with saturation at both stages. + /// first value + /// value doubled and added to a + /// sum sat(a + sat(2*b)) with signed saturation + public static int av_sat_dadd32_c(int @a, int @b) + { + return av_sat_add32_c(a, av_sat_add32_c(b, b)); + } + // original body hash: Kbha6XFULk7dxB6zc5WRwoPczQVN7HBcNs9Hjlj/Caw= + + /// Subtract a doubled value from another value with saturation at both stages. 
+ /// first value + /// value doubled and subtracted from a + /// difference sat(a - sat(2*b)) with signed saturation + public static int av_sat_dsub32_c(int @a, int @b) + { + return av_sat_sub32_c(a, av_sat_add32_c(b, b)); + } + // original body hash: ypu4i+30n3CeMxdL8pq7XDYAFBi1N5d2mkIT6zQ1bO0= + + /// Subtract two signed 32-bit values with saturation. + /// one value + /// another value + /// difference with signed saturation + public static int av_sat_sub32_c(int @a, int @b) + { + return av_clipl_int32_c((long)a - b); + } + // original body hash: /tgXI2zbIgliqOwZbpnq7jSiVj0N70RjBFsbkIkWhsM= + + /// Subtract two signed 64-bit values with saturation. + /// one value + /// another value + /// difference with signed saturation + public static long av_sat_sub64_c(long @a, long @b) + { + try + { + return @a - @b; + } + catch (OverflowException) + { + return ((double)@a - (double)@b) > 0d ? long.MaxValue : long.MinValue; + } + } + // original body hash: 6YrSxDrYVG1ac1wlCiXKMhTwj7Kx6eym/YtspKusrGk= + + /// Return x default pointer in case p is NULL. + public static void* av_x_if_null(void* @p, void* @x) + { + return (void*)(p != null ? p : x); + } + // original body hash: zOY924eIk3VeTSNb9XcE2Yw8aZ4/jlzQSfP06k5n0nU= + + /// ftell() equivalent for AVIOContext. + /// position or AVERROR. 
+ public static long avio_tell(AVIOContext* @s) + { + return avio_seek(s, 0, 1); + } + // original body hash: o18c3ypeh9EsmYaplTel2ssgM2PZKTTDfMjsqEopycw= + +} diff --git a/FFmpeg.AutoGen.Abstractions/generated/ffmpeg.macros.g.cs b/FFmpeg.AutoGen.Abstractions/generated/ffmpeg.macros.g.cs new file mode 100644 index 00000000..469de607 --- /dev/null +++ b/FFmpeg.AutoGen.Abstractions/generated/ffmpeg.macros.g.cs @@ -0,0 +1,1687 @@ +namespace FFmpeg.AutoGen.Abstractions; + +public static unsafe partial class ffmpeg +{ + /// _WIN32_WINNT = 0x602 + public const int _WIN32_WINNT = 0x602; + // public static attribute_deprecated = __declspec(deprecated); + // public static av_alias = __attribute__((may_alias)); + // public static av_alloc_size = (...); + // public static av_always_inline = __forceinline; + /// AV_BUFFER_FLAG_READONLY = (1 << 0) + public const int AV_BUFFER_FLAG_READONLY = 0x1 << 0x0; + /// AV_BUFFERSINK_FLAG_NO_REQUEST = 0x2 + public const int AV_BUFFERSINK_FLAG_NO_REQUEST = 0x2; + /// AV_BUFFERSINK_FLAG_PEEK = 0x1 + public const int AV_BUFFERSINK_FLAG_PEEK = 0x1; + // public static av_builtin_constant_p = __builtin_constant_p; + // public static av_ceil_log2 = av_ceil_log2_c; + // public static AV_CEIL_RSHIFT = (a,b) (!av_builtin_constant_p(b) ? 
-((-(a)) >> (b)) : ((a) + (1<<(b)) - 1) >> (b)); + /// AV_CH_BACK_CENTER = (1ULL << AV_CHAN_BACK_CENTER ) + public static readonly ulong AV_CH_BACK_CENTER = 0x1UL << 8; + /// AV_CH_BACK_LEFT = (1ULL << AV_CHAN_BACK_LEFT ) + public static readonly ulong AV_CH_BACK_LEFT = 0x1UL << 4; + /// AV_CH_BACK_RIGHT = (1ULL << AV_CHAN_BACK_RIGHT ) + public static readonly ulong AV_CH_BACK_RIGHT = 0x1UL << 5; + /// AV_CH_BOTTOM_FRONT_CENTER = (1ULL << AV_CHAN_BOTTOM_FRONT_CENTER ) + public static readonly ulong AV_CH_BOTTOM_FRONT_CENTER = 0x1UL << 38; + /// AV_CH_BOTTOM_FRONT_LEFT = (1ULL << AV_CHAN_BOTTOM_FRONT_LEFT ) + public static readonly ulong AV_CH_BOTTOM_FRONT_LEFT = 0x1UL << 39; + /// AV_CH_BOTTOM_FRONT_RIGHT = (1ULL << AV_CHAN_BOTTOM_FRONT_RIGHT ) + public static readonly ulong AV_CH_BOTTOM_FRONT_RIGHT = 0x1UL << 40; + /// AV_CH_FRONT_CENTER = (1ULL << AV_CHAN_FRONT_CENTER ) + public static readonly ulong AV_CH_FRONT_CENTER = 0x1UL << 2; + /// AV_CH_FRONT_LEFT = (1ULL << AV_CHAN_FRONT_LEFT ) + public static readonly ulong AV_CH_FRONT_LEFT = 0x1UL << 0; + /// AV_CH_FRONT_LEFT_OF_CENTER = (1ULL << AV_CHAN_FRONT_LEFT_OF_CENTER ) + public static readonly ulong AV_CH_FRONT_LEFT_OF_CENTER = 0x1UL << 6; + /// AV_CH_FRONT_RIGHT = (1ULL << AV_CHAN_FRONT_RIGHT ) + public static readonly ulong AV_CH_FRONT_RIGHT = 0x1UL << 1; + /// AV_CH_FRONT_RIGHT_OF_CENTER = (1ULL << AV_CHAN_FRONT_RIGHT_OF_CENTER) + public static readonly ulong AV_CH_FRONT_RIGHT_OF_CENTER = 0x1UL << 7; + /// AV_CH_LAYOUT_2_1 = (AV_CH_LAYOUT_STEREO|AV_CH_BACK_CENTER) + public static readonly ulong AV_CH_LAYOUT_2_1 = AV_CH_LAYOUT_STEREO | AV_CH_BACK_CENTER; + /// AV_CH_LAYOUT_2_2 = (AV_CH_LAYOUT_STEREO|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT) + public static readonly ulong AV_CH_LAYOUT_2_2 = AV_CH_LAYOUT_STEREO | AV_CH_SIDE_LEFT | AV_CH_SIDE_RIGHT; + /// AV_CH_LAYOUT_22POINT2 = 
(AV_CH_LAYOUT_5POINT1_BACK|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER|AV_CH_BACK_CENTER|AV_CH_LOW_FREQUENCY_2|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT|AV_CH_TOP_FRONT_LEFT|AV_CH_TOP_FRONT_RIGHT|AV_CH_TOP_FRONT_CENTER|AV_CH_TOP_CENTER|AV_CH_TOP_BACK_LEFT|AV_CH_TOP_BACK_RIGHT|AV_CH_TOP_SIDE_LEFT|AV_CH_TOP_SIDE_RIGHT|AV_CH_TOP_BACK_CENTER|AV_CH_BOTTOM_FRONT_CENTER|AV_CH_BOTTOM_FRONT_LEFT|AV_CH_BOTTOM_FRONT_RIGHT) + public static readonly ulong AV_CH_LAYOUT_22POINT2 = AV_CH_LAYOUT_5POINT1_BACK | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER | AV_CH_BACK_CENTER | AV_CH_LOW_FREQUENCY_2 | AV_CH_SIDE_LEFT | AV_CH_SIDE_RIGHT | AV_CH_TOP_FRONT_LEFT | AV_CH_TOP_FRONT_RIGHT | AV_CH_TOP_FRONT_CENTER | AV_CH_TOP_CENTER | AV_CH_TOP_BACK_LEFT | AV_CH_TOP_BACK_RIGHT | AV_CH_TOP_SIDE_LEFT | AV_CH_TOP_SIDE_RIGHT | AV_CH_TOP_BACK_CENTER | AV_CH_BOTTOM_FRONT_CENTER | AV_CH_BOTTOM_FRONT_LEFT | AV_CH_BOTTOM_FRONT_RIGHT; + /// AV_CH_LAYOUT_2POINT1 = (AV_CH_LAYOUT_STEREO|AV_CH_LOW_FREQUENCY) + public static readonly ulong AV_CH_LAYOUT_2POINT1 = AV_CH_LAYOUT_STEREO | AV_CH_LOW_FREQUENCY; + /// AV_CH_LAYOUT_3POINT1 = (AV_CH_LAYOUT_SURROUND|AV_CH_LOW_FREQUENCY) + public static readonly ulong AV_CH_LAYOUT_3POINT1 = AV_CH_LAYOUT_SURROUND | AV_CH_LOW_FREQUENCY; + /// AV_CH_LAYOUT_4POINT0 = (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_CENTER) + public static readonly ulong AV_CH_LAYOUT_4POINT0 = AV_CH_LAYOUT_SURROUND | AV_CH_BACK_CENTER; + /// AV_CH_LAYOUT_4POINT1 = (AV_CH_LAYOUT_4POINT0|AV_CH_LOW_FREQUENCY) + public static readonly ulong AV_CH_LAYOUT_4POINT1 = AV_CH_LAYOUT_4POINT0 | AV_CH_LOW_FREQUENCY; + /// AV_CH_LAYOUT_5POINT0 = (AV_CH_LAYOUT_SURROUND|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT) + public static readonly ulong AV_CH_LAYOUT_5POINT0 = AV_CH_LAYOUT_SURROUND | AV_CH_SIDE_LEFT | AV_CH_SIDE_RIGHT; + /// AV_CH_LAYOUT_5POINT0_BACK = (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT) + public static readonly ulong AV_CH_LAYOUT_5POINT0_BACK = AV_CH_LAYOUT_SURROUND | AV_CH_BACK_LEFT | 
AV_CH_BACK_RIGHT; + /// AV_CH_LAYOUT_5POINT1 = (AV_CH_LAYOUT_5POINT0|AV_CH_LOW_FREQUENCY) + public static readonly ulong AV_CH_LAYOUT_5POINT1 = AV_CH_LAYOUT_5POINT0 | AV_CH_LOW_FREQUENCY; + /// AV_CH_LAYOUT_5POINT1_BACK = (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_LOW_FREQUENCY) + public static readonly ulong AV_CH_LAYOUT_5POINT1_BACK = AV_CH_LAYOUT_5POINT0_BACK | AV_CH_LOW_FREQUENCY; + /// AV_CH_LAYOUT_6POINT0 = (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_CENTER) + public static readonly ulong AV_CH_LAYOUT_6POINT0 = AV_CH_LAYOUT_5POINT0 | AV_CH_BACK_CENTER; + /// AV_CH_LAYOUT_6POINT0_FRONT = (AV_CH_LAYOUT_2_2|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER) + public static readonly ulong AV_CH_LAYOUT_6POINT0_FRONT = AV_CH_LAYOUT_2_2 | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER; + /// AV_CH_LAYOUT_6POINT1 = (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_CENTER) + public static readonly ulong AV_CH_LAYOUT_6POINT1 = AV_CH_LAYOUT_5POINT1 | AV_CH_BACK_CENTER; + /// AV_CH_LAYOUT_6POINT1_BACK = (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_BACK_CENTER) + public static readonly ulong AV_CH_LAYOUT_6POINT1_BACK = AV_CH_LAYOUT_5POINT1_BACK | AV_CH_BACK_CENTER; + /// AV_CH_LAYOUT_6POINT1_FRONT = (AV_CH_LAYOUT_6POINT0_FRONT|AV_CH_LOW_FREQUENCY) + public static readonly ulong AV_CH_LAYOUT_6POINT1_FRONT = AV_CH_LAYOUT_6POINT0_FRONT | AV_CH_LOW_FREQUENCY; + /// AV_CH_LAYOUT_7POINT0 = (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT) + public static readonly ulong AV_CH_LAYOUT_7POINT0 = AV_CH_LAYOUT_5POINT0 | AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT; + /// AV_CH_LAYOUT_7POINT0_FRONT = (AV_CH_LAYOUT_5POINT0|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER) + public static readonly ulong AV_CH_LAYOUT_7POINT0_FRONT = AV_CH_LAYOUT_5POINT0 | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER; + /// AV_CH_LAYOUT_7POINT1 = (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT) + public static readonly ulong AV_CH_LAYOUT_7POINT1 = AV_CH_LAYOUT_5POINT1 | AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT; + /// 
AV_CH_LAYOUT_7POINT1_WIDE = (AV_CH_LAYOUT_5POINT1|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER) + public static readonly ulong AV_CH_LAYOUT_7POINT1_WIDE = AV_CH_LAYOUT_5POINT1 | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER; + /// AV_CH_LAYOUT_7POINT1_WIDE_BACK = (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER) + public static readonly ulong AV_CH_LAYOUT_7POINT1_WIDE_BACK = AV_CH_LAYOUT_5POINT1_BACK | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER; + /// AV_CH_LAYOUT_HEXADECAGONAL = (AV_CH_LAYOUT_OCTAGONAL|AV_CH_WIDE_LEFT|AV_CH_WIDE_RIGHT|AV_CH_TOP_BACK_LEFT|AV_CH_TOP_BACK_RIGHT|AV_CH_TOP_BACK_CENTER|AV_CH_TOP_FRONT_CENTER|AV_CH_TOP_FRONT_LEFT|AV_CH_TOP_FRONT_RIGHT) + public static readonly ulong AV_CH_LAYOUT_HEXADECAGONAL = AV_CH_LAYOUT_OCTAGONAL | AV_CH_WIDE_LEFT | AV_CH_WIDE_RIGHT | AV_CH_TOP_BACK_LEFT | AV_CH_TOP_BACK_RIGHT | AV_CH_TOP_BACK_CENTER | AV_CH_TOP_FRONT_CENTER | AV_CH_TOP_FRONT_LEFT | AV_CH_TOP_FRONT_RIGHT; + /// AV_CH_LAYOUT_HEXAGONAL = (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_BACK_CENTER) + public static readonly ulong AV_CH_LAYOUT_HEXAGONAL = AV_CH_LAYOUT_5POINT0_BACK | AV_CH_BACK_CENTER; + /// AV_CH_LAYOUT_MONO = (AV_CH_FRONT_CENTER) + public static readonly ulong AV_CH_LAYOUT_MONO = AV_CH_FRONT_CENTER; + /// AV_CH_LAYOUT_NATIVE = 0x8000000000000000ULL + public const ulong AV_CH_LAYOUT_NATIVE = 0x8000000000000000UL; + /// AV_CH_LAYOUT_OCTAGONAL = (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_CENTER|AV_CH_BACK_RIGHT) + public static readonly ulong AV_CH_LAYOUT_OCTAGONAL = AV_CH_LAYOUT_5POINT0 | AV_CH_BACK_LEFT | AV_CH_BACK_CENTER | AV_CH_BACK_RIGHT; + /// AV_CH_LAYOUT_QUAD = (AV_CH_LAYOUT_STEREO|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT) + public static readonly ulong AV_CH_LAYOUT_QUAD = AV_CH_LAYOUT_STEREO | AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT; + /// AV_CH_LAYOUT_STEREO = (AV_CH_FRONT_LEFT|AV_CH_FRONT_RIGHT) + public static readonly ulong AV_CH_LAYOUT_STEREO = AV_CH_FRONT_LEFT | 
AV_CH_FRONT_RIGHT; + /// AV_CH_LAYOUT_STEREO_DOWNMIX = (AV_CH_STEREO_LEFT|AV_CH_STEREO_RIGHT) + public static readonly ulong AV_CH_LAYOUT_STEREO_DOWNMIX = AV_CH_STEREO_LEFT | AV_CH_STEREO_RIGHT; + /// AV_CH_LAYOUT_SURROUND = (AV_CH_LAYOUT_STEREO|AV_CH_FRONT_CENTER) + public static readonly ulong AV_CH_LAYOUT_SURROUND = AV_CH_LAYOUT_STEREO | AV_CH_FRONT_CENTER; + /// AV_CH_LOW_FREQUENCY = (1ULL << AV_CHAN_LOW_FREQUENCY ) + public static readonly ulong AV_CH_LOW_FREQUENCY = 0x1UL << 3; + /// AV_CH_LOW_FREQUENCY_2 = (1ULL << AV_CHAN_LOW_FREQUENCY_2 ) + public static readonly ulong AV_CH_LOW_FREQUENCY_2 = 0x1UL << 35; + /// AV_CH_SIDE_LEFT = (1ULL << AV_CHAN_SIDE_LEFT ) + public static readonly ulong AV_CH_SIDE_LEFT = 0x1UL << 9; + /// AV_CH_SIDE_RIGHT = (1ULL << AV_CHAN_SIDE_RIGHT ) + public static readonly ulong AV_CH_SIDE_RIGHT = 0x1UL << 10; + /// AV_CH_STEREO_LEFT = (1ULL << AV_CHAN_STEREO_LEFT ) + public static readonly ulong AV_CH_STEREO_LEFT = 0x1UL << 29; + /// AV_CH_STEREO_RIGHT = (1ULL << AV_CHAN_STEREO_RIGHT ) + public static readonly ulong AV_CH_STEREO_RIGHT = 0x1UL << 30; + /// AV_CH_SURROUND_DIRECT_LEFT = (1ULL << AV_CHAN_SURROUND_DIRECT_LEFT ) + public static readonly ulong AV_CH_SURROUND_DIRECT_LEFT = 0x1UL << 33; + /// AV_CH_SURROUND_DIRECT_RIGHT = (1ULL << AV_CHAN_SURROUND_DIRECT_RIGHT) + public static readonly ulong AV_CH_SURROUND_DIRECT_RIGHT = 0x1UL << 34; + /// AV_CH_TOP_BACK_CENTER = (1ULL << AV_CHAN_TOP_BACK_CENTER ) + public static readonly ulong AV_CH_TOP_BACK_CENTER = 0x1UL << 16; + /// AV_CH_TOP_BACK_LEFT = (1ULL << AV_CHAN_TOP_BACK_LEFT ) + public static readonly ulong AV_CH_TOP_BACK_LEFT = 0x1UL << 15; + /// AV_CH_TOP_BACK_RIGHT = (1ULL << AV_CHAN_TOP_BACK_RIGHT ) + public static readonly ulong AV_CH_TOP_BACK_RIGHT = 0x1UL << 17; + /// AV_CH_TOP_CENTER = (1ULL << AV_CHAN_TOP_CENTER ) + public static readonly ulong AV_CH_TOP_CENTER = 0x1UL << 11; + /// AV_CH_TOP_FRONT_CENTER = (1ULL << AV_CHAN_TOP_FRONT_CENTER ) + public static readonly 
ulong AV_CH_TOP_FRONT_CENTER = 0x1UL << 13; + /// AV_CH_TOP_FRONT_LEFT = (1ULL << AV_CHAN_TOP_FRONT_LEFT ) + public static readonly ulong AV_CH_TOP_FRONT_LEFT = 0x1UL << 12; + /// AV_CH_TOP_FRONT_RIGHT = (1ULL << AV_CHAN_TOP_FRONT_RIGHT ) + public static readonly ulong AV_CH_TOP_FRONT_RIGHT = 0x1UL << 14; + /// AV_CH_TOP_SIDE_LEFT = (1ULL << AV_CHAN_TOP_SIDE_LEFT ) + public static readonly ulong AV_CH_TOP_SIDE_LEFT = 0x1UL << 36; + /// AV_CH_TOP_SIDE_RIGHT = (1ULL << AV_CHAN_TOP_SIDE_RIGHT ) + public static readonly ulong AV_CH_TOP_SIDE_RIGHT = 0x1UL << 37; + /// AV_CH_WIDE_LEFT = (1ULL << AV_CHAN_WIDE_LEFT ) + public static readonly ulong AV_CH_WIDE_LEFT = 0x1UL << 31; + /// AV_CH_WIDE_RIGHT = (1ULL << AV_CHAN_WIDE_RIGHT ) + public static readonly ulong AV_CH_WIDE_RIGHT = 0x1UL << 32; + // public static AV_CHANNEL_LAYOUT_2_1 = AV_CHANNEL_LAYOUT_MASK(0x3, AV_CH_LAYOUT_2_1); + // public static AV_CHANNEL_LAYOUT_2_2 = AV_CHANNEL_LAYOUT_MASK(0x4, AV_CH_LAYOUT_2_2); + // public static AV_CHANNEL_LAYOUT_22POINT2 = AV_CHANNEL_LAYOUT_MASK(0x18, AV_CH_LAYOUT_22POINT2); + // public static AV_CHANNEL_LAYOUT_2POINT1 = AV_CHANNEL_LAYOUT_MASK(0x3, AV_CH_LAYOUT_2POINT1); + // public static AV_CHANNEL_LAYOUT_3POINT1 = AV_CHANNEL_LAYOUT_MASK(0x4, AV_CH_LAYOUT_3POINT1); + // public static AV_CHANNEL_LAYOUT_4POINT0 = AV_CHANNEL_LAYOUT_MASK(0x4, AV_CH_LAYOUT_4POINT0); + // public static AV_CHANNEL_LAYOUT_4POINT1 = AV_CHANNEL_LAYOUT_MASK(0x5, AV_CH_LAYOUT_4POINT1); + // public static AV_CHANNEL_LAYOUT_5POINT0 = AV_CHANNEL_LAYOUT_MASK(0x5, AV_CH_LAYOUT_5POINT0); + // public static AV_CHANNEL_LAYOUT_5POINT0_BACK = AV_CHANNEL_LAYOUT_MASK(0x5, AV_CH_LAYOUT_5POINT0_BACK); + // public static AV_CHANNEL_LAYOUT_5POINT1 = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_5POINT1); + // public static AV_CHANNEL_LAYOUT_5POINT1_BACK = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_5POINT1_BACK); + // public static AV_CHANNEL_LAYOUT_6POINT0 = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_6POINT0); + // public static 
AV_CHANNEL_LAYOUT_6POINT0_FRONT = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_6POINT0_FRONT); + // public static AV_CHANNEL_LAYOUT_6POINT1 = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_6POINT1); + // public static AV_CHANNEL_LAYOUT_6POINT1_BACK = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_6POINT1_BACK); + // public static AV_CHANNEL_LAYOUT_6POINT1_FRONT = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_6POINT1_FRONT); + // public static AV_CHANNEL_LAYOUT_7POINT0 = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_7POINT0); + // public static AV_CHANNEL_LAYOUT_7POINT0_FRONT = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_7POINT0_FRONT); + // public static AV_CHANNEL_LAYOUT_7POINT1 = AV_CHANNEL_LAYOUT_MASK(0x8, AV_CH_LAYOUT_7POINT1); + // public static AV_CHANNEL_LAYOUT_7POINT1_WIDE = AV_CHANNEL_LAYOUT_MASK(0x8, AV_CH_LAYOUT_7POINT1_WIDE); + // public static AV_CHANNEL_LAYOUT_7POINT1_WIDE_BACK = AV_CHANNEL_LAYOUT_MASK(0x8, AV_CH_LAYOUT_7POINT1_WIDE_BACK); + // public static AV_CHANNEL_LAYOUT_AMBISONIC_FIRST_ORDER = { .order = AV_CHANNEL_ORDER_AMBISONIC, .nb_channels = 4, .u = { .mask = 0 }}; + // public static AV_CHANNEL_LAYOUT_HEXADECAGONAL = AV_CHANNEL_LAYOUT_MASK(0x10, AV_CH_LAYOUT_HEXADECAGONAL); + // public static AV_CHANNEL_LAYOUT_HEXAGONAL = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_HEXAGONAL); + // public static AV_CHANNEL_LAYOUT_MASK = nb; + // public static AV_CHANNEL_LAYOUT_MONO = AV_CHANNEL_LAYOUT_MASK(0x1, AV_CH_LAYOUT_MONO); + // public static AV_CHANNEL_LAYOUT_OCTAGONAL = AV_CHANNEL_LAYOUT_MASK(0x8, AV_CH_LAYOUT_OCTAGONAL); + // public static AV_CHANNEL_LAYOUT_QUAD = AV_CHANNEL_LAYOUT_MASK(0x4, AV_CH_LAYOUT_QUAD); + // public static AV_CHANNEL_LAYOUT_STEREO = AV_CHANNEL_LAYOUT_MASK(0x2, AV_CH_LAYOUT_STEREO); + // public static AV_CHANNEL_LAYOUT_STEREO_DOWNMIX = AV_CHANNEL_LAYOUT_MASK(0x2, AV_CH_LAYOUT_STEREO_DOWNMIX); + // public static AV_CHANNEL_LAYOUT_SURROUND = AV_CHANNEL_LAYOUT_MASK(0x3, AV_CH_LAYOUT_SURROUND); + // public static av_clip = av_clip_c; + // public static 
av_clip_int16 = av_clip_int16_c; + // public static av_clip_int8 = av_clip_int8_c; + // public static av_clip_intp2 = av_clip_intp2_c; + // public static av_clip_uint16 = av_clip_uint16_c; + // public static av_clip_uint8 = av_clip_uint8_c; + // public static av_clip_uintp2 = av_clip_uintp2_c; + // public static av_clip64 = av_clip64_c; + // public static av_clipd = av_clipd_c; + // public static av_clipf = av_clipf_c; + // public static av_clipl_int32 = av_clipl_int32_c; + /// AV_CODEC_CAP_AUTO_THREADS = AV_CODEC_CAP_OTHER_THREADS + public const int AV_CODEC_CAP_AUTO_THREADS = AV_CODEC_CAP_OTHER_THREADS; + /// AV_CODEC_CAP_AVOID_PROBING = (1 << 17) + public const int AV_CODEC_CAP_AVOID_PROBING = 0x1 << 0x11; + /// AV_CODEC_CAP_CHANNEL_CONF = (1 << 10) + public const int AV_CODEC_CAP_CHANNEL_CONF = 0x1 << 0xa; + /// AV_CODEC_CAP_DELAY = (1 << 5) + public const int AV_CODEC_CAP_DELAY = 0x1 << 0x5; + /// AV_CODEC_CAP_DR1 = (1 << 1) + public const int AV_CODEC_CAP_DR1 = 0x1 << 0x1; + /// AV_CODEC_CAP_DRAW_HORIZ_BAND = (1 << 0) + public const int AV_CODEC_CAP_DRAW_HORIZ_BAND = 0x1 << 0x0; + /// AV_CODEC_CAP_ENCODER_FLUSH = (1 << 21) + public const int AV_CODEC_CAP_ENCODER_FLUSH = 0x1 << 0x15; + /// AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE = (1 << 20) + public const int AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE = 0x1 << 0x14; + /// AV_CODEC_CAP_EXPERIMENTAL = (1 << 9) + public const int AV_CODEC_CAP_EXPERIMENTAL = 0x1 << 0x9; + /// AV_CODEC_CAP_FRAME_THREADS = (1 << 12) + public const int AV_CODEC_CAP_FRAME_THREADS = 0x1 << 0xc; + /// AV_CODEC_CAP_HARDWARE = (1 << 18) + public const int AV_CODEC_CAP_HARDWARE = 0x1 << 0x12; + /// AV_CODEC_CAP_HYBRID = (1 << 19) + public const int AV_CODEC_CAP_HYBRID = 0x1 << 0x13; + /// AV_CODEC_CAP_INTRA_ONLY = 0x40000000 + public const int AV_CODEC_CAP_INTRA_ONLY = 0x40000000; + /// AV_CODEC_CAP_LOSSLESS = 0x80000000 + public const uint AV_CODEC_CAP_LOSSLESS = 0x80000000U; + /// AV_CODEC_CAP_OTHER_THREADS = (1 << 15) + public const int 
AV_CODEC_CAP_OTHER_THREADS = 0x1 << 0xf; + /// AV_CODEC_CAP_PARAM_CHANGE = (1 << 14) + public const int AV_CODEC_CAP_PARAM_CHANGE = 0x1 << 0xe; + /// AV_CODEC_CAP_SLICE_THREADS = (1 << 13) + public const int AV_CODEC_CAP_SLICE_THREADS = 0x1 << 0xd; + /// AV_CODEC_CAP_SMALL_LAST_FRAME = (1 << 6) + public const int AV_CODEC_CAP_SMALL_LAST_FRAME = 0x1 << 0x6; + /// AV_CODEC_CAP_SUBFRAMES = (1 << 8) + public const int AV_CODEC_CAP_SUBFRAMES = 0x1 << 0x8; + /// AV_CODEC_CAP_TRUNCATED = (1 << 3) + public const int AV_CODEC_CAP_TRUNCATED = 0x1 << 0x3; + /// AV_CODEC_CAP_VARIABLE_FRAME_SIZE = (1 << 16) + public const int AV_CODEC_CAP_VARIABLE_FRAME_SIZE = 0x1 << 0x10; + /// AV_CODEC_EXPORT_DATA_FILM_GRAIN = 0x1 << 0x3 + public const int AV_CODEC_EXPORT_DATA_FILM_GRAIN = 0x1 << 0x3; + /// AV_CODEC_EXPORT_DATA_MVS = 0x1 << 0x0 + public const int AV_CODEC_EXPORT_DATA_MVS = 0x1 << 0x0; + /// AV_CODEC_EXPORT_DATA_PRFT = 0x1 << 0x1 + public const int AV_CODEC_EXPORT_DATA_PRFT = 0x1 << 0x1; + /// AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS = 0x1 << 0x2 + public const int AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS = 0x1 << 0x2; + /// AV_CODEC_FLAG_4MV = 0x1 << 0x2 + public const int AV_CODEC_FLAG_4MV = 0x1 << 0x2; + /// AV_CODEC_FLAG_AC_PRED = 0x1 << 0x18 + public const int AV_CODEC_FLAG_AC_PRED = 0x1 << 0x18; + /// AV_CODEC_FLAG_BITEXACT = 0x1 << 0x17 + public const int AV_CODEC_FLAG_BITEXACT = 0x1 << 0x17; + /// AV_CODEC_FLAG_CLOSED_GOP = 0x1U << 0x1f + public const uint AV_CODEC_FLAG_CLOSED_GOP = 0x1U << 0x1f; + /// AV_CODEC_FLAG_DROPCHANGED = 0x1 << 0x5 + public const int AV_CODEC_FLAG_DROPCHANGED = 0x1 << 0x5; + /// AV_CODEC_FLAG_GLOBAL_HEADER = 0x1 << 0x16 + public const int AV_CODEC_FLAG_GLOBAL_HEADER = 0x1 << 0x16; + /// AV_CODEC_FLAG_GRAY = 0x1 << 0xd + public const int AV_CODEC_FLAG_GRAY = 0x1 << 0xd; + /// AV_CODEC_FLAG_INTERLACED_DCT = 0x1 << 0x12 + public const int AV_CODEC_FLAG_INTERLACED_DCT = 0x1 << 0x12; + /// AV_CODEC_FLAG_INTERLACED_ME = 0x1 << 0x1d + public const int 
AV_CODEC_FLAG_INTERLACED_ME = 0x1 << 0x1d; + /// AV_CODEC_FLAG_LOOP_FILTER = 0x1 << 0xb + public const int AV_CODEC_FLAG_LOOP_FILTER = 0x1 << 0xb; + /// AV_CODEC_FLAG_LOW_DELAY = 0x1 << 0x13 + public const int AV_CODEC_FLAG_LOW_DELAY = 0x1 << 0x13; + /// AV_CODEC_FLAG_OUTPUT_CORRUPT = 0x1 << 0x3 + public const int AV_CODEC_FLAG_OUTPUT_CORRUPT = 0x1 << 0x3; + /// AV_CODEC_FLAG_PASS1 = 0x1 << 0x9 + public const int AV_CODEC_FLAG_PASS1 = 0x1 << 0x9; + /// AV_CODEC_FLAG_PASS2 = 0x1 << 0xa + public const int AV_CODEC_FLAG_PASS2 = 0x1 << 0xa; + /// AV_CODEC_FLAG_PSNR = 0x1 << 0xf + public const int AV_CODEC_FLAG_PSNR = 0x1 << 0xf; + /// AV_CODEC_FLAG_QPEL = 0x1 << 0x4 + public const int AV_CODEC_FLAG_QPEL = 0x1 << 0x4; + /// AV_CODEC_FLAG_QSCALE = 0x1 << 0x1 + public const int AV_CODEC_FLAG_QSCALE = 0x1 << 0x1; + /// AV_CODEC_FLAG_TRUNCATED = 0x1 << 0x10 + public const int AV_CODEC_FLAG_TRUNCATED = 0x1 << 0x10; + /// AV_CODEC_FLAG_UNALIGNED = 0x1 << 0x0 + public const int AV_CODEC_FLAG_UNALIGNED = 0x1 << 0x0; + /// AV_CODEC_FLAG2_CHUNKS = 0x1 << 0xf + public const int AV_CODEC_FLAG2_CHUNKS = 0x1 << 0xf; + /// AV_CODEC_FLAG2_DROP_FRAME_TIMECODE = 0x1 << 0xd + public const int AV_CODEC_FLAG2_DROP_FRAME_TIMECODE = 0x1 << 0xd; + /// AV_CODEC_FLAG2_EXPORT_MVS = 0x1 << 0x1c + public const int AV_CODEC_FLAG2_EXPORT_MVS = 0x1 << 0x1c; + /// AV_CODEC_FLAG2_FAST = 0x1 << 0x0 + public const int AV_CODEC_FLAG2_FAST = 0x1 << 0x0; + /// AV_CODEC_FLAG2_IGNORE_CROP = 0x1 << 0x10 + public const int AV_CODEC_FLAG2_IGNORE_CROP = 0x1 << 0x10; + /// AV_CODEC_FLAG2_LOCAL_HEADER = 0x1 << 0x3 + public const int AV_CODEC_FLAG2_LOCAL_HEADER = 0x1 << 0x3; + /// AV_CODEC_FLAG2_NO_OUTPUT = 0x1 << 0x2 + public const int AV_CODEC_FLAG2_NO_OUTPUT = 0x1 << 0x2; + /// AV_CODEC_FLAG2_RO_FLUSH_NOOP = 0x1 << 0x1e + public const int AV_CODEC_FLAG2_RO_FLUSH_NOOP = 0x1 << 0x1e; + /// AV_CODEC_FLAG2_SHOW_ALL = 0x1 << 0x16 + public const int AV_CODEC_FLAG2_SHOW_ALL = 0x1 << 0x16; + /// AV_CODEC_FLAG2_SKIP_MANUAL 
= 0x1 << 0x1d + public const int AV_CODEC_FLAG2_SKIP_MANUAL = 0x1 << 0x1d; + /// AV_CODEC_ID_H265 = AV_CODEC_ID_HEVC + public static readonly int AV_CODEC_ID_H265 = 173; + /// AV_CODEC_ID_H266 = AV_CODEC_ID_VVC + public static readonly int AV_CODEC_ID_H266 = 196; + /// AV_CODEC_ID_IFF_BYTERUN1 = AV_CODEC_ID_IFF_ILBM + public static readonly int AV_CODEC_ID_IFF_BYTERUN1 = 136; + /// AV_CODEC_PROP_BITMAP_SUB = 0x1 << 0x10 + public const int AV_CODEC_PROP_BITMAP_SUB = 0x1 << 0x10; + /// AV_CODEC_PROP_INTRA_ONLY = 0x1 << 0x0 + public const int AV_CODEC_PROP_INTRA_ONLY = 0x1 << 0x0; + /// AV_CODEC_PROP_LOSSLESS = 0x1 << 0x2 + public const int AV_CODEC_PROP_LOSSLESS = 0x1 << 0x2; + /// AV_CODEC_PROP_LOSSY = 0x1 << 0x1 + public const int AV_CODEC_PROP_LOSSY = 0x1 << 0x1; + /// AV_CODEC_PROP_REORDER = 0x1 << 0x3 + public const int AV_CODEC_PROP_REORDER = 0x1 << 0x3; + /// AV_CODEC_PROP_TEXT_SUB = 0x1 << 0x11 + public const int AV_CODEC_PROP_TEXT_SUB = 0x1 << 0x11; + // public static av_cold = __attribute__((cold)); + // public static av_const = __attribute__((const)); + /// AV_CPU_FLAG_3DNOW = 0x4 + public const int AV_CPU_FLAG_3DNOW = 0x4; + /// AV_CPU_FLAG_3DNOWEXT = 0x20 + public const int AV_CPU_FLAG_3DNOWEXT = 0x20; + /// AV_CPU_FLAG_AESNI = 0x80000 + public const int AV_CPU_FLAG_AESNI = 0x80000; + /// AV_CPU_FLAG_ALTIVEC = 0x1 + public const int AV_CPU_FLAG_ALTIVEC = 0x1; + /// AV_CPU_FLAG_ARMV5TE = 0x1 << 0x0 + public const int AV_CPU_FLAG_ARMV5TE = 0x1 << 0x0; + /// AV_CPU_FLAG_ARMV6 = 0x1 << 0x1 + public const int AV_CPU_FLAG_ARMV6 = 0x1 << 0x1; + /// AV_CPU_FLAG_ARMV6T2 = 0x1 << 0x2 + public const int AV_CPU_FLAG_ARMV6T2 = 0x1 << 0x2; + /// AV_CPU_FLAG_ARMV8 = 0x1 << 0x6 + public const int AV_CPU_FLAG_ARMV8 = 0x1 << 0x6; + /// AV_CPU_FLAG_ATOM = 0x10000000 + public const int AV_CPU_FLAG_ATOM = 0x10000000; + /// AV_CPU_FLAG_AVX = 0x4000 + public const int AV_CPU_FLAG_AVX = 0x4000; + /// AV_CPU_FLAG_AVX2 = 0x8000 + public const int AV_CPU_FLAG_AVX2 = 0x8000; + /// 
AV_CPU_FLAG_AVX512 = 0x100000 + public const int AV_CPU_FLAG_AVX512 = 0x100000; + /// AV_CPU_FLAG_AVX512ICL = 0x200000 + public const int AV_CPU_FLAG_AVX512ICL = 0x200000; + /// AV_CPU_FLAG_AVXSLOW = 0x8000000 + public const int AV_CPU_FLAG_AVXSLOW = 0x8000000; + /// AV_CPU_FLAG_BMI1 = 0x20000 + public const int AV_CPU_FLAG_BMI1 = 0x20000; + /// AV_CPU_FLAG_BMI2 = 0x40000 + public const int AV_CPU_FLAG_BMI2 = 0x40000; + /// AV_CPU_FLAG_CMOV = 0x1000 + public const int AV_CPU_FLAG_CMOV = 0x1000; + /// AV_CPU_FLAG_FMA3 = 0x10000 + public const int AV_CPU_FLAG_FMA3 = 0x10000; + /// AV_CPU_FLAG_FMA4 = 0x800 + public const int AV_CPU_FLAG_FMA4 = 0x800; + /// AV_CPU_FLAG_FORCE = 0x80000000U + public const uint AV_CPU_FLAG_FORCE = 0x80000000U; + /// AV_CPU_FLAG_LASX = 0x1 << 0x1 + public const int AV_CPU_FLAG_LASX = 0x1 << 0x1; + /// AV_CPU_FLAG_LSX = 0x1 << 0x0 + public const int AV_CPU_FLAG_LSX = 0x1 << 0x0; + /// AV_CPU_FLAG_MMI = 0x1 << 0x0 + public const int AV_CPU_FLAG_MMI = 0x1 << 0x0; + /// AV_CPU_FLAG_MMX = 0x1 + public const int AV_CPU_FLAG_MMX = 0x1; + /// AV_CPU_FLAG_MMX2 = 0x2 + public const int AV_CPU_FLAG_MMX2 = 0x2; + /// AV_CPU_FLAG_MMXEXT = 0x2 + public const int AV_CPU_FLAG_MMXEXT = 0x2; + /// AV_CPU_FLAG_MSA = 0x1 << 0x1 + public const int AV_CPU_FLAG_MSA = 0x1 << 0x1; + /// AV_CPU_FLAG_NEON = 0x1 << 0x5 + public const int AV_CPU_FLAG_NEON = 0x1 << 0x5; + /// AV_CPU_FLAG_POWER8 = 0x4 + public const int AV_CPU_FLAG_POWER8 = 0x4; + /// AV_CPU_FLAG_SETEND = 0x1 << 0x10 + public const int AV_CPU_FLAG_SETEND = 0x1 << 0x10; + /// AV_CPU_FLAG_SLOW_GATHER = 0x2000000 + public const int AV_CPU_FLAG_SLOW_GATHER = 0x2000000; + /// AV_CPU_FLAG_SSE = 0x8 + public const int AV_CPU_FLAG_SSE = 0x8; + /// AV_CPU_FLAG_SSE2 = 0x10 + public const int AV_CPU_FLAG_SSE2 = 0x10; + /// AV_CPU_FLAG_SSE2SLOW = 0x40000000 + public const int AV_CPU_FLAG_SSE2SLOW = 0x40000000; + /// AV_CPU_FLAG_SSE3 = 0x40 + public const int AV_CPU_FLAG_SSE3 = 0x40; + /// AV_CPU_FLAG_SSE3SLOW = 
0x20000000 + public const int AV_CPU_FLAG_SSE3SLOW = 0x20000000; + /// AV_CPU_FLAG_SSE4 = 0x100 + public const int AV_CPU_FLAG_SSE4 = 0x100; + /// AV_CPU_FLAG_SSE42 = 0x200 + public const int AV_CPU_FLAG_SSE42 = 0x200; + /// AV_CPU_FLAG_SSSE3 = 0x80 + public const int AV_CPU_FLAG_SSSE3 = 0x80; + /// AV_CPU_FLAG_SSSE3SLOW = 0x4000000 + public const int AV_CPU_FLAG_SSSE3SLOW = 0x4000000; + /// AV_CPU_FLAG_VFP = 0x1 << 0x3 + public const int AV_CPU_FLAG_VFP = 0x1 << 0x3; + /// AV_CPU_FLAG_VFP_VM = 0x1 << 0x7 + public const int AV_CPU_FLAG_VFP_VM = 0x1 << 0x7; + /// AV_CPU_FLAG_VFPV3 = 0x1 << 0x4 + public const int AV_CPU_FLAG_VFPV3 = 0x1 << 0x4; + /// AV_CPU_FLAG_VSX = 0x2 + public const int AV_CPU_FLAG_VSX = 0x2; + /// AV_CPU_FLAG_XOP = 0x400 + public const int AV_CPU_FLAG_XOP = 0x400; + /// AV_DICT_APPEND = 32 + public const int AV_DICT_APPEND = 0x20; + /// AV_DICT_DONT_OVERWRITE = 16 + public const int AV_DICT_DONT_OVERWRITE = 0x10; + /// AV_DICT_DONT_STRDUP_KEY = 4 + public const int AV_DICT_DONT_STRDUP_KEY = 0x4; + /// AV_DICT_DONT_STRDUP_VAL = 8 + public const int AV_DICT_DONT_STRDUP_VAL = 0x8; + /// AV_DICT_IGNORE_SUFFIX = 2 + public const int AV_DICT_IGNORE_SUFFIX = 0x2; + /// AV_DICT_MATCH_CASE = 1 + public const int AV_DICT_MATCH_CASE = 0x1; + /// AV_DICT_MULTIKEY = 64 + public const int AV_DICT_MULTIKEY = 0x40; + /// AV_DISPOSITION_ATTACHED_PIC = (1 << 10) + public const int AV_DISPOSITION_ATTACHED_PIC = 0x1 << 0xa; + /// AV_DISPOSITION_CAPTIONS = (1 << 16) + public const int AV_DISPOSITION_CAPTIONS = 0x1 << 0x10; + /// AV_DISPOSITION_CLEAN_EFFECTS = (1 << 9) + public const int AV_DISPOSITION_CLEAN_EFFECTS = 0x1 << 0x9; + /// AV_DISPOSITION_COMMENT = (1 << 3) + public const int AV_DISPOSITION_COMMENT = 0x1 << 0x3; + /// AV_DISPOSITION_DEFAULT = (1 << 0) + public const int AV_DISPOSITION_DEFAULT = 0x1 << 0x0; + /// AV_DISPOSITION_DEPENDENT = (1 << 19) + public const int AV_DISPOSITION_DEPENDENT = 0x1 << 0x13; + /// AV_DISPOSITION_DESCRIPTIONS = (1 << 17) + 
public const int AV_DISPOSITION_DESCRIPTIONS = 0x1 << 0x11; + /// AV_DISPOSITION_DUB = (1 << 1) + public const int AV_DISPOSITION_DUB = 0x1 << 0x1; + /// AV_DISPOSITION_FORCED = (1 << 6) + public const int AV_DISPOSITION_FORCED = 0x1 << 0x6; + /// AV_DISPOSITION_HEARING_IMPAIRED = (1 << 7) + public const int AV_DISPOSITION_HEARING_IMPAIRED = 0x1 << 0x7; + /// AV_DISPOSITION_KARAOKE = (1 << 5) + public const int AV_DISPOSITION_KARAOKE = 0x1 << 0x5; + /// AV_DISPOSITION_LYRICS = (1 << 4) + public const int AV_DISPOSITION_LYRICS = 0x1 << 0x4; + /// AV_DISPOSITION_METADATA = (1 << 18) + public const int AV_DISPOSITION_METADATA = 0x1 << 0x12; + /// AV_DISPOSITION_NON_DIEGETIC = (1 << 12) + public const int AV_DISPOSITION_NON_DIEGETIC = 0x1 << 0xc; + /// AV_DISPOSITION_ORIGINAL = (1 << 2) + public const int AV_DISPOSITION_ORIGINAL = 0x1 << 0x2; + /// AV_DISPOSITION_STILL_IMAGE = (1 << 20) + public const int AV_DISPOSITION_STILL_IMAGE = 0x1 << 0x14; + /// AV_DISPOSITION_TIMED_THUMBNAILS = (1 << 11) + public const int AV_DISPOSITION_TIMED_THUMBNAILS = 0x1 << 0xb; + /// AV_DISPOSITION_VISUAL_IMPAIRED = (1 << 8) + public const int AV_DISPOSITION_VISUAL_IMPAIRED = 0x1 << 0x8; + /// AV_EF_AGGRESSIVE = 0x1 << 0x12 + public const int AV_EF_AGGRESSIVE = 0x1 << 0x12; + /// AV_EF_BITSTREAM = 0x1 << 0x1 + public const int AV_EF_BITSTREAM = 0x1 << 0x1; + /// AV_EF_BUFFER = 0x1 << 0x2 + public const int AV_EF_BUFFER = 0x1 << 0x2; + /// AV_EF_CAREFUL = 0x1 << 0x10 + public const int AV_EF_CAREFUL = 0x1 << 0x10; + /// AV_EF_COMPLIANT = 0x1 << 0x11 + public const int AV_EF_COMPLIANT = 0x1 << 0x11; + /// AV_EF_CRCCHECK = 0x1 << 0x0 + public const int AV_EF_CRCCHECK = 0x1 << 0x0; + /// AV_EF_EXPLODE = 0x1 << 0x3 + public const int AV_EF_EXPLODE = 0x1 << 0x3; + /// AV_EF_IGNORE_ERR = 0x1 << 0xf + public const int AV_EF_IGNORE_ERR = 0x1 << 0xf; + // public static av_err2str = (errnum) av_make_error_string((char[AV_ERROR_MAX_STRING_SIZE]){0}, AV_ERROR_MAX_STRING_SIZE, errnum); + /// 
AV_ERROR_MAX_STRING_SIZE = 64 + public const int AV_ERROR_MAX_STRING_SIZE = 0x40; + // public static av_extern_inline = inline; + /// AV_FOURCC_MAX_STRING_SIZE = 32 + public const int AV_FOURCC_MAX_STRING_SIZE = 0x20; + // public static av_fourcc2str = (fourcc) av_fourcc_make_string((char[AV_FOURCC_MAX_STRING_SIZE]){0}, fourcc); + /// AV_FRAME_FILENAME_FLAGS_MULTIPLE = 1 + public const int AV_FRAME_FILENAME_FLAGS_MULTIPLE = 0x1; + /// AV_FRAME_FLAG_CORRUPT = (1 << 0) + public const int AV_FRAME_FLAG_CORRUPT = 0x1 << 0x0; + /// AV_FRAME_FLAG_DISCARD = (1 << 2) + public const int AV_FRAME_FLAG_DISCARD = 0x1 << 0x2; + // public static AV_GCC_VERSION_AT_LEAST = x; + // public static AV_GCC_VERSION_AT_MOST = x; + /// AV_GET_BUFFER_FLAG_REF = 0x1 << 0x0 + public const int AV_GET_BUFFER_FLAG_REF = 0x1 << 0x0; + /// AV_GET_ENCODE_BUFFER_FLAG_REF = 0x1 << 0x0 + public const int AV_GET_ENCODE_BUFFER_FLAG_REF = 0x1 << 0x0; + // public static AV_GLUE = (a, b) a ## b; + // public static AV_HAS_BUILTIN = (x)(__has_builtin(x)); + /// AV_HAVE_BIGENDIAN = 0 + public const int AV_HAVE_BIGENDIAN = 0x0; + /// AV_HAVE_FAST_UNALIGNED = 1 + public const int AV_HAVE_FAST_UNALIGNED = 0x1; + /// AV_HWACCEL_CODEC_CAP_EXPERIMENTAL = 0x200 + public const int AV_HWACCEL_CODEC_CAP_EXPERIMENTAL = 0x200; + /// AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH = 0x1 << 0x1 + public const int AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH = 0x1 << 0x1; + /// AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH = 0x1 << 0x2 + public const int AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH = 0x1 << 0x2; + /// AV_HWACCEL_FLAG_IGNORE_LEVEL = 0x1 << 0x0 + public const int AV_HWACCEL_FLAG_IGNORE_LEVEL = 0x1 << 0x0; + /// AV_INPUT_BUFFER_MIN_SIZE = 0x4000 + public const int AV_INPUT_BUFFER_MIN_SIZE = 0x4000; + /// AV_INPUT_BUFFER_PADDING_SIZE = 64 + public const int AV_INPUT_BUFFER_PADDING_SIZE = 0x40; + // public static av_int_list_length = list; + // public static AV_IS_INPUT_DEVICE = (category)((category)(==41) || (category)(==43) || (category)(==45)); + 
// public static AV_IS_OUTPUT_DEVICE = (category)((category)(==40) || (category)(==42) || (category)(==44)); + // public static AV_JOIN = a; + // public static AV_LOG_C = (x)((x)(<<0x8)); + /// AV_LOG_DEBUG = 48 + public const int AV_LOG_DEBUG = 0x30; + /// AV_LOG_ERROR = 16 + public const int AV_LOG_ERROR = 0x10; + /// AV_LOG_FATAL = 8 + public const int AV_LOG_FATAL = 0x8; + /// AV_LOG_INFO = 32 + public const int AV_LOG_INFO = 0x20; + /// AV_LOG_MAX_OFFSET = (AV_LOG_TRACE - AV_LOG_QUIET) + public const int AV_LOG_MAX_OFFSET = AV_LOG_TRACE - AV_LOG_QUIET; + /// AV_LOG_PANIC = 0 + public const int AV_LOG_PANIC = 0x0; + /// AV_LOG_PRINT_LEVEL = 2 + public const int AV_LOG_PRINT_LEVEL = 0x2; + /// AV_LOG_QUIET = -8 + public const int AV_LOG_QUIET = -0x8; + /// AV_LOG_SKIP_REPEATED = 1 + public const int AV_LOG_SKIP_REPEATED = 0x1; + /// AV_LOG_TRACE = 56 + public const int AV_LOG_TRACE = 0x38; + /// AV_LOG_VERBOSE = 40 + public const int AV_LOG_VERBOSE = 0x28; + /// AV_LOG_WARNING = 24 + public const int AV_LOG_WARNING = 0x18; + // public static av_mod_uintp2 = av_mod_uintp2_c; + // public static AV_NE = be; + // public static av_noinline = __declspec(noinline); + /// AV_NOPTS_VALUE = ((int64_t)UINT64_C(0x8000000000000000)) + public static readonly long AV_NOPTS_VALUE = (long)(UINT64_C(0x8000000000000000L)); + // public static av_noreturn = __attribute__((noreturn)); + // public static AV_NOWARN_DEPRECATED = (code)(_Pragma("GCC diagnostic push")); + /// AV_NUM_DATA_POINTERS = 8 + public const int AV_NUM_DATA_POINTERS = 0x8; + /// AV_OPT_ALLOW_NULL = (1 << 2) + public const int AV_OPT_ALLOW_NULL = 0x1 << 0x2; + /// AV_OPT_FLAG_AUDIO_PARAM = 8 + public const int AV_OPT_FLAG_AUDIO_PARAM = 0x8; + /// AV_OPT_FLAG_BSF_PARAM = (1<<8) + public const int AV_OPT_FLAG_BSF_PARAM = 0x1 << 0x8; + /// AV_OPT_FLAG_CHILD_CONSTS = (1<<18) + public const int AV_OPT_FLAG_CHILD_CONSTS = 0x1 << 0x12; + /// AV_OPT_FLAG_DECODING_PARAM = 2 + public const int AV_OPT_FLAG_DECODING_PARAM = 
0x2; + /// AV_OPT_FLAG_DEPRECATED = (1<<17) + public const int AV_OPT_FLAG_DEPRECATED = 0x1 << 0x11; + /// AV_OPT_FLAG_ENCODING_PARAM = 1 + public const int AV_OPT_FLAG_ENCODING_PARAM = 0x1; + /// AV_OPT_FLAG_EXPORT = 64 + public const int AV_OPT_FLAG_EXPORT = 0x40; + /// AV_OPT_FLAG_FILTERING_PARAM = (1<<16) + public const int AV_OPT_FLAG_FILTERING_PARAM = 0x1 << 0x10; + /// AV_OPT_FLAG_READONLY = 128 + public const int AV_OPT_FLAG_READONLY = 0x80; + /// AV_OPT_FLAG_RUNTIME_PARAM = (1<<15) + public const int AV_OPT_FLAG_RUNTIME_PARAM = 0x1 << 0xf; + /// AV_OPT_FLAG_SUBTITLE_PARAM = 32 + public const int AV_OPT_FLAG_SUBTITLE_PARAM = 0x20; + /// AV_OPT_FLAG_VIDEO_PARAM = 16 + public const int AV_OPT_FLAG_VIDEO_PARAM = 0x10; + /// AV_OPT_MULTI_COMPONENT_RANGE = (1 << 12) + public const int AV_OPT_MULTI_COMPONENT_RANGE = 0x1 << 0xc; + /// AV_OPT_SEARCH_CHILDREN = (1 << 0) + public const int AV_OPT_SEARCH_CHILDREN = 0x1 << 0x0; + /// AV_OPT_SEARCH_FAKE_OBJ = (1 << 1) + public const int AV_OPT_SEARCH_FAKE_OBJ = 0x1 << 0x1; + /// AV_OPT_SERIALIZE_OPT_FLAGS_EXACT = 0x00000002 + public const int AV_OPT_SERIALIZE_OPT_FLAGS_EXACT = 0x2; + /// AV_OPT_SERIALIZE_SKIP_DEFAULTS = 0x00000001 + public const int AV_OPT_SERIALIZE_SKIP_DEFAULTS = 0x1; + // public static av_opt_set_int_list = (obj, name, val, term, flags) (av_int_list_length(val, term) > INT_MAX / sizeof(*(val)) ? 
AVERROR(EINVAL) : av_opt_set_bin(obj, name, (const uint8_t *)(val), av_int_list_length(val, term) * sizeof(*(val)), flags)); + // public static av_parity = av_parity_c; + /// AV_PARSER_PTS_NB = 0x4 + public const int AV_PARSER_PTS_NB = 0x4; + // public static AV_PIX_FMT_0BGR32 = AV_PIX_FMT_NE(0BGR, RGB0); + // public static AV_PIX_FMT_0RGB32 = AV_PIX_FMT_NE(0x0, RGB, BGR0); + // public static AV_PIX_FMT_AYUV64 = AV_PIX_FMT_NE(AYUV64BE, AYUV64LE); + // public static AV_PIX_FMT_BAYER_BGGR16 = AV_PIX_FMT_NE(BAYER_BGGR16BE, BAYER_BGGR16LE); + // public static AV_PIX_FMT_BAYER_GBRG16 = AV_PIX_FMT_NE(BAYER_GBRG16BE, BAYER_GBRG16LE); + // public static AV_PIX_FMT_BAYER_GRBG16 = AV_PIX_FMT_NE(BAYER_GRBG16BE, BAYER_GRBG16LE); + // public static AV_PIX_FMT_BAYER_RGGB16 = AV_PIX_FMT_NE(BAYER_RGGB16BE, BAYER_RGGB16LE); + // public static AV_PIX_FMT_BGR32 = AV_PIX_FMT_NE(ABGR, RGBA); + // public static AV_PIX_FMT_BGR32_1 = AV_PIX_FMT_NE(BGRA, ARGB); + // public static AV_PIX_FMT_BGR444 = AV_PIX_FMT_NE(BGR444BE, BGR444LE); + // public static AV_PIX_FMT_BGR48 = AV_PIX_FMT_NE(BGR48BE, BGR48LE); + // public static AV_PIX_FMT_BGR555 = AV_PIX_FMT_NE(BGR555BE, BGR555LE); + // public static AV_PIX_FMT_BGR565 = AV_PIX_FMT_NE(BGR565BE, BGR565LE); + // public static AV_PIX_FMT_BGRA64 = AV_PIX_FMT_NE(BGRA64BE, BGRA64LE); + /// AV_PIX_FMT_FLAG_ALPHA = 0x1 << 0x7 + public const int AV_PIX_FMT_FLAG_ALPHA = 0x1 << 0x7; + /// AV_PIX_FMT_FLAG_BAYER = 0x1 << 0x8 + public const int AV_PIX_FMT_FLAG_BAYER = 0x1 << 0x8; + /// AV_PIX_FMT_FLAG_BE = 0x1 << 0x0 + public const int AV_PIX_FMT_FLAG_BE = 0x1 << 0x0; + /// AV_PIX_FMT_FLAG_BITSTREAM = 0x1 << 0x2 + public const int AV_PIX_FMT_FLAG_BITSTREAM = 0x1 << 0x2; + /// AV_PIX_FMT_FLAG_FLOAT = 0x1 << 0x9 + public const int AV_PIX_FMT_FLAG_FLOAT = 0x1 << 0x9; + /// AV_PIX_FMT_FLAG_HWACCEL = 0x1 << 0x3 + public const int AV_PIX_FMT_FLAG_HWACCEL = 0x1 << 0x3; + /// AV_PIX_FMT_FLAG_PAL = 0x1 << 0x1 + public const int AV_PIX_FMT_FLAG_PAL = 0x1 << 0x1; + /// 
AV_PIX_FMT_FLAG_PLANAR = 0x1 << 0x4 + public const int AV_PIX_FMT_FLAG_PLANAR = 0x1 << 0x4; + /// AV_PIX_FMT_FLAG_RGB = 0x1 << 0x5 + public const int AV_PIX_FMT_FLAG_RGB = 0x1 << 0x5; + // public static AV_PIX_FMT_GBRAP10 = AV_PIX_FMT_NE(GBRAP10BE, GBRAP10LE); + // public static AV_PIX_FMT_GBRAP12 = AV_PIX_FMT_NE(GBRAP12BE, GBRAP12LE); + // public static AV_PIX_FMT_GBRAP16 = AV_PIX_FMT_NE(GBRAP16BE, GBRAP16LE); + // public static AV_PIX_FMT_GBRAPF32 = AV_PIX_FMT_NE(GBRAPF32BE, GBRAPF32LE); + // public static AV_PIX_FMT_GBRP10 = AV_PIX_FMT_NE(GBRP10BE, GBRP10LE); + // public static AV_PIX_FMT_GBRP12 = AV_PIX_FMT_NE(GBRP12BE, GBRP12LE); + // public static AV_PIX_FMT_GBRP14 = AV_PIX_FMT_NE(GBRP14BE, GBRP14LE); + // public static AV_PIX_FMT_GBRP16 = AV_PIX_FMT_NE(GBRP16BE, GBRP16LE); + // public static AV_PIX_FMT_GBRP9 = AV_PIX_FMT_NE(GBRP9BE, GBRP9LE); + // public static AV_PIX_FMT_GBRPF32 = AV_PIX_FMT_NE(GBRPF32BE, GBRPF32LE); + // public static AV_PIX_FMT_GRAY10 = AV_PIX_FMT_NE(GRAY10BE, GRAY10LE); + // public static AV_PIX_FMT_GRAY12 = AV_PIX_FMT_NE(GRAY12BE, GRAY12LE); + // public static AV_PIX_FMT_GRAY14 = AV_PIX_FMT_NE(GRAY14BE, GRAY14LE); + // public static AV_PIX_FMT_GRAY16 = AV_PIX_FMT_NE(GRAY16BE, GRAY16LE); + // public static AV_PIX_FMT_GRAY9 = AV_PIX_FMT_NE(GRAY9BE, GRAY9LE); + // public static AV_PIX_FMT_GRAYF32 = AV_PIX_FMT_NE(GRAYF32BE, GRAYF32LE); + // public static AV_PIX_FMT_NE = (be, le) AV_PIX_FMT_##le; + // public static AV_PIX_FMT_NV20 = AV_PIX_FMT_NE(NV20BE, NV20LE); + // public static AV_PIX_FMT_P010 = AV_PIX_FMT_NE(P010BE, P010LE); + // public static AV_PIX_FMT_P016 = AV_PIX_FMT_NE(P016BE, P016LE); + // public static AV_PIX_FMT_P210 = AV_PIX_FMT_NE(P210BE, P210LE); + // public static AV_PIX_FMT_P216 = AV_PIX_FMT_NE(P216BE, P216LE); + // public static AV_PIX_FMT_P410 = AV_PIX_FMT_NE(P410BE, P410LE); + // public static AV_PIX_FMT_P416 = AV_PIX_FMT_NE(P416BE, P416LE); + // public static AV_PIX_FMT_RGB32 = AV_PIX_FMT_NE(ARGB, BGRA); + // public 
static AV_PIX_FMT_RGB32_1 = AV_PIX_FMT_NE(RGBA, ABGR); + // public static AV_PIX_FMT_RGB444 = AV_PIX_FMT_NE(RGB444BE, RGB444LE); + // public static AV_PIX_FMT_RGB48 = AV_PIX_FMT_NE(RGB48BE, RGB48LE); + // public static AV_PIX_FMT_RGB555 = AV_PIX_FMT_NE(RGB555BE, RGB555LE); + // public static AV_PIX_FMT_RGB565 = AV_PIX_FMT_NE(RGB565BE, RGB565LE); + // public static AV_PIX_FMT_RGBA64 = AV_PIX_FMT_NE(RGBA64BE, RGBA64LE); + // public static AV_PIX_FMT_X2BGR10 = AV_PIX_FMT_NE(X2BGR10BE, X2BGR10LE); + // public static AV_PIX_FMT_X2RGB10 = AV_PIX_FMT_NE(X2RGB10BE, X2RGB10LE); + // public static AV_PIX_FMT_XYZ12 = AV_PIX_FMT_NE(XYZ12BE, XYZ12LE); + // public static AV_PIX_FMT_Y210 = AV_PIX_FMT_NE(Y210BE, Y210LE); + // public static AV_PIX_FMT_YA16 = AV_PIX_FMT_NE(YA16BE, YA16LE); + // public static AV_PIX_FMT_YUV420P10 = AV_PIX_FMT_NE(YUV420P10BE, YUV420P10LE); + // public static AV_PIX_FMT_YUV420P12 = AV_PIX_FMT_NE(YUV420P12BE, YUV420P12LE); + // public static AV_PIX_FMT_YUV420P14 = AV_PIX_FMT_NE(YUV420P14BE, YUV420P14LE); + // public static AV_PIX_FMT_YUV420P16 = AV_PIX_FMT_NE(YUV420P16BE, YUV420P16LE); + // public static AV_PIX_FMT_YUV420P9 = AV_PIX_FMT_NE(YUV420P9BE, YUV420P9LE); + // public static AV_PIX_FMT_YUV422P10 = AV_PIX_FMT_NE(YUV422P10BE, YUV422P10LE); + // public static AV_PIX_FMT_YUV422P12 = AV_PIX_FMT_NE(YUV422P12BE, YUV422P12LE); + // public static AV_PIX_FMT_YUV422P14 = AV_PIX_FMT_NE(YUV422P14BE, YUV422P14LE); + // public static AV_PIX_FMT_YUV422P16 = AV_PIX_FMT_NE(YUV422P16BE, YUV422P16LE); + // public static AV_PIX_FMT_YUV422P9 = AV_PIX_FMT_NE(YUV422P9BE, YUV422P9LE); + // public static AV_PIX_FMT_YUV440P10 = AV_PIX_FMT_NE(YUV440P10BE, YUV440P10LE); + // public static AV_PIX_FMT_YUV440P12 = AV_PIX_FMT_NE(YUV440P12BE, YUV440P12LE); + // public static AV_PIX_FMT_YUV444P10 = AV_PIX_FMT_NE(YUV444P10BE, YUV444P10LE); + // public static AV_PIX_FMT_YUV444P12 = AV_PIX_FMT_NE(YUV444P12BE, YUV444P12LE); + // public static AV_PIX_FMT_YUV444P14 = 
AV_PIX_FMT_NE(YUV444P14BE, YUV444P14LE); + // public static AV_PIX_FMT_YUV444P16 = AV_PIX_FMT_NE(YUV444P16BE, YUV444P16LE); + // public static AV_PIX_FMT_YUV444P9 = AV_PIX_FMT_NE(YUV444P9BE, YUV444P9LE); + // public static AV_PIX_FMT_YUVA420P10 = AV_PIX_FMT_NE(YUVA420P10BE, YUVA420P10LE); + // public static AV_PIX_FMT_YUVA420P16 = AV_PIX_FMT_NE(YUVA420P16BE, YUVA420P16LE); + // public static AV_PIX_FMT_YUVA420P9 = AV_PIX_FMT_NE(YUVA420P9BE, YUVA420P9LE); + // public static AV_PIX_FMT_YUVA422P10 = AV_PIX_FMT_NE(YUVA422P10BE, YUVA422P10LE); + // public static AV_PIX_FMT_YUVA422P12 = AV_PIX_FMT_NE(YUVA422P12BE, YUVA422P12LE); + // public static AV_PIX_FMT_YUVA422P16 = AV_PIX_FMT_NE(YUVA422P16BE, YUVA422P16LE); + // public static AV_PIX_FMT_YUVA422P9 = AV_PIX_FMT_NE(YUVA422P9BE, YUVA422P9LE); + // public static AV_PIX_FMT_YUVA444P10 = AV_PIX_FMT_NE(YUVA444P10BE, YUVA444P10LE); + // public static AV_PIX_FMT_YUVA444P12 = AV_PIX_FMT_NE(YUVA444P12BE, YUVA444P12LE); + // public static AV_PIX_FMT_YUVA444P16 = AV_PIX_FMT_NE(YUVA444P16BE, YUVA444P16LE); + // public static AV_PIX_FMT_YUVA444P9 = AV_PIX_FMT_NE(YUVA444P9BE, YUVA444P9LE); + /// AV_PKT_DATA_QUALITY_FACTOR = AV_PKT_DATA_QUALITY_STATS + public static readonly int AV_PKT_DATA_QUALITY_FACTOR = 8; + /// AV_PKT_FLAG_CORRUPT = 0x0002 + public const int AV_PKT_FLAG_CORRUPT = 0x2; + /// AV_PKT_FLAG_DISCARD = 0x0004 + public const int AV_PKT_FLAG_DISCARD = 0x4; + /// AV_PKT_FLAG_DISPOSABLE = 0x0010 + public const int AV_PKT_FLAG_DISPOSABLE = 0x10; + /// AV_PKT_FLAG_KEY = 0x0001 + public const int AV_PKT_FLAG_KEY = 0x1; + /// AV_PKT_FLAG_TRUSTED = 0x0008 + public const int AV_PKT_FLAG_TRUSTED = 0x8; + // public static av_popcount = av_popcount_c; + // public static av_popcount64 = av_popcount64_c; + // public static AV_PRAGMA = (s) _Pragma(#s); + // public static av_printf_format = fmtpos; + /// AV_PROGRAM_RUNNING = 1 + public const int AV_PROGRAM_RUNNING = 0x1; + /// AV_PTS_WRAP_ADD_OFFSET = 1 + public const int 
AV_PTS_WRAP_ADD_OFFSET = 0x1; + /// AV_PTS_WRAP_IGNORE = 0 + public const int AV_PTS_WRAP_IGNORE = 0x0; + /// AV_PTS_WRAP_SUB_OFFSET = -1 + public const int AV_PTS_WRAP_SUB_OFFSET = -0x1; + // public static av_pure = __attribute__((pure)); + // public static av_sat_add32 = av_sat_add32_c; + // public static av_sat_add64 = av_sat_add64_c; + // public static av_sat_dadd32 = av_sat_dadd32_c; + // public static av_sat_dsub32 = av_sat_dsub32_c; + // public static av_sat_sub32 = av_sat_sub32_c; + // public static av_sat_sub64 = av_sat_sub64_c; + // public static AV_STRINGIFY = (s)(AV_TOSTRING(s)); + /// AV_SUBTITLE_FLAG_FORCED = 0x1 + public const int AV_SUBTITLE_FLAG_FORCED = 0x1; + /// AV_TIME_BASE = 1000000 + public const int AV_TIME_BASE = 0xf4240; + // public static AV_TIME_BASE_Q = (AVRational){1, AV_TIME_BASE}; + /// AV_TIMECODE_STR_SIZE = 0x17 + public const int AV_TIMECODE_STR_SIZE = 0x17; + // public static AV_TOSTRING = (s) #s; + // public static av_uninit = (x) x=x; + // public static av_unused = __attribute__((unused)); + // public static av_used = __attribute__((used)); + // public static AV_VERSION = a; + // public static AV_VERSION_DOT = (a, b, c) a ##.## b ##.## c; + // public static AV_VERSION_INT = a; + // public static AV_VERSION_MAJOR = (a)((a)(>>0x10)); + // public static AV_VERSION_MICRO = (a)((a)(&0xff)); + // public static AV_VERSION_MINOR = (a)((a)(&0xff00) >> 0x8); + // public static AVERROR = (e) (-(e)); + /// AVERROR_BSF_NOT_FOUND = FFERRTAG(0xF8,'B','S','F') + public static readonly int AVERROR_BSF_NOT_FOUND = FFERRTAG(0xf8, 'B', 'S', 'F'); + /// AVERROR_BUFFER_TOO_SMALL = FFERRTAG( 'B','U','F','S') + public static readonly int AVERROR_BUFFER_TOO_SMALL = FFERRTAG('B', 'U', 'F', 'S'); + /// AVERROR_BUG = FFERRTAG( 'B','U','G','!') + public static readonly int AVERROR_BUG = FFERRTAG('B', 'U', 'G', '!'); + /// AVERROR_BUG2 = FFERRTAG( 'B','U','G',' ') + public static readonly int AVERROR_BUG2 = FFERRTAG('B', 'U', 'G', ' '); + /// 
AVERROR_DECODER_NOT_FOUND = FFERRTAG(0xF8,'D','E','C') + public static readonly int AVERROR_DECODER_NOT_FOUND = FFERRTAG(0xf8, 'D', 'E', 'C'); + /// AVERROR_DEMUXER_NOT_FOUND = FFERRTAG(0xF8,'D','E','M') + public static readonly int AVERROR_DEMUXER_NOT_FOUND = FFERRTAG(0xf8, 'D', 'E', 'M'); + /// AVERROR_ENCODER_NOT_FOUND = FFERRTAG(0xF8,'E','N','C') + public static readonly int AVERROR_ENCODER_NOT_FOUND = FFERRTAG(0xf8, 'E', 'N', 'C'); + /// AVERROR_EOF = FFERRTAG( 'E','O','F',' ') + public static readonly int AVERROR_EOF = FFERRTAG('E', 'O', 'F', ' '); + /// AVERROR_EXIT = FFERRTAG( 'E','X','I','T') + public static readonly int AVERROR_EXIT = FFERRTAG('E', 'X', 'I', 'T'); + /// AVERROR_EXPERIMENTAL = (-0x2bb2afa8) + public const int AVERROR_EXPERIMENTAL = -0x2bb2afa8; + /// AVERROR_EXTERNAL = FFERRTAG( 'E','X','T',' ') + public static readonly int AVERROR_EXTERNAL = FFERRTAG('E', 'X', 'T', ' '); + /// AVERROR_FILTER_NOT_FOUND = FFERRTAG(0xF8,'F','I','L') + public static readonly int AVERROR_FILTER_NOT_FOUND = FFERRTAG(0xf8, 'F', 'I', 'L'); + /// AVERROR_HTTP_BAD_REQUEST = FFERRTAG(0xF8,'4','0','0') + public static readonly int AVERROR_HTTP_BAD_REQUEST = FFERRTAG(0xf8, '4', '0', '0'); + /// AVERROR_HTTP_FORBIDDEN = FFERRTAG(0xF8,'4','0','3') + public static readonly int AVERROR_HTTP_FORBIDDEN = FFERRTAG(0xf8, '4', '0', '3'); + /// AVERROR_HTTP_NOT_FOUND = FFERRTAG(0xF8,'4','0','4') + public static readonly int AVERROR_HTTP_NOT_FOUND = FFERRTAG(0xf8, '4', '0', '4'); + /// AVERROR_HTTP_OTHER_4XX = FFERRTAG(0xF8,'4','X','X') + public static readonly int AVERROR_HTTP_OTHER_4XX = FFERRTAG(0xf8, '4', 'X', 'X'); + /// AVERROR_HTTP_SERVER_ERROR = FFERRTAG(0xF8,'5','X','X') + public static readonly int AVERROR_HTTP_SERVER_ERROR = FFERRTAG(0xf8, '5', 'X', 'X'); + /// AVERROR_HTTP_UNAUTHORIZED = FFERRTAG(0xF8,'4','0','1') + public static readonly int AVERROR_HTTP_UNAUTHORIZED = FFERRTAG(0xf8, '4', '0', '1'); + /// AVERROR_INPUT_CHANGED = (-0x636e6701) + public const int 
AVERROR_INPUT_CHANGED = -0x636e6701; + /// AVERROR_INVALIDDATA = FFERRTAG( 'I','N','D','A') + public static readonly int AVERROR_INVALIDDATA = FFERRTAG('I', 'N', 'D', 'A'); + /// AVERROR_MUXER_NOT_FOUND = FFERRTAG(0xF8,'M','U','X') + public static readonly int AVERROR_MUXER_NOT_FOUND = FFERRTAG(0xf8, 'M', 'U', 'X'); + /// AVERROR_OPTION_NOT_FOUND = FFERRTAG(0xF8,'O','P','T') + public static readonly int AVERROR_OPTION_NOT_FOUND = FFERRTAG(0xf8, 'O', 'P', 'T'); + /// AVERROR_OUTPUT_CHANGED = (-0x636e6702) + public const int AVERROR_OUTPUT_CHANGED = -0x636e6702; + /// AVERROR_PATCHWELCOME = FFERRTAG( 'P','A','W','E') + public static readonly int AVERROR_PATCHWELCOME = FFERRTAG('P', 'A', 'W', 'E'); + /// AVERROR_PROTOCOL_NOT_FOUND = FFERRTAG(0xF8,'P','R','O') + public static readonly int AVERROR_PROTOCOL_NOT_FOUND = FFERRTAG(0xf8, 'P', 'R', 'O'); + /// AVERROR_STREAM_NOT_FOUND = FFERRTAG(0xF8,'S','T','R') + public static readonly int AVERROR_STREAM_NOT_FOUND = FFERRTAG(0xf8, 'S', 'T', 'R'); + /// AVERROR_UNKNOWN = FFERRTAG( 'U','N','K','N') + public static readonly int AVERROR_UNKNOWN = FFERRTAG('U', 'N', 'K', 'N'); + /// AVFILTER_CMD_FLAG_FAST = 0x2 + public const int AVFILTER_CMD_FLAG_FAST = 0x2; + /// AVFILTER_CMD_FLAG_ONE = 0x1 + public const int AVFILTER_CMD_FLAG_ONE = 0x1; + /// AVFILTER_FLAG_DYNAMIC_INPUTS = 0x1 << 0x0 + public const int AVFILTER_FLAG_DYNAMIC_INPUTS = 0x1 << 0x0; + /// AVFILTER_FLAG_DYNAMIC_OUTPUTS = 0x1 << 0x1 + public const int AVFILTER_FLAG_DYNAMIC_OUTPUTS = 0x1 << 0x1; + /// AVFILTER_FLAG_METADATA_ONLY = 0x1 << 0x3 + public const int AVFILTER_FLAG_METADATA_ONLY = 0x1 << 0x3; + /// AVFILTER_FLAG_SLICE_THREADS = 0x1 << 0x2 + public const int AVFILTER_FLAG_SLICE_THREADS = 0x1 << 0x2; + /// AVFILTER_FLAG_SUPPORT_TIMELINE = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL + public const int AVFILTER_FLAG_SUPPORT_TIMELINE = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL; + /// 
AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC = 0x1 << 0x10 + public const int AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC = 0x1 << 0x10; + /// AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL = 0x1 << 0x11 + public const int AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL = 0x1 << 0x11; + /// AVFILTER_THREAD_SLICE = 0x1 << 0x0 + public const int AVFILTER_THREAD_SLICE = 0x1 << 0x0; + /// AVFMT_ALLOW_FLUSH = 0x10000 + public const int AVFMT_ALLOW_FLUSH = 0x10000; + /// AVFMT_AVOID_NEG_TS_AUTO = -1 + public const int AVFMT_AVOID_NEG_TS_AUTO = -0x1; + /// AVFMT_AVOID_NEG_TS_DISABLED = 0 + public const int AVFMT_AVOID_NEG_TS_DISABLED = 0x0; + /// AVFMT_AVOID_NEG_TS_MAKE_NON_NEGATIVE = 1 + public const int AVFMT_AVOID_NEG_TS_MAKE_NON_NEGATIVE = 0x1; + /// AVFMT_AVOID_NEG_TS_MAKE_ZERO = 2 + public const int AVFMT_AVOID_NEG_TS_MAKE_ZERO = 0x2; + /// AVFMT_EVENT_FLAG_METADATA_UPDATED = 0x0001 + public const int AVFMT_EVENT_FLAG_METADATA_UPDATED = 0x1; + /// AVFMT_EXPERIMENTAL = 0x0004 + public const int AVFMT_EXPERIMENTAL = 0x4; + /// AVFMT_FLAG_AUTO_BSF = 0x200000 + public const int AVFMT_FLAG_AUTO_BSF = 0x200000; + /// AVFMT_FLAG_BITEXACT = 0x0400 + public const int AVFMT_FLAG_BITEXACT = 0x400; + /// AVFMT_FLAG_CUSTOM_IO = 0x0080 + public const int AVFMT_FLAG_CUSTOM_IO = 0x80; + /// AVFMT_FLAG_DISCARD_CORRUPT = 0x0100 + public const int AVFMT_FLAG_DISCARD_CORRUPT = 0x100; + /// AVFMT_FLAG_FAST_SEEK = 0x80000 + public const int AVFMT_FLAG_FAST_SEEK = 0x80000; + /// AVFMT_FLAG_FLUSH_PACKETS = 0x0200 + public const int AVFMT_FLAG_FLUSH_PACKETS = 0x200; + /// AVFMT_FLAG_GENPTS = 0x0001 + public const int AVFMT_FLAG_GENPTS = 0x1; + /// AVFMT_FLAG_IGNDTS = 0x0008 + public const int AVFMT_FLAG_IGNDTS = 0x8; + /// AVFMT_FLAG_IGNIDX = 0x0002 + public const int AVFMT_FLAG_IGNIDX = 0x2; + /// AVFMT_FLAG_NOBUFFER = 0x0040 + public const int AVFMT_FLAG_NOBUFFER = 0x40; + /// AVFMT_FLAG_NOFILLIN = 0x0010 + public const int AVFMT_FLAG_NOFILLIN = 0x10; + /// AVFMT_FLAG_NONBLOCK = 0x0004 + public const int 
AVFMT_FLAG_NONBLOCK = 0x4; + /// AVFMT_FLAG_NOPARSE = 0x0020 + public const int AVFMT_FLAG_NOPARSE = 0x20; + /// AVFMT_FLAG_PRIV_OPT = 0x20000 + public const int AVFMT_FLAG_PRIV_OPT = 0x20000; + /// AVFMT_FLAG_SHORTEST = 0x100000 + public const int AVFMT_FLAG_SHORTEST = 0x100000; + /// AVFMT_FLAG_SORT_DTS = 0x10000 + public const int AVFMT_FLAG_SORT_DTS = 0x10000; + /// AVFMT_GENERIC_INDEX = 0x0100 + public const int AVFMT_GENERIC_INDEX = 0x100; + /// AVFMT_GLOBALHEADER = 0x0040 + public const int AVFMT_GLOBALHEADER = 0x40; + /// AVFMT_NEEDNUMBER = 0x0002 + public const int AVFMT_NEEDNUMBER = 0x2; + /// AVFMT_NO_BYTE_SEEK = 0x8000 + public const int AVFMT_NO_BYTE_SEEK = 0x8000; + /// AVFMT_NOBINSEARCH = 0x2000 + public const int AVFMT_NOBINSEARCH = 0x2000; + /// AVFMT_NODIMENSIONS = 0x0800 + public const int AVFMT_NODIMENSIONS = 0x800; + /// AVFMT_NOFILE = 0x0001 + public const int AVFMT_NOFILE = 0x1; + /// AVFMT_NOGENSEARCH = 0x4000 + public const int AVFMT_NOGENSEARCH = 0x4000; + /// AVFMT_NOSTREAMS = 0x1000 + public const int AVFMT_NOSTREAMS = 0x1000; + /// AVFMT_NOTIMESTAMPS = 0x0080 + public const int AVFMT_NOTIMESTAMPS = 0x80; + /// AVFMT_SEEK_TO_PTS = 0x4000000 + public const int AVFMT_SEEK_TO_PTS = 0x4000000; + /// AVFMT_SHOW_IDS = 0x0008 + public const int AVFMT_SHOW_IDS = 0x8; + /// AVFMT_TS_DISCONT = 0x0200 + public const int AVFMT_TS_DISCONT = 0x200; + /// AVFMT_TS_NEGATIVE = 0x40000 + public const int AVFMT_TS_NEGATIVE = 0x40000; + /// AVFMT_TS_NONSTRICT = 0x20000 + public const int AVFMT_TS_NONSTRICT = 0x20000; + /// AVFMT_VARIABLE_FPS = 0x0400 + public const int AVFMT_VARIABLE_FPS = 0x400; + /// AVFMTCTX_NOHEADER = 0x0001 + public const int AVFMTCTX_NOHEADER = 0x1; + /// AVFMTCTX_UNSEEKABLE = 0x0002 + public const int AVFMTCTX_UNSEEKABLE = 0x2; + /// AVINDEX_DISCARD_FRAME = 0x0002 + public const int AVINDEX_DISCARD_FRAME = 0x2; + /// AVINDEX_KEYFRAME = 0x0001 + public const int AVINDEX_KEYFRAME = 0x1; + /// AVIO_FLAG_DIRECT = 0x8000 + public const 
int AVIO_FLAG_DIRECT = 0x8000; + /// AVIO_FLAG_NONBLOCK = 8 + public const int AVIO_FLAG_NONBLOCK = 0x8; + /// AVIO_FLAG_READ = 1 + public const int AVIO_FLAG_READ = 0x1; + /// AVIO_FLAG_READ_WRITE = (AVIO_FLAG_READ|AVIO_FLAG_WRITE) + public const int AVIO_FLAG_READ_WRITE = AVIO_FLAG_READ | AVIO_FLAG_WRITE; + /// AVIO_FLAG_WRITE = 2 + public const int AVIO_FLAG_WRITE = 0x2; + // public static avio_print = s; + /// AVIO_SEEKABLE_NORMAL = (1 << 0) + public const int AVIO_SEEKABLE_NORMAL = 0x1 << 0x0; + /// AVIO_SEEKABLE_TIME = (1 << 1) + public const int AVIO_SEEKABLE_TIME = 0x1 << 0x1; + /// AVPALETTE_COUNT = 256 + public const int AVPALETTE_COUNT = 0x100; + /// AVPALETTE_SIZE = 1024 + public const int AVPALETTE_SIZE = 0x400; + /// AVPROBE_PADDING_SIZE = 32 + public const int AVPROBE_PADDING_SIZE = 0x20; + /// AVPROBE_SCORE_EXTENSION = 50 + public const int AVPROBE_SCORE_EXTENSION = 0x32; + /// AVPROBE_SCORE_MAX = 100 + public const int AVPROBE_SCORE_MAX = 0x64; + /// AVPROBE_SCORE_MIME = 75 + public const int AVPROBE_SCORE_MIME = 0x4b; + /// AVPROBE_SCORE_RETRY = (AVPROBE_SCORE_MAX/4) + public const int AVPROBE_SCORE_RETRY = AVPROBE_SCORE_MAX / 0x4; + /// AVPROBE_SCORE_STREAM_RETRY = (AVPROBE_SCORE_MAX/4-1) + public const int AVPROBE_SCORE_STREAM_RETRY = AVPROBE_SCORE_MAX / 0x4 - 0x1; + /// AVSEEK_FLAG_ANY = 4 + public const int AVSEEK_FLAG_ANY = 0x4; + /// AVSEEK_FLAG_BACKWARD = 1 + public const int AVSEEK_FLAG_BACKWARD = 0x1; + /// AVSEEK_FLAG_BYTE = 2 + public const int AVSEEK_FLAG_BYTE = 0x2; + /// AVSEEK_FLAG_FRAME = 8 + public const int AVSEEK_FLAG_FRAME = 0x8; + /// AVSEEK_FORCE = 0x20000 + public const int AVSEEK_FORCE = 0x20000; + /// AVSEEK_SIZE = 0x10000 + public const int AVSEEK_SIZE = 0x10000; + /// AVSTREAM_EVENT_FLAG_METADATA_UPDATED = 0x0001 + public const int AVSTREAM_EVENT_FLAG_METADATA_UPDATED = 0x1; + /// AVSTREAM_EVENT_FLAG_NEW_PACKETS = (1 << 1) + public const int AVSTREAM_EVENT_FLAG_NEW_PACKETS = 0x1 << 0x1; + /// AVSTREAM_INIT_IN_INIT_OUTPUT 
= 1 + public const int AVSTREAM_INIT_IN_INIT_OUTPUT = 0x1; + /// AVSTREAM_INIT_IN_WRITE_HEADER = 0 + public const int AVSTREAM_INIT_IN_WRITE_HEADER = 0x0; + // public static AVUNERROR = (e) (-(e)); + // public static DECLARE_ALIGNED = n; + // public static DECLARE_ASM_ALIGNED = n; + // public static DECLARE_ASM_CONST = n; + /// FF_API_AUTO_THREADS = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_AUTO_THREADS = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_AV_FOPEN_UTF8 = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_AV_FOPEN_UTF8 = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_AV_MALLOCZ_ARRAY = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_AV_MALLOCZ_ARRAY = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_AVCTX_TIMEBASE = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_AVCTX_TIMEBASE = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_AVIOCONTEXT_WRITTEN = (LIBAVFORMAT_VERSION_MAJOR < 60) + public const bool FF_API_AVIOCONTEXT_WRITTEN = LIBAVFORMAT_VERSION_MAJOR < 0x3c; + /// FF_API_AVSTREAM_CLASS = (LIBAVFORMAT_VERSION_MAJOR > 59) + public const bool FF_API_AVSTREAM_CLASS = LIBAVFORMAT_VERSION_MAJOR > 0x3b; + /// FF_API_BUFFERSINK_ALLOC = LIBAVFILTER_VERSION_MAJOR < 0x9 + public const bool FF_API_BUFFERSINK_ALLOC = LIBAVFILTER_VERSION_MAJOR < 0x9; + /// FF_API_COLORSPACE_NAME = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_COLORSPACE_NAME = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_COMPUTE_PKT_FIELDS2 = (LIBAVFORMAT_VERSION_MAJOR < 60) + public const bool FF_API_COMPUTE_PKT_FIELDS2 = LIBAVFORMAT_VERSION_MAJOR < 0x3c; + /// FF_API_D2STR = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_D2STR = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_DEBUG_MV = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_DEBUG_MV = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_DECLARE_ALIGNED = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_DECLARE_ALIGNED = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// 
FF_API_DEVICE_CAPABILITIES = (LIBAVDEVICE_VERSION_MAJOR < 60) + public const bool FF_API_DEVICE_CAPABILITIES = LIBAVDEVICE_VERSION_MAJOR < 0x3c; + /// FF_API_FIFO_OLD_API = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_FIFO_OLD_API = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_FIFO_PEEK2 = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_FIFO_PEEK2 = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_FLAG_TRUNCATED = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_FLAG_TRUNCATED = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_GET_FRAME_CLASS = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_GET_FRAME_CLASS = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_IDCT_NONE = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_IDCT_NONE = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_INIT_PACKET = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_INIT_PACKET = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_LAVF_PRIV_OPT = (LIBAVFORMAT_VERSION_MAJOR < 60) + public const bool FF_API_LAVF_PRIV_OPT = LIBAVFORMAT_VERSION_MAJOR < 0x3c; + /// FF_API_OLD_CHANNEL_LAYOUT = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_OLD_CHANNEL_LAYOUT = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_OPENH264_CABAC = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_OPENH264_CABAC = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_OPENH264_SLICE_MODE = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_OPENH264_SLICE_MODE = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_PAD_COUNT = LIBAVFILTER_VERSION_MAJOR < 0x9 + public const bool FF_API_PAD_COUNT = LIBAVFILTER_VERSION_MAJOR < 0x9; + /// FF_API_R_FRAME_RATE = 1 + public const int FF_API_R_FRAME_RATE = 0x1; + /// FF_API_SUB_TEXT_FORMAT = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_SUB_TEXT_FORMAT = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_SVTAV1_OPTS = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_SVTAV1_OPTS = LIBAVCODEC_VERSION_MAJOR < 0x3c; 
+ /// FF_API_SWS_PARAM_OPTION = LIBAVFILTER_VERSION_MAJOR < 0x9 + public const bool FF_API_SWS_PARAM_OPTION = LIBAVFILTER_VERSION_MAJOR < 0x9; + /// FF_API_THREAD_SAFE_CALLBACKS = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_THREAD_SAFE_CALLBACKS = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_UNUSED_CODEC_CAPS = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_UNUSED_CODEC_CAPS = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_XVMC = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_XVMC = LIBAVUTIL_VERSION_MAJOR < 0x3a; + // public static FF_ARRAY_ELEMS = (a) (sizeof(a) / sizeof((a)[0])); + /// FF_BUG_AMV = 0x20 + public const int FF_BUG_AMV = 0x20; + /// FF_BUG_AUTODETECT = 0x1 + public const int FF_BUG_AUTODETECT = 0x1; + /// FF_BUG_DC_CLIP = 0x1000 + public const int FF_BUG_DC_CLIP = 0x1000; + /// FF_BUG_DIRECT_BLOCKSIZE = 0x200 + public const int FF_BUG_DIRECT_BLOCKSIZE = 0x200; + /// FF_BUG_EDGE = 0x400 + public const int FF_BUG_EDGE = 0x400; + /// FF_BUG_HPEL_CHROMA = 0x800 + public const int FF_BUG_HPEL_CHROMA = 0x800; + /// FF_BUG_IEDGE = 0x8000 + public const int FF_BUG_IEDGE = 0x8000; + /// FF_BUG_MS = 0x2000 + public const int FF_BUG_MS = 0x2000; + /// FF_BUG_NO_PADDING = 0x10 + public const int FF_BUG_NO_PADDING = 0x10; + /// FF_BUG_QPEL_CHROMA = 0x40 + public const int FF_BUG_QPEL_CHROMA = 0x40; + /// FF_BUG_QPEL_CHROMA2 = 0x100 + public const int FF_BUG_QPEL_CHROMA2 = 0x100; + /// FF_BUG_STD_QPEL = 0x80 + public const int FF_BUG_STD_QPEL = 0x80; + /// FF_BUG_TRUNCATED = 0x4000 + public const int FF_BUG_TRUNCATED = 0x4000; + /// FF_BUG_UMP4 = 0x8 + public const int FF_BUG_UMP4 = 0x8; + /// FF_BUG_XVID_ILACE = 0x4 + public const int FF_BUG_XVID_ILACE = 0x4; + // public static FF_CEIL_RSHIFT = AV_CEIL_RSHIFT; + /// FF_CMP_BIT = 0x5 + public const int FF_CMP_BIT = 0x5; + /// FF_CMP_CHROMA = 0x100 + public const int FF_CMP_CHROMA = 0x100; + /// FF_CMP_DCT = 0x3 + public const int FF_CMP_DCT = 0x3; + /// FF_CMP_DCT264 = 0xe + 
public const int FF_CMP_DCT264 = 0xe; + /// FF_CMP_DCTMAX = 0xd + public const int FF_CMP_DCTMAX = 0xd; + /// FF_CMP_MEDIAN_SAD = 0xf + public const int FF_CMP_MEDIAN_SAD = 0xf; + /// FF_CMP_NSSE = 0xa + public const int FF_CMP_NSSE = 0xa; + /// FF_CMP_PSNR = 0x4 + public const int FF_CMP_PSNR = 0x4; + /// FF_CMP_RD = 0x6 + public const int FF_CMP_RD = 0x6; + /// FF_CMP_SAD = 0x0 + public const int FF_CMP_SAD = 0x0; + /// FF_CMP_SATD = 0x2 + public const int FF_CMP_SATD = 0x2; + /// FF_CMP_SSE = 0x1 + public const int FF_CMP_SSE = 0x1; + /// FF_CMP_VSAD = 0x8 + public const int FF_CMP_VSAD = 0x8; + /// FF_CMP_VSSE = 0x9 + public const int FF_CMP_VSSE = 0x9; + /// FF_CMP_W53 = 0xb + public const int FF_CMP_W53 = 0xb; + /// FF_CMP_W97 = 0xc + public const int FF_CMP_W97 = 0xc; + /// FF_CMP_ZERO = 0x7 + public const int FF_CMP_ZERO = 0x7; + /// FF_CODEC_PROPERTY_CLOSED_CAPTIONS = 0x2 + public const int FF_CODEC_PROPERTY_CLOSED_CAPTIONS = 0x2; + /// FF_CODEC_PROPERTY_FILM_GRAIN = 0x4 + public const int FF_CODEC_PROPERTY_FILM_GRAIN = 0x4; + /// FF_CODEC_PROPERTY_LOSSLESS = 0x1 + public const int FF_CODEC_PROPERTY_LOSSLESS = 0x1; + /// FF_COMPLIANCE_EXPERIMENTAL = -0x2 + public const int FF_COMPLIANCE_EXPERIMENTAL = -0x2; + /// FF_COMPLIANCE_NORMAL = 0x0 + public const int FF_COMPLIANCE_NORMAL = 0x0; + /// FF_COMPLIANCE_STRICT = 0x1 + public const int FF_COMPLIANCE_STRICT = 0x1; + /// FF_COMPLIANCE_UNOFFICIAL = -0x1 + public const int FF_COMPLIANCE_UNOFFICIAL = -0x1; + /// FF_COMPLIANCE_VERY_STRICT = 0x2 + public const int FF_COMPLIANCE_VERY_STRICT = 0x2; + /// FF_COMPRESSION_DEFAULT = -0x1 + public const int FF_COMPRESSION_DEFAULT = -0x1; + /// FF_DCT_ALTIVEC = 0x5 + public const int FF_DCT_ALTIVEC = 0x5; + /// FF_DCT_AUTO = 0x0 + public const int FF_DCT_AUTO = 0x0; + /// FF_DCT_FAAN = 0x6 + public const int FF_DCT_FAAN = 0x6; + /// FF_DCT_FASTINT = 0x1 + public const int FF_DCT_FASTINT = 0x1; + /// FF_DCT_INT = 0x2 + public const int FF_DCT_INT = 0x2; + /// FF_DCT_MMX 
= 0x3 + public const int FF_DCT_MMX = 0x3; + /// FF_DEBUG_BITSTREAM = 0x4 + public const int FF_DEBUG_BITSTREAM = 0x4; + /// FF_DEBUG_BUFFERS = 0x8000 + public const int FF_DEBUG_BUFFERS = 0x8000; + /// FF_DEBUG_BUGS = 0x1000 + public const int FF_DEBUG_BUGS = 0x1000; + /// FF_DEBUG_DCT_COEFF = 0x40 + public const int FF_DEBUG_DCT_COEFF = 0x40; + /// FF_DEBUG_ER = 0x400 + public const int FF_DEBUG_ER = 0x400; + /// FF_DEBUG_GREEN_MD = 0x800000 + public const int FF_DEBUG_GREEN_MD = 0x800000; + /// FF_DEBUG_MB_TYPE = 0x8 + public const int FF_DEBUG_MB_TYPE = 0x8; + /// FF_DEBUG_MMCO = 0x800 + public const int FF_DEBUG_MMCO = 0x800; + /// FF_DEBUG_NOMC = 0x1000000 + public const int FF_DEBUG_NOMC = 0x1000000; + /// FF_DEBUG_PICT_INFO = 0x1 + public const int FF_DEBUG_PICT_INFO = 0x1; + /// FF_DEBUG_QP = 0x10 + public const int FF_DEBUG_QP = 0x10; + /// FF_DEBUG_RC = 0x2 + public const int FF_DEBUG_RC = 0x2; + /// FF_DEBUG_SKIP = 0x80 + public const int FF_DEBUG_SKIP = 0x80; + /// FF_DEBUG_STARTCODE = 0x100 + public const int FF_DEBUG_STARTCODE = 0x100; + /// FF_DEBUG_THREADS = 0x10000 + public const int FF_DEBUG_THREADS = 0x10000; + /// FF_DEBUG_VIS_MV_B_BACK = 0x4 + public const int FF_DEBUG_VIS_MV_B_BACK = 0x4; + /// FF_DEBUG_VIS_MV_B_FOR = 0x2 + public const int FF_DEBUG_VIS_MV_B_FOR = 0x2; + /// FF_DEBUG_VIS_MV_P_FOR = 0x1 + public const int FF_DEBUG_VIS_MV_P_FOR = 0x1; + /// FF_DECODE_ERROR_CONCEALMENT_ACTIVE = 4 + public const int FF_DECODE_ERROR_CONCEALMENT_ACTIVE = 0x4; + /// FF_DECODE_ERROR_DECODE_SLICES = 8 + public const int FF_DECODE_ERROR_DECODE_SLICES = 0x8; + /// FF_DECODE_ERROR_INVALID_BITSTREAM = 1 + public const int FF_DECODE_ERROR_INVALID_BITSTREAM = 0x1; + /// FF_DECODE_ERROR_MISSING_REFERENCE = 2 + public const int FF_DECODE_ERROR_MISSING_REFERENCE = 0x2; + /// FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO = 0x2 + public const int FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO = 0x2; + /// FF_DXVA2_WORKAROUND_SCALING_LIST_ZIGZAG = 0x1 + public const int 
FF_DXVA2_WORKAROUND_SCALING_LIST_ZIGZAG = 0x1; + /// FF_EC_DEBLOCK = 0x2 + public const int FF_EC_DEBLOCK = 0x2; + /// FF_EC_FAVOR_INTER = 0x100 + public const int FF_EC_FAVOR_INTER = 0x100; + /// FF_EC_GUESS_MVS = 0x1 + public const int FF_EC_GUESS_MVS = 0x1; + /// FF_FDEBUG_TS = 0x0001 + public const int FF_FDEBUG_TS = 0x1; + /// FF_HLS_TS_OPTIONS = (LIBAVFORMAT_VERSION_MAJOR < 60) + public const bool FF_HLS_TS_OPTIONS = LIBAVFORMAT_VERSION_MAJOR < 0x3c; + /// FF_IDCT_ALTIVEC = 0x8 + public const int FF_IDCT_ALTIVEC = 0x8; + /// FF_IDCT_ARM = 0x7 + public const int FF_IDCT_ARM = 0x7; + /// FF_IDCT_AUTO = 0x0 + public const int FF_IDCT_AUTO = 0x0; + /// FF_IDCT_FAAN = 0x14 + public const int FF_IDCT_FAAN = 0x14; + /// FF_IDCT_INT = 0x1 + public const int FF_IDCT_INT = 0x1; + /// FF_IDCT_NONE = 0x18 + public const int FF_IDCT_NONE = 0x18; + /// FF_IDCT_SIMPLE = 0x2 + public const int FF_IDCT_SIMPLE = 0x2; + /// FF_IDCT_SIMPLEARM = 0xa + public const int FF_IDCT_SIMPLEARM = 0xa; + /// FF_IDCT_SIMPLEARMV5TE = 0x10 + public const int FF_IDCT_SIMPLEARMV5TE = 0x10; + /// FF_IDCT_SIMPLEARMV6 = 0x11 + public const int FF_IDCT_SIMPLEARMV6 = 0x11; + /// FF_IDCT_SIMPLEAUTO = 0x80 + public const int FF_IDCT_SIMPLEAUTO = 0x80; + /// FF_IDCT_SIMPLEMMX = 0x3 + public const int FF_IDCT_SIMPLEMMX = 0x3; + /// FF_IDCT_SIMPLENEON = 0x16 + public const int FF_IDCT_SIMPLENEON = 0x16; + /// FF_IDCT_XVID = 0xe + public const int FF_IDCT_XVID = 0xe; + /// FF_LAMBDA_MAX = (256*128-1) + public const int FF_LAMBDA_MAX = 0x100 * 0x80 - 0x1; + /// FF_LAMBDA_SCALE = (1<<FF_LAMBDA_SHIFT) + public const int FF_LAMBDA_SCALE = 0x1 << FF_LAMBDA_SHIFT; + /// FF_LAMBDA_SHIFT = 7 + public const int FF_LAMBDA_SHIFT = 0x7; + /// FF_LEVEL_UNKNOWN = -0x63 + public const int FF_LEVEL_UNKNOWN = -0x63; + /// FF_LOSS_ALPHA = 0x8 + public const int FF_LOSS_ALPHA = 0x8; + /// FF_LOSS_CHROMA = 0x20 + public const int FF_LOSS_CHROMA = 0x20; + /// FF_LOSS_COLORQUANT = 0x10 + public const int FF_LOSS_COLORQUANT = 
0x10; + /// FF_LOSS_COLORSPACE = 0x4 + public const int FF_LOSS_COLORSPACE = 0x4; + /// FF_LOSS_DEPTH = 0x2 + public const int FF_LOSS_DEPTH = 0x2; + /// FF_LOSS_RESOLUTION = 0x1 + public const int FF_LOSS_RESOLUTION = 0x1; + /// FF_MB_DECISION_BITS = 0x1 + public const int FF_MB_DECISION_BITS = 0x1; + /// FF_MB_DECISION_RD = 0x2 + public const int FF_MB_DECISION_RD = 0x2; + /// FF_MB_DECISION_SIMPLE = 0x0 + public const int FF_MB_DECISION_SIMPLE = 0x0; + /// FF_PROFILE_AAC_ELD = 0x26 + public const int FF_PROFILE_AAC_ELD = 0x26; + /// FF_PROFILE_AAC_HE = 0x4 + public const int FF_PROFILE_AAC_HE = 0x4; + /// FF_PROFILE_AAC_HE_V2 = 0x1c + public const int FF_PROFILE_AAC_HE_V2 = 0x1c; + /// FF_PROFILE_AAC_LD = 0x16 + public const int FF_PROFILE_AAC_LD = 0x16; + /// FF_PROFILE_AAC_LOW = 0x1 + public const int FF_PROFILE_AAC_LOW = 0x1; + /// FF_PROFILE_AAC_LTP = 0x3 + public const int FF_PROFILE_AAC_LTP = 0x3; + /// FF_PROFILE_AAC_MAIN = 0x0 + public const int FF_PROFILE_AAC_MAIN = 0x0; + /// FF_PROFILE_AAC_SSR = 0x2 + public const int FF_PROFILE_AAC_SSR = 0x2; + /// FF_PROFILE_ARIB_PROFILE_A = 0x0 + public const int FF_PROFILE_ARIB_PROFILE_A = 0x0; + /// FF_PROFILE_ARIB_PROFILE_C = 0x1 + public const int FF_PROFILE_ARIB_PROFILE_C = 0x1; + /// FF_PROFILE_AV1_HIGH = 0x1 + public const int FF_PROFILE_AV1_HIGH = 0x1; + /// FF_PROFILE_AV1_MAIN = 0x0 + public const int FF_PROFILE_AV1_MAIN = 0x0; + /// FF_PROFILE_AV1_PROFESSIONAL = 0x2 + public const int FF_PROFILE_AV1_PROFESSIONAL = 0x2; + /// FF_PROFILE_DNXHD = 0x0 + public const int FF_PROFILE_DNXHD = 0x0; + /// FF_PROFILE_DNXHR_444 = 0x5 + public const int FF_PROFILE_DNXHR_444 = 0x5; + /// FF_PROFILE_DNXHR_HQ = 0x3 + public const int FF_PROFILE_DNXHR_HQ = 0x3; + /// FF_PROFILE_DNXHR_HQX = 0x4 + public const int FF_PROFILE_DNXHR_HQX = 0x4; + /// FF_PROFILE_DNXHR_LB = 0x1 + public const int FF_PROFILE_DNXHR_LB = 0x1; + /// FF_PROFILE_DNXHR_SQ = 0x2 + public const int FF_PROFILE_DNXHR_SQ = 0x2; + /// FF_PROFILE_DTS = 0x14 + 
public const int FF_PROFILE_DTS = 0x14; + /// FF_PROFILE_DTS_96_24 = 0x28 + public const int FF_PROFILE_DTS_96_24 = 0x28; + /// FF_PROFILE_DTS_ES = 0x1e + public const int FF_PROFILE_DTS_ES = 0x1e; + /// FF_PROFILE_DTS_EXPRESS = 0x46 + public const int FF_PROFILE_DTS_EXPRESS = 0x46; + /// FF_PROFILE_DTS_HD_HRA = 0x32 + public const int FF_PROFILE_DTS_HD_HRA = 0x32; + /// FF_PROFILE_DTS_HD_MA = 0x3c + public const int FF_PROFILE_DTS_HD_MA = 0x3c; + /// FF_PROFILE_H264_BASELINE = 0x42 + public const int FF_PROFILE_H264_BASELINE = 0x42; + /// FF_PROFILE_H264_CAVLC_444 = 0x2c + public const int FF_PROFILE_H264_CAVLC_444 = 0x2c; + /// FF_PROFILE_H264_CONSTRAINED = 0x1 << 0x9 + public const int FF_PROFILE_H264_CONSTRAINED = 0x1 << 0x9; + /// FF_PROFILE_H264_CONSTRAINED_BASELINE = 0x42 | FF_PROFILE_H264_CONSTRAINED + public const int FF_PROFILE_H264_CONSTRAINED_BASELINE = 0x42 | FF_PROFILE_H264_CONSTRAINED; + /// FF_PROFILE_H264_EXTENDED = 0x58 + public const int FF_PROFILE_H264_EXTENDED = 0x58; + /// FF_PROFILE_H264_HIGH = 0x64 + public const int FF_PROFILE_H264_HIGH = 0x64; + /// FF_PROFILE_H264_HIGH_10 = 0x6e + public const int FF_PROFILE_H264_HIGH_10 = 0x6e; + /// FF_PROFILE_H264_HIGH_10_INTRA = 0x6e | FF_PROFILE_H264_INTRA + public const int FF_PROFILE_H264_HIGH_10_INTRA = 0x6e | FF_PROFILE_H264_INTRA; + /// FF_PROFILE_H264_HIGH_422 = 0x7a + public const int FF_PROFILE_H264_HIGH_422 = 0x7a; + /// FF_PROFILE_H264_HIGH_422_INTRA = 0x7a | FF_PROFILE_H264_INTRA + public const int FF_PROFILE_H264_HIGH_422_INTRA = 0x7a | FF_PROFILE_H264_INTRA; + /// FF_PROFILE_H264_HIGH_444 = 0x90 + public const int FF_PROFILE_H264_HIGH_444 = 0x90; + /// FF_PROFILE_H264_HIGH_444_INTRA = 0xf4 | FF_PROFILE_H264_INTRA + public const int FF_PROFILE_H264_HIGH_444_INTRA = 0xf4 | FF_PROFILE_H264_INTRA; + /// FF_PROFILE_H264_HIGH_444_PREDICTIVE = 0xf4 + public const int FF_PROFILE_H264_HIGH_444_PREDICTIVE = 0xf4; + /// FF_PROFILE_H264_INTRA = 0x1 << 0xb + public const int FF_PROFILE_H264_INTRA = 
0x1 << 0xb; + /// FF_PROFILE_H264_MAIN = 0x4d + public const int FF_PROFILE_H264_MAIN = 0x4d; + /// FF_PROFILE_H264_MULTIVIEW_HIGH = 0x76 + public const int FF_PROFILE_H264_MULTIVIEW_HIGH = 0x76; + /// FF_PROFILE_H264_STEREO_HIGH = 0x80 + public const int FF_PROFILE_H264_STEREO_HIGH = 0x80; + /// FF_PROFILE_HEVC_MAIN = 0x1 + public const int FF_PROFILE_HEVC_MAIN = 0x1; + /// FF_PROFILE_HEVC_MAIN_10 = 0x2 + public const int FF_PROFILE_HEVC_MAIN_10 = 0x2; + /// FF_PROFILE_HEVC_MAIN_STILL_PICTURE = 0x3 + public const int FF_PROFILE_HEVC_MAIN_STILL_PICTURE = 0x3; + /// FF_PROFILE_HEVC_REXT = 0x4 + public const int FF_PROFILE_HEVC_REXT = 0x4; + /// FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION = 0x8000 + public const int FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION = 0x8000; + /// FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0 = 0x1 + public const int FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0 = 0x1; + /// FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1 = 0x2 + public const int FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1 = 0x2; + /// FF_PROFILE_JPEG2000_DCINEMA_2K = 0x3 + public const int FF_PROFILE_JPEG2000_DCINEMA_2K = 0x3; + /// FF_PROFILE_JPEG2000_DCINEMA_4K = 0x4 + public const int FF_PROFILE_JPEG2000_DCINEMA_4K = 0x4; + /// FF_PROFILE_KLVA_ASYNC = 0x1 + public const int FF_PROFILE_KLVA_ASYNC = 0x1; + /// FF_PROFILE_KLVA_SYNC = 0x0 + public const int FF_PROFILE_KLVA_SYNC = 0x0; + /// FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT = 0xc0 + public const int FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT = 0xc0; + /// FF_PROFILE_MJPEG_HUFFMAN_EXTENDED_SEQUENTIAL_DCT = 0xc1 + public const int FF_PROFILE_MJPEG_HUFFMAN_EXTENDED_SEQUENTIAL_DCT = 0xc1; + /// FF_PROFILE_MJPEG_HUFFMAN_LOSSLESS = 0xc3 + public const int FF_PROFILE_MJPEG_HUFFMAN_LOSSLESS = 0xc3; + /// FF_PROFILE_MJPEG_HUFFMAN_PROGRESSIVE_DCT = 0xc2 + public const int FF_PROFILE_MJPEG_HUFFMAN_PROGRESSIVE_DCT = 0xc2; + /// FF_PROFILE_MJPEG_JPEG_LS = 0xf7 + public const int FF_PROFILE_MJPEG_JPEG_LS = 0xf7; + /// FF_PROFILE_MPEG2_422 = 0x0 + public 
const int FF_PROFILE_MPEG2_422 = 0x0; + /// FF_PROFILE_MPEG2_AAC_HE = 0x83 + public const int FF_PROFILE_MPEG2_AAC_HE = 0x83; + /// FF_PROFILE_MPEG2_AAC_LOW = 0x80 + public const int FF_PROFILE_MPEG2_AAC_LOW = 0x80; + /// FF_PROFILE_MPEG2_HIGH = 0x1 + public const int FF_PROFILE_MPEG2_HIGH = 0x1; + /// FF_PROFILE_MPEG2_MAIN = 0x4 + public const int FF_PROFILE_MPEG2_MAIN = 0x4; + /// FF_PROFILE_MPEG2_SIMPLE = 0x5 + public const int FF_PROFILE_MPEG2_SIMPLE = 0x5; + /// FF_PROFILE_MPEG2_SNR_SCALABLE = 0x3 + public const int FF_PROFILE_MPEG2_SNR_SCALABLE = 0x3; + /// FF_PROFILE_MPEG2_SS = 0x2 + public const int FF_PROFILE_MPEG2_SS = 0x2; + /// FF_PROFILE_MPEG4_ADVANCED_CODING = 0xb + public const int FF_PROFILE_MPEG4_ADVANCED_CODING = 0xb; + /// FF_PROFILE_MPEG4_ADVANCED_CORE = 0xc + public const int FF_PROFILE_MPEG4_ADVANCED_CORE = 0xc; + /// FF_PROFILE_MPEG4_ADVANCED_REAL_TIME = 0x9 + public const int FF_PROFILE_MPEG4_ADVANCED_REAL_TIME = 0x9; + /// FF_PROFILE_MPEG4_ADVANCED_SCALABLE_TEXTURE = 0xd + public const int FF_PROFILE_MPEG4_ADVANCED_SCALABLE_TEXTURE = 0xd; + /// FF_PROFILE_MPEG4_ADVANCED_SIMPLE = 0xf + public const int FF_PROFILE_MPEG4_ADVANCED_SIMPLE = 0xf; + /// FF_PROFILE_MPEG4_BASIC_ANIMATED_TEXTURE = 0x7 + public const int FF_PROFILE_MPEG4_BASIC_ANIMATED_TEXTURE = 0x7; + /// FF_PROFILE_MPEG4_CORE = 0x2 + public const int FF_PROFILE_MPEG4_CORE = 0x2; + /// FF_PROFILE_MPEG4_CORE_SCALABLE = 0xa + public const int FF_PROFILE_MPEG4_CORE_SCALABLE = 0xa; + /// FF_PROFILE_MPEG4_HYBRID = 0x8 + public const int FF_PROFILE_MPEG4_HYBRID = 0x8; + /// FF_PROFILE_MPEG4_MAIN = 0x3 + public const int FF_PROFILE_MPEG4_MAIN = 0x3; + /// FF_PROFILE_MPEG4_N_BIT = 0x4 + public const int FF_PROFILE_MPEG4_N_BIT = 0x4; + /// FF_PROFILE_MPEG4_SCALABLE_TEXTURE = 0x5 + public const int FF_PROFILE_MPEG4_SCALABLE_TEXTURE = 0x5; + /// FF_PROFILE_MPEG4_SIMPLE = 0x0 + public const int FF_PROFILE_MPEG4_SIMPLE = 0x0; + /// FF_PROFILE_MPEG4_SIMPLE_FACE_ANIMATION = 0x6 + public const int 
FF_PROFILE_MPEG4_SIMPLE_FACE_ANIMATION = 0x6; + /// FF_PROFILE_MPEG4_SIMPLE_SCALABLE = 0x1 + public const int FF_PROFILE_MPEG4_SIMPLE_SCALABLE = 0x1; + /// FF_PROFILE_MPEG4_SIMPLE_STUDIO = 0xe + public const int FF_PROFILE_MPEG4_SIMPLE_STUDIO = 0xe; + /// FF_PROFILE_PRORES_4444 = 0x4 + public const int FF_PROFILE_PRORES_4444 = 0x4; + /// FF_PROFILE_PRORES_HQ = 0x3 + public const int FF_PROFILE_PRORES_HQ = 0x3; + /// FF_PROFILE_PRORES_LT = 0x1 + public const int FF_PROFILE_PRORES_LT = 0x1; + /// FF_PROFILE_PRORES_PROXY = 0x0 + public const int FF_PROFILE_PRORES_PROXY = 0x0; + /// FF_PROFILE_PRORES_STANDARD = 0x2 + public const int FF_PROFILE_PRORES_STANDARD = 0x2; + /// FF_PROFILE_PRORES_XQ = 0x5 + public const int FF_PROFILE_PRORES_XQ = 0x5; + /// FF_PROFILE_RESERVED = -0x64 + public const int FF_PROFILE_RESERVED = -0x64; + /// FF_PROFILE_SBC_MSBC = 0x1 + public const int FF_PROFILE_SBC_MSBC = 0x1; + /// FF_PROFILE_UNKNOWN = -0x63 + public const int FF_PROFILE_UNKNOWN = -0x63; + /// FF_PROFILE_VC1_ADVANCED = 0x3 + public const int FF_PROFILE_VC1_ADVANCED = 0x3; + /// FF_PROFILE_VC1_COMPLEX = 0x2 + public const int FF_PROFILE_VC1_COMPLEX = 0x2; + /// FF_PROFILE_VC1_MAIN = 0x1 + public const int FF_PROFILE_VC1_MAIN = 0x1; + /// FF_PROFILE_VC1_SIMPLE = 0x0 + public const int FF_PROFILE_VC1_SIMPLE = 0x0; + /// FF_PROFILE_VP9_0 = 0x0 + public const int FF_PROFILE_VP9_0 = 0x0; + /// FF_PROFILE_VP9_1 = 0x1 + public const int FF_PROFILE_VP9_1 = 0x1; + /// FF_PROFILE_VP9_2 = 0x2 + public const int FF_PROFILE_VP9_2 = 0x2; + /// FF_PROFILE_VP9_3 = 0x3 + public const int FF_PROFILE_VP9_3 = 0x3; + /// FF_PROFILE_VVC_MAIN_10 = 0x1 + public const int FF_PROFILE_VVC_MAIN_10 = 0x1; + /// FF_PROFILE_VVC_MAIN_10_444 = 0x21 + public const int FF_PROFILE_VVC_MAIN_10_444 = 0x21; + /// FF_QP2LAMBDA = 118 + public const int FF_QP2LAMBDA = 0x76; + /// FF_QUALITY_SCALE = FF_LAMBDA_SCALE + public const int FF_QUALITY_SCALE = FF_LAMBDA_SCALE; + /// FF_SUB_CHARENC_MODE_AUTOMATIC = 0x0 + public 
const int FF_SUB_CHARENC_MODE_AUTOMATIC = 0x0; + /// FF_SUB_CHARENC_MODE_DO_NOTHING = -0x1 + public const int FF_SUB_CHARENC_MODE_DO_NOTHING = -0x1; + /// FF_SUB_CHARENC_MODE_IGNORE = 0x2 + public const int FF_SUB_CHARENC_MODE_IGNORE = 0x2; + /// FF_SUB_CHARENC_MODE_PRE_DECODER = 0x1 + public const int FF_SUB_CHARENC_MODE_PRE_DECODER = 0x1; + /// FF_SUB_TEXT_FMT_ASS = 0x0 + public const int FF_SUB_TEXT_FMT_ASS = 0x0; + /// FF_THREAD_FRAME = 0x1 + public const int FF_THREAD_FRAME = 0x1; + /// FF_THREAD_SLICE = 0x2 + public const int FF_THREAD_SLICE = 0x2; + // public static FFABS = (a) ((a) >= 0 ? (a) : (-(a))); + // public static FFABS64U = (a) ((a) <= 0 ? -(uint64_t)(a) : (uint64_t)(a)); + // public static FFABSU = (a) ((a) <= 0 ? -(unsigned)(a) : (unsigned)(a)); + // public static FFALIGN = x; + // public static FFDIFFSIGN = x; + // public static FFERRTAG = a; + // public static FFMAX = (a,b) ((a) > (b) ? (a) : (b)); + // public static FFMAX3 = a; + // public static FFMIN = (a,b) ((a) > (b) ? (b) : (a)); + // public static FFMIN3 = a; + // public static FFNABS = (a) ((a) <= 0 ? (a) : (-(a))); + // public static FFSIGN = (a) ((a) > 0 ? 
1 : -1); + // public static FFSWAP = (type,a,b) do{type SWAP_tmp= b; b= a; a= SWAP_tmp;}while(0); + // public static FFUDIV = (a,b) (((a)>0 ?(a):(a)-(b)+1) / (b)); + // public static FFUMOD = a; + // public static GET_UTF16 = (val, GET_16BIT, ERROR)val = (GET_16BIT);{unsigned int hi = val - 0xD800;if (hi < 0x800) {val = (GET_16BIT) - 0xDC00;if (val > 0x3FFU || hi > 0x3FFU){ERROR}val += (hi<<10) + 0x10000;}}; + // public static GET_UTF8 = (val, GET_BYTE, ERROR)val= (GET_BYTE);{uint32_t top = (val & 128) >> 1;if ((val & 0xc0) == 0x80 || val >= 0xFE){ERROR}while (val & top) {unsigned int tmp = (GET_BYTE) - 128;if(tmp>>6){ERROR}val= (val<<6) + tmp;top <<= 5;}val &= (top << 1) - 1;}; + /// LIBAVCODEC_BUILD = LIBAVCODEC_VERSION_INT + public static readonly int LIBAVCODEC_BUILD = LIBAVCODEC_VERSION_INT; + /// LIBAVCODEC_IDENT = "Lavc" + public const string LIBAVCODEC_IDENT = "Lavc"; + /// LIBAVCODEC_VERSION = AV_VERSION(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO) + public static readonly string LIBAVCODEC_VERSION = AV_VERSION(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO); + /// LIBAVCODEC_VERSION_INT = AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO) + public static readonly int LIBAVCODEC_VERSION_INT = AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO); + /// LIBAVCODEC_VERSION_MAJOR = 59 + public const int LIBAVCODEC_VERSION_MAJOR = 0x3b; + /// LIBAVCODEC_VERSION_MICRO = 0x64 + public const int LIBAVCODEC_VERSION_MICRO = 0x64; + /// LIBAVCODEC_VERSION_MINOR = 0x25 + public const int LIBAVCODEC_VERSION_MINOR = 0x25; + /// LIBAVDEVICE_BUILD = LIBAVDEVICE_VERSION_INT + public static readonly int LIBAVDEVICE_BUILD = LIBAVDEVICE_VERSION_INT; + /// LIBAVDEVICE_IDENT = "Lavd" AV_STRINGIFY(LIBAVDEVICE_VERSION) + public const string LIBAVDEVICE_IDENT = "Lavd"; + /// LIBAVDEVICE_VERSION = AV_VERSION(LIBAVDEVICE_VERSION_MAJOR, 
LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO) + public static readonly string LIBAVDEVICE_VERSION = AV_VERSION(LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO); + /// LIBAVDEVICE_VERSION_INT = AV_VERSION_INT(LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO) + public static readonly int LIBAVDEVICE_VERSION_INT = AV_VERSION_INT(LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO); + /// LIBAVDEVICE_VERSION_MAJOR = 59 + public const int LIBAVDEVICE_VERSION_MAJOR = 0x3b; + /// LIBAVDEVICE_VERSION_MICRO = 100 + public const int LIBAVDEVICE_VERSION_MICRO = 0x64; + /// LIBAVDEVICE_VERSION_MINOR = 7 + public const int LIBAVDEVICE_VERSION_MINOR = 0x7; + /// LIBAVFILTER_BUILD = LIBAVFILTER_VERSION_INT + public static readonly int LIBAVFILTER_BUILD = LIBAVFILTER_VERSION_INT; + /// LIBAVFILTER_IDENT = "Lavfi" + public const string LIBAVFILTER_IDENT = "Lavfi"; + /// LIBAVFILTER_VERSION = AV_VERSION(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO) + public static readonly string LIBAVFILTER_VERSION = AV_VERSION(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO); + /// LIBAVFILTER_VERSION_INT = AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO) + public static readonly int LIBAVFILTER_VERSION_INT = AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO); + /// LIBAVFILTER_VERSION_MAJOR = 0x8 + public const int LIBAVFILTER_VERSION_MAJOR = 0x8; + /// LIBAVFILTER_VERSION_MICRO = 0x64 + public const int LIBAVFILTER_VERSION_MICRO = 0x64; + /// LIBAVFILTER_VERSION_MINOR = 0x2c + public const int LIBAVFILTER_VERSION_MINOR = 0x2c; + /// LIBAVFORMAT_BUILD = LIBAVFORMAT_VERSION_INT + public static readonly int LIBAVFORMAT_BUILD = LIBAVFORMAT_VERSION_INT; + /// LIBAVFORMAT_IDENT = "Lavf" AV_STRINGIFY(LIBAVFORMAT_VERSION) + public const string 
LIBAVFORMAT_IDENT = "Lavf"; + /// LIBAVFORMAT_VERSION = AV_VERSION(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO) + public static readonly string LIBAVFORMAT_VERSION = AV_VERSION(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO); + /// LIBAVFORMAT_VERSION_INT = AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO) + public static readonly int LIBAVFORMAT_VERSION_INT = AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO); + /// LIBAVFORMAT_VERSION_MAJOR = 59 + public const int LIBAVFORMAT_VERSION_MAJOR = 0x3b; + /// LIBAVFORMAT_VERSION_MICRO = 100 + public const int LIBAVFORMAT_VERSION_MICRO = 0x64; + /// LIBAVFORMAT_VERSION_MINOR = 27 + public const int LIBAVFORMAT_VERSION_MINOR = 0x1b; + /// LIBAVUTIL_BUILD = LIBAVUTIL_VERSION_INT + public static readonly int LIBAVUTIL_BUILD = LIBAVUTIL_VERSION_INT; + /// LIBAVUTIL_IDENT = "Lavu" AV_STRINGIFY(LIBAVUTIL_VERSION) + public const string LIBAVUTIL_IDENT = "Lavu"; + /// LIBAVUTIL_VERSION = AV_VERSION(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO) + public static readonly string LIBAVUTIL_VERSION = AV_VERSION(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO); + /// LIBAVUTIL_VERSION_INT = AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO) + public static readonly int LIBAVUTIL_VERSION_INT = AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO); + /// LIBAVUTIL_VERSION_MAJOR = 57 + public const int LIBAVUTIL_VERSION_MAJOR = 0x39; + /// LIBAVUTIL_VERSION_MICRO = 100 + public const int LIBAVUTIL_VERSION_MICRO = 0x64; + /// LIBAVUTIL_VERSION_MINOR = 28 + public const int LIBAVUTIL_VERSION_MINOR = 0x1c; + /// LIBPOSTPROC_BUILD = LIBPOSTPROC_VERSION_INT + public static readonly int LIBPOSTPROC_BUILD = LIBPOSTPROC_VERSION_INT; + /// LIBPOSTPROC_IDENT = 
"postproc" + public const string LIBPOSTPROC_IDENT = "postproc"; + /// LIBPOSTPROC_VERSION = AV_VERSION(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO) + public static readonly string LIBPOSTPROC_VERSION = AV_VERSION(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO); + /// LIBPOSTPROC_VERSION_INT = AV_VERSION_INT(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO) + public static readonly int LIBPOSTPROC_VERSION_INT = AV_VERSION_INT(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO); + /// LIBPOSTPROC_VERSION_MAJOR = 0x38 + public const int LIBPOSTPROC_VERSION_MAJOR = 0x38; + /// LIBPOSTPROC_VERSION_MICRO = 0x64 + public const int LIBPOSTPROC_VERSION_MICRO = 0x64; + /// LIBPOSTPROC_VERSION_MINOR = 0x6 + public const int LIBPOSTPROC_VERSION_MINOR = 0x6; + /// LIBSWRESAMPLE_BUILD = LIBSWRESAMPLE_VERSION_INT + public static readonly int LIBSWRESAMPLE_BUILD = LIBSWRESAMPLE_VERSION_INT; + /// LIBSWRESAMPLE_IDENT = "SwR" + public const string LIBSWRESAMPLE_IDENT = "SwR"; + /// LIBSWRESAMPLE_VERSION = AV_VERSION(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO) + public static readonly string LIBSWRESAMPLE_VERSION = AV_VERSION(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO); + /// LIBSWRESAMPLE_VERSION_INT = AV_VERSION_INT(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO) + public static readonly int LIBSWRESAMPLE_VERSION_INT = AV_VERSION_INT(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO); + /// LIBSWRESAMPLE_VERSION_MAJOR = 0x4 + public const int LIBSWRESAMPLE_VERSION_MAJOR = 0x4; + /// LIBSWRESAMPLE_VERSION_MICRO = 0x64 + public const int LIBSWRESAMPLE_VERSION_MICRO = 0x64; + /// LIBSWRESAMPLE_VERSION_MINOR = 0x7 + public const int LIBSWRESAMPLE_VERSION_MINOR = 0x7; + /// LIBSWSCALE_BUILD = 
LIBSWSCALE_VERSION_INT + public static readonly int LIBSWSCALE_BUILD = LIBSWSCALE_VERSION_INT; + /// LIBSWSCALE_IDENT = "SwS" + public const string LIBSWSCALE_IDENT = "SwS"; + /// LIBSWSCALE_VERSION = AV_VERSION(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO) + public static readonly string LIBSWSCALE_VERSION = AV_VERSION(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO); + /// LIBSWSCALE_VERSION_INT = AV_VERSION_INT(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO) + public static readonly int LIBSWSCALE_VERSION_INT = AV_VERSION_INT(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO); + /// LIBSWSCALE_VERSION_MAJOR = 0x6 + public const int LIBSWSCALE_VERSION_MAJOR = 0x6; + /// LIBSWSCALE_VERSION_MICRO = 0x64 + public const int LIBSWSCALE_VERSION_MICRO = 0x64; + /// LIBSWSCALE_VERSION_MINOR = 0x7 + public const int LIBSWSCALE_VERSION_MINOR = 0x7; + /// M_E = 2.7182818284590452354 + public const double M_E = 2.718281828459045D; + /// M_LN10 = 2.30258509299404568402 + public const double M_LN10 = 2.302585092994046D; + /// M_LN2 = 0.69314718055994530942 + public const double M_LN2 = 0.6931471805599453D; + /// M_LOG2_10 = 3.32192809488736234787 + public const double M_LOG2_10 = 3.321928094887362D; + /// M_PHI = 1.61803398874989484820 + public const double M_PHI = 1.618033988749895D; + /// M_PI = 3.14159265358979323846 + public const double M_PI = 3.141592653589793D; + /// M_PI_2 = 1.57079632679489661923 + public const double M_PI_2 = 1.5707963267948966D; + /// M_SQRT1_2 = 0.70710678118654752440 + public const double M_SQRT1_2 = 0.7071067811865476D; + /// M_SQRT2 = 1.41421356237309504880 + public const double M_SQRT2 = 1.4142135623730951D; + // public static MKBETAG = a; + // public static MKTAG = a; + /// PARSER_FLAG_COMPLETE_FRAMES = 0x1 + public const int PARSER_FLAG_COMPLETE_FRAMES = 0x1; + /// PARSER_FLAG_FETCHED_OFFSET = 0x4 + public const int 
PARSER_FLAG_FETCHED_OFFSET = 0x4; + /// PARSER_FLAG_ONCE = 0x2 + public const int PARSER_FLAG_ONCE = 0x2; + /// PARSER_FLAG_USE_CODEC_TS = 0x1000 + public const int PARSER_FLAG_USE_CODEC_TS = 0x1000; + /// PP_CPU_CAPS_3DNOW = 0x40000000 + public const int PP_CPU_CAPS_3DNOW = 0x40000000; + /// PP_CPU_CAPS_ALTIVEC = 0x10000000 + public const int PP_CPU_CAPS_ALTIVEC = 0x10000000; + /// PP_CPU_CAPS_AUTO = 0x80000 + public const int PP_CPU_CAPS_AUTO = 0x80000; + /// PP_CPU_CAPS_MMX = 0x80000000U + public const uint PP_CPU_CAPS_MMX = 0x80000000U; + /// PP_CPU_CAPS_MMX2 = 0x20000000 + public const int PP_CPU_CAPS_MMX2 = 0x20000000; + /// PP_FORMAT = 0x8 + public const int PP_FORMAT = 0x8; + /// PP_FORMAT_411 = 0x2 | PP_FORMAT + public const int PP_FORMAT_411 = 0x2 | PP_FORMAT; + /// PP_FORMAT_420 = 0x11 | PP_FORMAT + public const int PP_FORMAT_420 = 0x11 | PP_FORMAT; + /// PP_FORMAT_422 = 0x1 | PP_FORMAT + public const int PP_FORMAT_422 = 0x1 | PP_FORMAT; + /// PP_FORMAT_440 = 0x10 | PP_FORMAT + public const int PP_FORMAT_440 = 0x10 | PP_FORMAT; + /// PP_FORMAT_444 = 0x0 | PP_FORMAT + public const int PP_FORMAT_444 = 0x0 | PP_FORMAT; + /// PP_PICT_TYPE_QP2 = 0x10 + public const int PP_PICT_TYPE_QP2 = 0x10; + /// PP_QUALITY_MAX = 0x6 + public const int PP_QUALITY_MAX = 0x6; + // public static PUT_UTF16 = (val, tmp, PUT_16BIT){uint32_t in = val;if (in < 0x10000) {tmp = in;PUT_16BIT} else {tmp = 0xD800 | ((in - 0x10000) >> 10);PUT_16BITtmp = 0xDC00 | ((in - 0x10000) & 0x3FF);PUT_16BIT}}; + // public static PUT_UTF8 = (val, tmp, PUT_BYTE){int bytes, shift;uint32_t in = val;if (in < 0x80) {tmp = in;PUT_BYTE} else {bytes = (av_log2(in) + 4) / 5;shift = (bytes - 1) * 6;tmp = (256 - (256 >> bytes)) | (in >> shift);PUT_BYTEwhile (shift >= 6) {shift -= 6;tmp = 0x80 | ((in >> shift) & 0x3f);PUT_BYTE}}}; + // public static ROUNDED_DIV = (a,b) (((a)>=0 ? (a) + ((b)>>1) : (a) - ((b)>>1))/(b)); + // public static RSHIFT = (a,b) ((a) > 0 ? 
((a) + ((1<<(b))>>1))>>(b) : ((a) + ((1<<(b))>>1)-1)>>(b)); + /// SLICE_FLAG_ALLOW_FIELD = 0x2 + public const int SLICE_FLAG_ALLOW_FIELD = 0x2; + /// SLICE_FLAG_ALLOW_PLANE = 0x4 + public const int SLICE_FLAG_ALLOW_PLANE = 0x4; + /// SLICE_FLAG_CODED_ORDER = 0x1 + public const int SLICE_FLAG_CODED_ORDER = 0x1; + /// SWR_FLAG_RESAMPLE = 0x1 + public const int SWR_FLAG_RESAMPLE = 0x1; + /// SWS_ACCURATE_RND = 0x40000 + public const int SWS_ACCURATE_RND = 0x40000; + /// SWS_AREA = 0x20 + public const int SWS_AREA = 0x20; + /// SWS_BICUBIC = 0x4 + public const int SWS_BICUBIC = 0x4; + /// SWS_BICUBLIN = 0x40 + public const int SWS_BICUBLIN = 0x40; + /// SWS_BILINEAR = 0x2 + public const int SWS_BILINEAR = 0x2; + /// SWS_BITEXACT = 0x80000 + public const int SWS_BITEXACT = 0x80000; + /// SWS_CS_BT2020 = 0x9 + public const int SWS_CS_BT2020 = 0x9; + /// SWS_CS_DEFAULT = 0x5 + public const int SWS_CS_DEFAULT = 0x5; + /// SWS_CS_FCC = 0x4 + public const int SWS_CS_FCC = 0x4; + /// SWS_CS_ITU601 = 0x5 + public const int SWS_CS_ITU601 = 0x5; + /// SWS_CS_ITU624 = 0x5 + public const int SWS_CS_ITU624 = 0x5; + /// SWS_CS_ITU709 = 0x1 + public const int SWS_CS_ITU709 = 0x1; + /// SWS_CS_SMPTE170M = 0x5 + public const int SWS_CS_SMPTE170M = 0x5; + /// SWS_CS_SMPTE240M = 0x7 + public const int SWS_CS_SMPTE240M = 0x7; + /// SWS_DIRECT_BGR = 0x8000 + public const int SWS_DIRECT_BGR = 0x8000; + /// SWS_ERROR_DIFFUSION = 0x800000 + public const int SWS_ERROR_DIFFUSION = 0x800000; + /// SWS_FAST_BILINEAR = 0x1 + public const int SWS_FAST_BILINEAR = 0x1; + /// SWS_FULL_CHR_H_INP = 0x4000 + public const int SWS_FULL_CHR_H_INP = 0x4000; + /// SWS_FULL_CHR_H_INT = 0x2000 + public const int SWS_FULL_CHR_H_INT = 0x2000; + /// SWS_GAUSS = 0x80 + public const int SWS_GAUSS = 0x80; + /// SWS_LANCZOS = 0x200 + public const int SWS_LANCZOS = 0x200; + /// SWS_MAX_REDUCE_CUTOFF = 0.002D + public const double SWS_MAX_REDUCE_CUTOFF = 0.002D; + /// SWS_PARAM_DEFAULT = 0x1e240 + public const int 
SWS_PARAM_DEFAULT = 0x1e240; + /// SWS_POINT = 0x10 + public const int SWS_POINT = 0x10; + /// SWS_PRINT_INFO = 0x1000 + public const int SWS_PRINT_INFO = 0x1000; + /// SWS_SINC = 0x100 + public const int SWS_SINC = 0x100; + /// SWS_SPLINE = 0x400 + public const int SWS_SPLINE = 0x400; + /// SWS_SRC_V_CHR_DROP_MASK = 0x30000 + public const int SWS_SRC_V_CHR_DROP_MASK = 0x30000; + /// SWS_SRC_V_CHR_DROP_SHIFT = 0x10 + public const int SWS_SRC_V_CHR_DROP_SHIFT = 0x10; + /// SWS_X = 0x8 + public const int SWS_X = 0x8; +} diff --git a/FFmpeg.AutoGen.Abstractions/generated/vectors.g.cs b/FFmpeg.AutoGen.Abstractions/generated/vectors.g.cs new file mode 100644 index 00000000..b63b1fc1 --- /dev/null +++ b/FFmpeg.AutoGen.Abstractions/generated/vectors.g.cs @@ -0,0 +1,3725 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Abstractions; + +public static unsafe partial class vectors +{ + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVABufferSinkParams* av_abuffersink_params_alloc_delegate(); + public static av_abuffersink_params_alloc_delegate av_abuffersink_params_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_add_index_entry_delegate(AVStream* @st, long @pos, long @timestamp, int @size, int @distance, int @flags); + public static av_add_index_entry_delegate av_add_index_entry; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_add_q_delegate(AVRational @b, AVRational @c); + public static av_add_q_delegate av_add_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_add_stable_delegate(AVRational @ts_tb, long @ts, AVRational @inc_tb, long @inc); + public static av_add_stable_delegate av_add_stable; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_append_packet_delegate(AVIOContext* @s, AVPacket* @pkt, int @size); + public static av_append_packet_delegate av_append_packet; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVAudioFifo* av_audio_fifo_alloc_delegate(AVSampleFormat @sample_fmt, int @channels, int @nb_samples); + public static av_audio_fifo_alloc_delegate av_audio_fifo_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_drain_delegate(AVAudioFifo* @af, int @nb_samples); + public static av_audio_fifo_drain_delegate av_audio_fifo_drain; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_audio_fifo_free_delegate(AVAudioFifo* @af); + public static av_audio_fifo_free_delegate av_audio_fifo_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_peek_delegate(AVAudioFifo* @af, void** @data, int @nb_samples); + public static av_audio_fifo_peek_delegate av_audio_fifo_peek; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_peek_at_delegate(AVAudioFifo* @af, void** @data, int @nb_samples, int @offset); + public static av_audio_fifo_peek_at_delegate av_audio_fifo_peek_at; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_read_delegate(AVAudioFifo* @af, void** @data, int @nb_samples); + public static av_audio_fifo_read_delegate av_audio_fifo_read; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_realloc_delegate(AVAudioFifo* @af, int @nb_samples); + public static av_audio_fifo_realloc_delegate av_audio_fifo_realloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_audio_fifo_reset_delegate(AVAudioFifo* @af); + public static av_audio_fifo_reset_delegate av_audio_fifo_reset; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_size_delegate(AVAudioFifo* @af); + public static av_audio_fifo_size_delegate av_audio_fifo_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
av_audio_fifo_space_delegate(AVAudioFifo* @af); + public static av_audio_fifo_space_delegate av_audio_fifo_space; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_write_delegate(AVAudioFifo* @af, void** @data, int @nb_samples); + public static av_audio_fifo_write_delegate av_audio_fifo_write; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_bprint_channel_layout_delegate(AVBPrint* @bp, int @nb_channels, ulong @channel_layout); + public static av_bprint_channel_layout_delegate av_bprint_channel_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_alloc_delegate(AVBitStreamFilter* @filter, AVBSFContext** @ctx); + public static av_bsf_alloc_delegate av_bsf_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_bsf_flush_delegate(AVBSFContext* @ctx); + public static av_bsf_flush_delegate av_bsf_flush; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_bsf_free_delegate(AVBSFContext** @ctx); + public static av_bsf_free_delegate av_bsf_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBitStreamFilter* av_bsf_get_by_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_bsf_get_by_name_delegate av_bsf_get_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* av_bsf_get_class_delegate(); + public static av_bsf_get_class_delegate av_bsf_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_get_null_filter_delegate(AVBSFContext** @bsf); + public static av_bsf_get_null_filter_delegate av_bsf_get_null_filter; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_init_delegate(AVBSFContext* 
@ctx); + public static av_bsf_init_delegate av_bsf_init; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBitStreamFilter* av_bsf_iterate_delegate(void** @opaque); + public static av_bsf_iterate_delegate av_bsf_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBSFList* av_bsf_list_alloc_delegate(); + public static av_bsf_list_alloc_delegate av_bsf_list_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_list_append_delegate(AVBSFList* @lst, AVBSFContext* @bsf); + public static av_bsf_list_append_delegate av_bsf_list_append; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_list_append2_delegate(AVBSFList* @lst, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @bsf_name, AVDictionary** @options); + public static av_bsf_list_append2_delegate av_bsf_list_append2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_list_finalize_delegate(AVBSFList** @lst, AVBSFContext** @bsf); + public static av_bsf_list_finalize_delegate av_bsf_list_finalize; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_bsf_list_free_delegate(AVBSFList** @lst); + public static av_bsf_list_free_delegate av_bsf_list_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_list_parse_str_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str, AVBSFContext** @bsf); + public static av_bsf_list_parse_str_delegate av_bsf_list_parse_str; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_receive_packet_delegate(AVBSFContext* @ctx, AVPacket* @pkt); + public static 
av_bsf_receive_packet_delegate av_bsf_receive_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_send_packet_delegate(AVBSFContext* @ctx, AVPacket* @pkt); + public static av_bsf_send_packet_delegate av_bsf_send_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_buffer_alloc_delegate(ulong @size); + public static av_buffer_alloc_delegate av_buffer_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_buffer_allocz_delegate(ulong @size); + public static av_buffer_allocz_delegate av_buffer_allocz; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_buffer_create_delegate(byte* @data, ulong @size, av_buffer_create_free_func @free, void* @opaque, int @flags); + public static av_buffer_create_delegate av_buffer_create; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_buffer_default_free_delegate(void* @opaque, byte* @data); + public static av_buffer_default_free_delegate av_buffer_default_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_buffer_get_opaque_delegate(AVBufferRef* @buf); + public static av_buffer_get_opaque_delegate av_buffer_get_opaque; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffer_get_ref_count_delegate(AVBufferRef* @buf); + public static av_buffer_get_ref_count_delegate av_buffer_get_ref_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffer_is_writable_delegate(AVBufferRef* @buf); + public static av_buffer_is_writable_delegate av_buffer_is_writable; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffer_make_writable_delegate(AVBufferRef** @buf); + public static av_buffer_make_writable_delegate av_buffer_make_writable; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* 
av_buffer_pool_buffer_get_opaque_delegate(AVBufferRef* @ref); + public static av_buffer_pool_buffer_get_opaque_delegate av_buffer_pool_buffer_get_opaque; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_buffer_pool_get_delegate(AVBufferPool* @pool); + public static av_buffer_pool_get_delegate av_buffer_pool_get; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferPool* av_buffer_pool_init_delegate(ulong @size, av_buffer_pool_init_alloc_func @alloc); + public static av_buffer_pool_init_delegate av_buffer_pool_init; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferPool* av_buffer_pool_init2_delegate(ulong @size, void* @opaque, av_buffer_pool_init2_alloc_func @alloc, av_buffer_pool_init2_pool_free_func @pool_free); + public static av_buffer_pool_init2_delegate av_buffer_pool_init2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_buffer_pool_uninit_delegate(AVBufferPool** @pool); + public static av_buffer_pool_uninit_delegate av_buffer_pool_uninit; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffer_realloc_delegate(AVBufferRef** @buf, ulong @size); + public static av_buffer_realloc_delegate av_buffer_realloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_buffer_ref_delegate(AVBufferRef* @buf); + public static av_buffer_ref_delegate av_buffer_ref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffer_replace_delegate(AVBufferRef** @dst, AVBufferRef* @src); + public static av_buffer_replace_delegate av_buffer_replace; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_buffer_unref_delegate(AVBufferRef** @buf); + public static av_buffer_unref_delegate av_buffer_unref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
av_buffersink_get_ch_layout_delegate(AVFilterContext* @ctx, AVChannelLayout* @ch_layout); + public static av_buffersink_get_ch_layout_delegate av_buffersink_get_ch_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong av_buffersink_get_channel_layout_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_channel_layout_delegate av_buffersink_get_channel_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_channels_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_channels_delegate av_buffersink_get_channels; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_format_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_format_delegate av_buffersink_get_format; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_frame_delegate(AVFilterContext* @ctx, AVFrame* @frame); + public static av_buffersink_get_frame_delegate av_buffersink_get_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_frame_flags_delegate(AVFilterContext* @ctx, AVFrame* @frame, int @flags); + public static av_buffersink_get_frame_flags_delegate av_buffersink_get_frame_flags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_buffersink_get_frame_rate_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_frame_rate_delegate av_buffersink_get_frame_rate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_h_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_h_delegate av_buffersink_get_h; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_buffersink_get_hw_frames_ctx_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_hw_frames_ctx_delegate av_buffersink_get_hw_frames_ctx; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_buffersink_get_sample_aspect_ratio_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_sample_aspect_ratio_delegate av_buffersink_get_sample_aspect_ratio; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_sample_rate_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_sample_rate_delegate av_buffersink_get_sample_rate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_samples_delegate(AVFilterContext* @ctx, AVFrame* @frame, int @nb_samples); + public static av_buffersink_get_samples_delegate av_buffersink_get_samples; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_buffersink_get_time_base_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_time_base_delegate av_buffersink_get_time_base; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVMediaType av_buffersink_get_type_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_type_delegate av_buffersink_get_type; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_w_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_w_delegate av_buffersink_get_w; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferSinkParams* av_buffersink_params_alloc_delegate(); + public static av_buffersink_params_alloc_delegate av_buffersink_params_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_buffersink_set_frame_size_delegate(AVFilterContext* @ctx, uint @frame_size); + public static av_buffersink_set_frame_size_delegate av_buffersink_set_frame_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersrc_add_frame_delegate(AVFilterContext* @ctx, AVFrame* @frame); + public static 
av_buffersrc_add_frame_delegate av_buffersrc_add_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersrc_add_frame_flags_delegate(AVFilterContext* @buffer_src, AVFrame* @frame, int @flags); + public static av_buffersrc_add_frame_flags_delegate av_buffersrc_add_frame_flags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersrc_close_delegate(AVFilterContext* @ctx, long @pts, uint @flags); + public static av_buffersrc_close_delegate av_buffersrc_close; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint av_buffersrc_get_nb_failed_requests_delegate(AVFilterContext* @buffer_src); + public static av_buffersrc_get_nb_failed_requests_delegate av_buffersrc_get_nb_failed_requests; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferSrcParameters* av_buffersrc_parameters_alloc_delegate(); + public static av_buffersrc_parameters_alloc_delegate av_buffersrc_parameters_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersrc_parameters_set_delegate(AVFilterContext* @ctx, AVBufferSrcParameters* @param); + public static av_buffersrc_parameters_set_delegate av_buffersrc_parameters_set; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersrc_write_frame_delegate(AVFilterContext* @ctx, AVFrame* @frame); + public static av_buffersrc_write_frame_delegate av_buffersrc_write_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_calloc_delegate(ulong @nmemb, ulong @size); + public static av_calloc_delegate av_calloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_description_delegate(byte* @buf, ulong @buf_size, AVChannel @channel); + public static av_channel_description_delegate av_channel_description; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void 
av_channel_description_bprint_delegate(AVBPrint* @bp, AVChannel @channel_id); + public static av_channel_description_bprint_delegate av_channel_description_bprint; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVChannel av_channel_from_string_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_channel_from_string_delegate av_channel_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVChannel av_channel_layout_channel_from_index_delegate(AVChannelLayout* @channel_layout, uint @idx); + public static av_channel_layout_channel_from_index_delegate av_channel_layout_channel_from_index; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVChannel av_channel_layout_channel_from_string_delegate(AVChannelLayout* @channel_layout, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_channel_layout_channel_from_string_delegate av_channel_layout_channel_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_check_delegate(AVChannelLayout* @channel_layout); + public static av_channel_layout_check_delegate av_channel_layout_check; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_compare_delegate(AVChannelLayout* @chl, AVChannelLayout* @chl1); + public static av_channel_layout_compare_delegate av_channel_layout_compare; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_copy_delegate(AVChannelLayout* @dst, AVChannelLayout* @src); + public static av_channel_layout_copy_delegate av_channel_layout_copy; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_channel_layout_default_delegate(AVChannelLayout* @ch_layout, int @nb_channels); + public static av_channel_layout_default_delegate av_channel_layout_default; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_describe_delegate(AVChannelLayout* @channel_layout, byte* @buf, ulong @buf_size); + public static av_channel_layout_describe_delegate av_channel_layout_describe; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_describe_bprint_delegate(AVChannelLayout* @channel_layout, AVBPrint* @bp); + public static av_channel_layout_describe_bprint_delegate av_channel_layout_describe_bprint; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong av_channel_layout_extract_channel_delegate(ulong @channel_layout, int @index); + public static av_channel_layout_extract_channel_delegate av_channel_layout_extract_channel; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_from_mask_delegate(AVChannelLayout* @channel_layout, ulong @mask); + public static av_channel_layout_from_mask_delegate av_channel_layout_from_mask; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_from_string_delegate(AVChannelLayout* @channel_layout, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + public static av_channel_layout_from_string_delegate av_channel_layout_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_index_from_channel_delegate(AVChannelLayout* @channel_layout, AVChannel @channel); + public static av_channel_layout_index_from_channel_delegate av_channel_layout_index_from_channel; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_index_from_string_delegate(AVChannelLayout* @channel_layout, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_channel_layout_index_from_string_delegate av_channel_layout_index_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVChannelLayout* av_channel_layout_standard_delegate(void** @opaque); + public static av_channel_layout_standard_delegate av_channel_layout_standard; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong av_channel_layout_subset_delegate(AVChannelLayout* @channel_layout, ulong @mask); + public static av_channel_layout_subset_delegate av_channel_layout_subset; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_channel_layout_uninit_delegate(AVChannelLayout* @channel_layout); + public static av_channel_layout_uninit_delegate av_channel_layout_uninit; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_name_delegate(byte* @buf, ulong @buf_size, AVChannel @channel); + public static av_channel_name_delegate av_channel_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_channel_name_bprint_delegate(AVBPrint* @bp, AVChannel @channel_id); + public static av_channel_name_bprint_delegate av_channel_name_bprint; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_chroma_location_from_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_chroma_location_from_name_delegate av_chroma_location_from_name; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_chroma_location_name_delegate(AVChromaLocation @location); + public static av_chroma_location_name_delegate av_chroma_location_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecID av_codec_get_id_delegate(AVCodecTag** @tags, uint @tag); + public static av_codec_get_id_delegate av_codec_get_id; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint av_codec_get_tag_delegate(AVCodecTag** @tags, AVCodecID @id); + public static av_codec_get_tag_delegate av_codec_get_tag; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_codec_get_tag2_delegate(AVCodecTag** @tags, AVCodecID @id, uint* @tag); + public static av_codec_get_tag2_delegate av_codec_get_tag2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_codec_is_decoder_delegate(AVCodec* @codec); + public static av_codec_is_decoder_delegate av_codec_is_decoder; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_codec_is_encoder_delegate(AVCodec* @codec); + public static av_codec_is_encoder_delegate av_codec_is_encoder; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodec* av_codec_iterate_delegate(void** @opaque); + public static av_codec_iterate_delegate av_codec_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_color_primaries_from_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_color_primaries_from_name_delegate av_color_primaries_from_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, 
MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_color_primaries_name_delegate(AVColorPrimaries @primaries); + public static av_color_primaries_name_delegate av_color_primaries_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_color_range_from_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_color_range_from_name_delegate av_color_range_from_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_color_range_name_delegate(AVColorRange @range); + public static av_color_range_name_delegate av_color_range_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_color_space_from_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_color_space_from_name_delegate av_color_space_from_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_color_space_name_delegate(AVColorSpace @space); + public static av_color_space_name_delegate av_color_space_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_color_transfer_from_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_color_transfer_from_name_delegate av_color_transfer_from_name; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_color_transfer_name_delegate(AVColorTransferCharacteristic @transfer); + public static av_color_transfer_name_delegate av_color_transfer_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_compare_mod_delegate(ulong @a, ulong @b, ulong @mod); + public static av_compare_mod_delegate av_compare_mod; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_compare_ts_delegate(long @ts_a, AVRational @tb_a, long @ts_b, AVRational @tb_b); + public static av_compare_ts_delegate av_compare_ts; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVContentLightMetadata* av_content_light_metadata_alloc_delegate(ulong* @size); + public static av_content_light_metadata_alloc_delegate av_content_light_metadata_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVContentLightMetadata* av_content_light_metadata_create_side_data_delegate(AVFrame* @frame); + public static av_content_light_metadata_create_side_data_delegate av_content_light_metadata_create_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCPBProperties* av_cpb_properties_alloc_delegate(ulong* @size); + public static av_cpb_properties_alloc_delegate av_cpb_properties_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_cpu_count_delegate(); + public static av_cpu_count_delegate av_cpu_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_cpu_force_count_delegate(int @count); + public static av_cpu_force_count_delegate av_cpu_force_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong av_cpu_max_align_delegate(); + public static av_cpu_max_align_delegate av_cpu_max_align; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_d2q_delegate(double @d, int @max); + public static av_d2q_delegate av_d2q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVD3D11VAContext* av_d3d11va_alloc_context_delegate(); + public static av_d3d11va_alloc_context_delegate av_d3d11va_alloc_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClassCategory av_default_get_category_delegate(void* @ptr); + public static av_default_get_category_delegate av_default_get_category; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_default_item_name_delegate(void* @ctx); + public static av_default_item_name_delegate av_default_item_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVInputFormat* av_demuxer_iterate_delegate(void** @opaque); + public static av_demuxer_iterate_delegate av_demuxer_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_dict_copy_delegate(AVDictionary** @dst, AVDictionary* @src, int @flags); + public static av_dict_copy_delegate av_dict_copy; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_dict_count_delegate(AVDictionary* @m); + public static av_dict_count_delegate av_dict_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_dict_free_delegate(AVDictionary** @m); + public static av_dict_free_delegate av_dict_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVDictionaryEntry* av_dict_get_delegate(AVDictionary* @m, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, AVDictionaryEntry* @prev, int @flags); + public static 
av_dict_get_delegate av_dict_get; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_dict_get_string_delegate(AVDictionary* @m, byte** @buffer, byte @key_val_sep, byte @pairs_sep); + public static av_dict_get_string_delegate av_dict_get_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_dict_parse_string_delegate(AVDictionary** @pm, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @pairs_sep, int @flags); + public static av_dict_parse_string_delegate av_dict_parse_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_dict_set_delegate(AVDictionary** @pm, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @value, int @flags); + public static av_dict_set_delegate av_dict_set; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_dict_set_int_delegate(AVDictionary** @pm, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, long @value, int @flags); + public static av_dict_set_int_delegate 
av_dict_set_int; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_disposition_from_string_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @disp); + public static av_disposition_from_string_delegate av_disposition_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_disposition_to_string_delegate(int @disposition); + public static av_disposition_to_string_delegate av_disposition_to_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_div_q_delegate(AVRational @b, AVRational @c); + public static av_div_q_delegate av_div_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_dump_format_delegate(AVFormatContext* @ic, int @index, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @is_output); + public static av_dump_format_delegate av_dump_format; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVDynamicHDRPlus* av_dynamic_hdr_plus_alloc_delegate(ulong* @size); + public static av_dynamic_hdr_plus_alloc_delegate av_dynamic_hdr_plus_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVDynamicHDRPlus* av_dynamic_hdr_plus_create_side_data_delegate(AVFrame* @frame); + public static av_dynamic_hdr_plus_create_side_data_delegate av_dynamic_hdr_plus_create_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_dynarray_add_delegate(void* @tab_ptr, int* @nb_ptr, void* @elem); + public static av_dynarray_add_delegate av_dynarray_add; + + 
// --- Native function-pointer table (auto-generated interop surface) --------------------------------
// Each FFmpeg entry point below is mirrored as a Cdecl delegate type plus a mutable static field of
// that type. Nothing in this section assigns the fields — presumably a dynamic-library loader
// elsewhere in the project populates them at runtime (TODO confirm against the loader).
// NOTE(review): managed 'string' parameters marshal as UTF-8 — [LPUTF8Str] on netstandard2.1+,
// otherwise via the project's custom UTF8Marshaler. 'string' returns go through
// ConstCharPtrMarshaler, which reads the native const char* without freeing it.

// av_dynarray_*: libavutil dynamic-array helpers.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_dynarray_add_nofree_delegate(void* @tab_ptr, int* @nb_ptr, void* @elem);
public static av_dynarray_add_nofree_delegate av_dynarray_add_nofree;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void* av_dynarray2_add_delegate(void** @tab_ptr, int* @nb_ptr, ulong @elem_size, byte* @elem_data);
public static av_dynarray2_add_delegate av_dynarray2_add;

// av_fast_*: grow-only (re)allocation helpers.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_fast_malloc_delegate(void* @ptr, uint* @size, ulong @min_size);
public static av_fast_malloc_delegate av_fast_malloc;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_fast_mallocz_delegate(void* @ptr, uint* @size, ulong @min_size);
public static av_fast_mallocz_delegate av_fast_mallocz;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_fast_padded_malloc_delegate(void* @ptr, uint* @size, ulong @min_size);
public static av_fast_padded_malloc_delegate av_fast_padded_malloc;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_fast_padded_mallocz_delegate(void* @ptr, uint* @size, ulong @min_size);
public static av_fast_padded_mallocz_delegate av_fast_padded_mallocz;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void* av_fast_realloc_delegate(void* @ptr, uint* @size, ulong @min_size);
public static av_fast_realloc_delegate av_fast_realloc;

// av_file_*: whole-file memory mapping.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_file_map_delegate(
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @filename, byte** @bufptr, ulong* @size, int @log_offset, void* @log_ctx);
public static av_file_map_delegate av_file_map;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_file_unmap_delegate(byte* @bufptr, ulong @size);
public static av_file_unmap_delegate av_file_unmap;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_filename_number_test_delegate(
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @filename);
public static av_filename_number_test_delegate av_filename_number_test;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVFilter* av_filter_iterate_delegate(void** @opaque);
public static av_filter_iterate_delegate av_filter_iterate;

// av_find_*: lookup/search helpers across libavformat/libavutil.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVPixelFormat av_find_best_pix_fmt_of_2_delegate(AVPixelFormat @dst_pix_fmt1, AVPixelFormat @dst_pix_fmt2, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr);
public static av_find_best_pix_fmt_of_2_delegate av_find_best_pix_fmt_of_2;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_find_best_stream_delegate(AVFormatContext* @ic, AVMediaType @type, int @wanted_stream_nb, int @related_stream, AVCodec** @decoder_ret, int @flags);
public static av_find_best_stream_delegate av_find_best_stream;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_find_default_stream_index_delegate(AVFormatContext* @s);
public static av_find_default_stream_index_delegate av_find_default_stream_index;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVInputFormat* av_find_input_format_delegate(
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @short_name);
public static av_find_input_format_delegate av_find_input_format;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_find_nearest_q_idx_delegate(AVRational @q, AVRational* @q_list);
public static av_find_nearest_q_idx_delegate av_find_nearest_q_idx;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVProgram* av_find_program_from_stream_delegate(AVFormatContext* @ic, AVProgram* @last, int @s);
public static av_find_program_from_stream_delegate av_find_program_from_stream;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVDurationEstimationMethod av_fmt_ctx_get_duration_estimation_method_delegate(AVFormatContext* @ctx);
public static av_fmt_ctx_get_duration_estimation_method_delegate av_fmt_ctx_get_duration_estimation_method;

// Returns a C FILE* (mapped to _iobuf*).
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate _iobuf* av_fopen_utf8_delegate(
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @path,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @mode);
public static av_fopen_utf8_delegate av_fopen_utf8;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_force_cpu_flags_delegate(int @flags);
public static av_force_cpu_flags_delegate av_force_cpu_flags;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_format_inject_global_side_data_delegate(AVFormatContext* @s);
public static av_format_inject_global_side_data_delegate av_format_inject_global_side_data;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate byte* av_fourcc_make_string_delegate(byte* @buf, uint @fourcc);
public static av_fourcc_make_string_delegate av_fourcc_make_string;

// av_frame_*: AVFrame allocation, reference counting, and side-data management.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVFrame* av_frame_alloc_delegate();
public static av_frame_alloc_delegate av_frame_alloc;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_frame_apply_cropping_delegate(AVFrame* @frame, int @flags);
public static av_frame_apply_cropping_delegate av_frame_apply_cropping;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVFrame* av_frame_clone_delegate(AVFrame* @src);
public static av_frame_clone_delegate av_frame_clone;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_frame_copy_delegate(AVFrame* @dst, AVFrame* @src);
public static av_frame_copy_delegate av_frame_copy;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_frame_copy_props_delegate(AVFrame* @dst, AVFrame* @src);
public static av_frame_copy_props_delegate av_frame_copy_props;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_frame_free_delegate(AVFrame** @frame);
public static av_frame_free_delegate av_frame_free;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_frame_get_buffer_delegate(AVFrame* @frame, int @align);
public static av_frame_get_buffer_delegate av_frame_get_buffer;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVBufferRef* av_frame_get_plane_buffer_delegate(AVFrame* @frame, int @plane);
public static av_frame_get_plane_buffer_delegate av_frame_get_plane_buffer;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVFrameSideData* av_frame_get_side_data_delegate(AVFrame* @frame, AVFrameSideDataType @type);
public static av_frame_get_side_data_delegate av_frame_get_side_data;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_frame_is_writable_delegate(AVFrame* @frame);
public static av_frame_is_writable_delegate av_frame_is_writable;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_frame_make_writable_delegate(AVFrame* @frame);
public static av_frame_make_writable_delegate av_frame_make_writable;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_frame_move_ref_delegate(AVFrame* @dst, AVFrame* @src);
public static av_frame_move_ref_delegate av_frame_move_ref;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVFrameSideData* av_frame_new_side_data_delegate(AVFrame* @frame, AVFrameSideDataType @type, ulong @size);
public static av_frame_new_side_data_delegate av_frame_new_side_data;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVFrameSideData* av_frame_new_side_data_from_buf_delegate(AVFrame* @frame, AVFrameSideDataType @type, AVBufferRef* @buf);
public static av_frame_new_side_data_from_buf_delegate av_frame_new_side_data_from_buf;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_frame_ref_delegate(AVFrame* @dst, AVFrame* @src);
public static av_frame_ref_delegate av_frame_ref;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_frame_remove_side_data_delegate(AVFrame* @frame, AVFrameSideDataType @type);
public static av_frame_remove_side_data_delegate av_frame_remove_side_data;

// Native returns a static const char*; marshaled to a managed string without freeing.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string av_frame_side_data_name_delegate(AVFrameSideDataType @type);
public static av_frame_side_data_name_delegate av_frame_side_data_name;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_frame_unref_delegate(AVFrame* @frame);
public static av_frame_unref_delegate av_frame_unref;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_free_delegate(void* @ptr);
public static av_free_delegate av_free;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_freep_delegate(void* @ptr);
public static av_freep_delegate av_freep;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate long av_gcd_delegate(long @a, long @b);
public static av_gcd_delegate av_gcd;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVRational av_gcd_q_delegate(AVRational @a, AVRational @b, int @max_den, AVRational @def);
public static av_gcd_q_delegate av_gcd_q;

// av_get_*: query helpers for sample formats, channel layouts, and codec properties.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVSampleFormat av_get_alt_sample_fmt_delegate(AVSampleFormat @sample_fmt, int @planar);
public static av_get_alt_sample_fmt_delegate av_get_alt_sample_fmt;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_audio_frame_duration_delegate(AVCodecContext* @avctx, int @frame_bytes);
public static av_get_audio_frame_duration_delegate av_get_audio_frame_duration;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_audio_frame_duration2_delegate(AVCodecParameters* @par, int @frame_bytes);
public static av_get_audio_frame_duration2_delegate av_get_audio_frame_duration2;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_bits_per_pixel_delegate(AVPixFmtDescriptor* @pixdesc);
public static av_get_bits_per_pixel_delegate av_get_bits_per_pixel;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_bits_per_sample_delegate(AVCodecID @codec_id);
public static av_get_bits_per_sample_delegate av_get_bits_per_sample;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_bytes_per_sample_delegate(AVSampleFormat @sample_fmt);
public static av_get_bytes_per_sample_delegate av_get_bytes_per_sample;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string av_get_channel_description_delegate(ulong @channel);
public static av_get_channel_description_delegate av_get_channel_description;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate ulong av_get_channel_layout_delegate(
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @name);
public static av_get_channel_layout_delegate av_get_channel_layout;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_channel_layout_channel_index_delegate(ulong @channel_layout, ulong @channel);
public static av_get_channel_layout_channel_index_delegate av_get_channel_layout_channel_index;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_channel_layout_nb_channels_delegate(ulong @channel_layout);
public static av_get_channel_layout_nb_channels_delegate av_get_channel_layout_nb_channels;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_get_channel_layout_string_delegate(byte* @buf, int @buf_size, int @nb_channels, ulong @channel_layout);
public static av_get_channel_layout_string_delegate av_get_channel_layout_string;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string av_get_channel_name_delegate(ulong @channel);
public static av_get_channel_name_delegate av_get_channel_name;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string av_get_colorspace_name_delegate(AVColorSpace @val);
public static av_get_colorspace_name_delegate av_get_colorspace_name;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_cpu_flags_delegate();
public static av_get_cpu_flags_delegate av_get_cpu_flags;
// Continuation of the av_get_* query bindings (delegate type + loader-populated static field pairs).
// UTF-8 string parameters: [LPUTF8Str] on netstandard2.1+, custom UTF8Marshaler otherwise.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate long av_get_default_channel_layout_delegate(int @nb_channels);
public static av_get_default_channel_layout_delegate av_get_default_channel_layout;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_exact_bits_per_sample_delegate(AVCodecID @codec_id);
public static av_get_exact_bits_per_sample_delegate av_get_exact_bits_per_sample;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_extended_channel_layout_delegate(
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @name, ulong* @channel_layout, int* @nb_channels);
public static av_get_extended_channel_layout_delegate av_get_extended_channel_layout;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_frame_filename_delegate(byte* @buf, int @buf_size,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @path, int @number);
public static av_get_frame_filename_delegate av_get_frame_filename;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_frame_filename2_delegate(byte* @buf, int @buf_size,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @path, int @number, int @flags);
public static av_get_frame_filename2_delegate av_get_frame_filename2;

// Native returns a static const char*; ConstCharPtrMarshaler copies it without freeing.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string av_get_media_type_string_delegate(AVMediaType @media_type);
public static av_get_media_type_string_delegate av_get_media_type_string;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_output_timestamp_delegate(AVFormatContext* @s, int @stream, long* @dts, long* @wall);
public static av_get_output_timestamp_delegate av_get_output_timestamp;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVSampleFormat av_get_packed_sample_fmt_delegate(AVSampleFormat @sample_fmt);
public static av_get_packed_sample_fmt_delegate av_get_packed_sample_fmt;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_packet_delegate(AVIOContext* @s, AVPacket* @pkt, int @size);
public static av_get_packet_delegate av_get_packet;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_padded_bits_per_pixel_delegate(AVPixFmtDescriptor* @pixdesc);
public static av_get_padded_bits_per_pixel_delegate av_get_padded_bits_per_pixel;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVCodecID av_get_pcm_codec_delegate(AVSampleFormat @fmt, int @be);
public static av_get_pcm_codec_delegate av_get_pcm_codec;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate byte av_get_picture_type_char_delegate(AVPictureType @pict_type);
public static av_get_picture_type_char_delegate av_get_picture_type_char;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVPixelFormat av_get_pix_fmt_delegate(
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @name);
public static av_get_pix_fmt_delegate av_get_pix_fmt;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_pix_fmt_loss_delegate(AVPixelFormat @dst_pix_fmt, AVPixelFormat @src_pix_fmt, int @has_alpha);
public static av_get_pix_fmt_loss_delegate av_get_pix_fmt_loss;
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_get_pix_fmt_name_delegate(AVPixelFormat @pix_fmt); + public static av_get_pix_fmt_name_delegate av_get_pix_fmt_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_get_pix_fmt_string_delegate(byte* @buf, int @buf_size, AVPixelFormat @pix_fmt); + public static av_get_pix_fmt_string_delegate av_get_pix_fmt_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVSampleFormat av_get_planar_sample_fmt_delegate(AVSampleFormat @sample_fmt); + public static av_get_planar_sample_fmt_delegate av_get_planar_sample_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_get_profile_name_delegate(AVCodec* @codec, int @profile); + public static av_get_profile_name_delegate av_get_profile_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVSampleFormat av_get_sample_fmt_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_get_sample_fmt_delegate av_get_sample_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_get_sample_fmt_name_delegate(AVSampleFormat @sample_fmt); + public static av_get_sample_fmt_name_delegate av_get_sample_fmt_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_get_sample_fmt_string_delegate(byte* @buf, int @buf_size, AVSampleFormat @sample_fmt); + public static av_get_sample_fmt_string_delegate av_get_sample_fmt_string; + 
// Timing, format-guessing, hardware-device/frame (av_hwdevice_*/av_hwframe_*), image (av_image_*),
// and logging bindings. Same pattern throughout: a Cdecl delegate type per native entry point plus a
// mutable static field, presumably assigned by the runtime loader (TODO confirm).
// byte_ptr4 / int4 / long4 / ulong4 are the project's fixed-size-array wrapper structs passed by
// ref/in to mirror the native C array parameters.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_get_standard_channel_layout_delegate(uint @index, ulong* @layout, byte** @name);
public static av_get_standard_channel_layout_delegate av_get_standard_channel_layout;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVRational av_get_time_base_q_delegate();
public static av_get_time_base_q_delegate av_get_time_base_q;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate long av_gettime_delegate();
public static av_gettime_delegate av_gettime;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate long av_gettime_relative_delegate();
public static av_gettime_relative_delegate av_gettime_relative;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_gettime_relative_is_monotonic_delegate();
public static av_gettime_relative_is_monotonic_delegate av_gettime_relative_is_monotonic;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_grow_packet_delegate(AVPacket* @pkt, int @grow_by);
public static av_grow_packet_delegate av_grow_packet;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVCodecID av_guess_codec_delegate(AVOutputFormat* @fmt,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @short_name,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @filename,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @mime_type, AVMediaType @type);
public static av_guess_codec_delegate av_guess_codec;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVOutputFormat* av_guess_format_delegate(
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @short_name,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @filename,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @mime_type);
public static av_guess_format_delegate av_guess_format;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVRational av_guess_frame_rate_delegate(AVFormatContext* @ctx, AVStream* @stream, AVFrame* @frame);
public static av_guess_frame_rate_delegate av_guess_frame_rate;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVRational av_guess_sample_aspect_ratio_delegate(AVFormatContext* @format, AVStream* @stream, AVFrame* @frame);
public static av_guess_sample_aspect_ratio_delegate av_guess_sample_aspect_ratio;

// _iobuf* is the C FILE* mapping.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_hex_dump_delegate(_iobuf* @f, byte* @buf, int @size);
public static av_hex_dump_delegate av_hex_dump;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_hex_dump_log_delegate(void* @avcl, int @level, byte* @buf, int @size);
public static av_hex_dump_log_delegate av_hex_dump_log;

// av_hwdevice_*: hardware device context management.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVBufferRef* av_hwdevice_ctx_alloc_delegate(AVHWDeviceType @type);
public static av_hwdevice_ctx_alloc_delegate av_hwdevice_ctx_alloc;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_hwdevice_ctx_create_delegate(AVBufferRef** @device_ctx, AVHWDeviceType @type,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @device, AVDictionary* @opts, int @flags);
public static av_hwdevice_ctx_create_delegate av_hwdevice_ctx_create;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_hwdevice_ctx_create_derived_delegate(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, int @flags);
public static av_hwdevice_ctx_create_derived_delegate av_hwdevice_ctx_create_derived;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_hwdevice_ctx_create_derived_opts_delegate(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, AVDictionary* @options, int @flags);
public static av_hwdevice_ctx_create_derived_opts_delegate av_hwdevice_ctx_create_derived_opts;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_hwdevice_ctx_init_delegate(AVBufferRef* @ref);
public static av_hwdevice_ctx_init_delegate av_hwdevice_ctx_init;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVHWDeviceType av_hwdevice_find_type_by_name_delegate(
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @name);
public static av_hwdevice_find_type_by_name_delegate av_hwdevice_find_type_by_name;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVHWFramesConstraints* av_hwdevice_get_hwframe_constraints_delegate(AVBufferRef* @ref, void* @hwconfig);
public static av_hwdevice_get_hwframe_constraints_delegate av_hwdevice_get_hwframe_constraints;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string av_hwdevice_get_type_name_delegate(AVHWDeviceType @type);
public static av_hwdevice_get_type_name_delegate av_hwdevice_get_type_name;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void* av_hwdevice_hwconfig_alloc_delegate(AVBufferRef* @device_ctx);
public static av_hwdevice_hwconfig_alloc_delegate av_hwdevice_hwconfig_alloc;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVHWDeviceType av_hwdevice_iterate_types_delegate(AVHWDeviceType @prev);
public static av_hwdevice_iterate_types_delegate av_hwdevice_iterate_types;

// av_hwframe_*: hardware frame context and transfer helpers.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_hwframe_constraints_free_delegate(AVHWFramesConstraints** @constraints);
public static av_hwframe_constraints_free_delegate av_hwframe_constraints_free;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVBufferRef* av_hwframe_ctx_alloc_delegate(AVBufferRef* @device_ctx);
public static av_hwframe_ctx_alloc_delegate av_hwframe_ctx_alloc;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_hwframe_ctx_create_derived_delegate(AVBufferRef** @derived_frame_ctx, AVPixelFormat @format, AVBufferRef* @derived_device_ctx, AVBufferRef* @source_frame_ctx, int @flags);
public static av_hwframe_ctx_create_derived_delegate av_hwframe_ctx_create_derived;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_hwframe_ctx_init_delegate(AVBufferRef* @ref);
public static av_hwframe_ctx_init_delegate av_hwframe_ctx_init;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_hwframe_get_buffer_delegate(AVBufferRef* @hwframe_ctx, AVFrame* @frame, int @flags);
public static av_hwframe_get_buffer_delegate av_hwframe_get_buffer;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_hwframe_map_delegate(AVFrame* @dst, AVFrame* @src, int @flags);
public static av_hwframe_map_delegate av_hwframe_map;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_hwframe_transfer_data_delegate(AVFrame* @dst, AVFrame* @src, int @flags);
public static av_hwframe_transfer_data_delegate av_hwframe_transfer_data;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_hwframe_transfer_get_formats_delegate(AVBufferRef* @hwframe_ctx, AVHWFrameTransferDirection @dir, AVPixelFormat** @formats, int @flags);
public static av_hwframe_transfer_get_formats_delegate av_hwframe_transfer_get_formats;

// av_image_*: raw image buffer allocation, copying, and layout computation.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_image_alloc_delegate(ref byte_ptr4 @pointers, ref int4 @linesizes, int @w, int @h, AVPixelFormat @pix_fmt, int @align);
public static av_image_alloc_delegate av_image_alloc;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_image_check_sar_delegate(uint @w, uint @h, AVRational @sar);
public static av_image_check_sar_delegate av_image_check_sar;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_image_check_size_delegate(uint @w, uint @h, int @log_offset, void* @log_ctx);
public static av_image_check_size_delegate av_image_check_size;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_image_check_size2_delegate(uint @w, uint @h, long @max_pixels, AVPixelFormat @pix_fmt, int @log_offset, void* @log_ctx);
public static av_image_check_size2_delegate av_image_check_size2;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_image_copy_delegate(ref byte_ptr4 @dst_data, ref int4 @dst_linesizes, in byte_ptr4 @src_data, in int4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height);
public static av_image_copy_delegate av_image_copy;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_image_copy_plane_delegate(byte* @dst, int @dst_linesize, byte* @src, int @src_linesize, int @bytewidth, int @height);
public static av_image_copy_plane_delegate av_image_copy_plane;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_image_copy_plane_uc_from_delegate(byte* @dst, long @dst_linesize, byte* @src, long @src_linesize, long @bytewidth, int @height);
public static av_image_copy_plane_uc_from_delegate av_image_copy_plane_uc_from;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_image_copy_to_buffer_delegate(byte* @dst, int @dst_size, in byte_ptr4 @src_data, in int4 @src_linesize, AVPixelFormat @pix_fmt, int @width, int @height, int @align);
public static av_image_copy_to_buffer_delegate av_image_copy_to_buffer;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_image_copy_uc_from_delegate(ref byte_ptr4 @dst_data, in long4 @dst_linesizes, in byte_ptr4 @src_data, in long4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height);
public static av_image_copy_uc_from_delegate av_image_copy_uc_from;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_image_fill_arrays_delegate(ref byte_ptr4 @dst_data, ref int4 @dst_linesize, byte* @src, AVPixelFormat @pix_fmt, int @width, int @height, int @align);
public static av_image_fill_arrays_delegate av_image_fill_arrays;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_image_fill_black_delegate(ref byte_ptr4 @dst_data, in long4 @dst_linesize, AVPixelFormat @pix_fmt, AVColorRange @range, int @width, int @height);
public static av_image_fill_black_delegate av_image_fill_black;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_image_fill_linesizes_delegate(ref int4 @linesizes, AVPixelFormat @pix_fmt, int @width);
public static av_image_fill_linesizes_delegate av_image_fill_linesizes;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_image_fill_max_pixsteps_delegate(ref int4 @max_pixsteps, ref int4 @max_pixstep_comps, AVPixFmtDescriptor* @pixdesc);
public static av_image_fill_max_pixsteps_delegate av_image_fill_max_pixsteps;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_image_fill_plane_sizes_delegate(ref ulong4 @size, AVPixelFormat @pix_fmt, int @height, in long4 @linesizes);
public static av_image_fill_plane_sizes_delegate av_image_fill_plane_sizes;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_image_fill_pointers_delegate(ref byte_ptr4 @data, AVPixelFormat @pix_fmt, int @height, byte* @ptr, in int4 @linesizes);
public static av_image_fill_pointers_delegate av_image_fill_pointers;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_image_get_buffer_size_delegate(AVPixelFormat @pix_fmt, int @width, int @height, int @align);
public static av_image_get_buffer_size_delegate av_image_get_buffer_size;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_image_get_linesize_delegate(AVPixelFormat @pix_fmt, int @width, int @plane);
public static av_image_get_linesize_delegate av_image_get_linesize;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_index_search_timestamp_delegate(AVStream* @st, long @timestamp, int @flags);
public static av_index_search_timestamp_delegate av_index_search_timestamp;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_init_packet_delegate(AVPacket* @pkt);
public static av_init_packet_delegate av_init_packet;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVInputFormat* av_input_audio_device_next_delegate(AVInputFormat* @d);
public static av_input_audio_device_next_delegate av_input_audio_device_next;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVInputFormat* av_input_video_device_next_delegate(AVInputFormat* @d);
public static av_input_video_device_next_delegate av_input_video_device_next;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate uint av_int_list_length_for_size_delegate(uint @elsize, void* @list, ulong @term);
public static av_int_list_length_for_size_delegate av_int_list_length_for_size;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_interleaved_write_frame_delegate(AVFormatContext* @s, AVPacket* @pkt);
public static av_interleaved_write_frame_delegate av_interleaved_write_frame;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int av_interleaved_write_uncoded_frame_delegate(AVFormatContext* @s, int @stream_index, AVFrame* @frame);
public static av_interleaved_write_uncoded_frame_delegate av_interleaved_write_uncoded_frame;

// av_log_*: logging. @fmt is a printf-style format; the varargs tail is not representable here,
// so only the bare format string (av_log) or the native va_list pointer (@vl) is exposed.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_log_delegate(void* @avcl, int @level,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @fmt);
public static av_log_delegate av_log;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_log_default_callback_delegate(void* @avcl, int @level,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @fmt, byte* @vl);
public static av_log_default_callback_delegate av_log_default_callback;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void av_log_format_line_delegate(void* @ptr, int @level,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix);
// NOTE(review): the field declaration below is cut off at this chunk boundary; its name and
// terminating ';' continue in the following chunk.
public static av_log_format_line_delegate
av_log_format_line; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_log_format_line2_delegate(void* @ptr, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix); + public static av_log_format_line2_delegate av_log_format_line2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_log_get_flags_delegate(); + public static av_log_get_flags_delegate av_log_get_flags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_log_get_level_delegate(); + public static av_log_get_level_delegate av_log_get_level; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_log_once_delegate(void* @avcl, int @initial_level, int @subsequent_level, int* @state, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt); + public static av_log_once_delegate av_log_once; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_log_set_callback_delegate(av_log_set_callback_callback_func @callback); + public static av_log_set_callback_delegate av_log_set_callback; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_log_set_flags_delegate(int @arg); + public static av_log_set_flags_delegate av_log_set_flags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_log_set_level_delegate(int @level); + public static av_log_set_level_delegate av_log_set_level; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_log2_delegate(uint @v); + public static av_log2_delegate av_log2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public 
delegate int av_log2_16bit_delegate(uint @v); + public static av_log2_16bit_delegate av_log2_16bit; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_malloc_delegate(ulong @size); + public static av_malloc_delegate av_malloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_malloc_array_delegate(ulong @nmemb, ulong @size); + public static av_malloc_array_delegate av_malloc_array; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_mallocz_delegate(ulong @size); + public static av_mallocz_delegate av_mallocz; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_mallocz_array_delegate(ulong @nmemb, ulong @size); + public static av_mallocz_array_delegate av_mallocz_array; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVMasteringDisplayMetadata* av_mastering_display_metadata_alloc_delegate(); + public static av_mastering_display_metadata_alloc_delegate av_mastering_display_metadata_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVMasteringDisplayMetadata* av_mastering_display_metadata_create_side_data_delegate(AVFrame* @frame); + public static av_mastering_display_metadata_create_side_data_delegate av_mastering_display_metadata_create_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_match_ext_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @extensions); + public static av_match_ext_delegate av_match_ext; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_max_alloc_delegate(ulong 
@max); + public static av_max_alloc_delegate av_max_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_memcpy_backptr_delegate(byte* @dst, int @back, int @cnt); + public static av_memcpy_backptr_delegate av_memcpy_backptr; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_memdup_delegate(void* @p, ulong @size); + public static av_memdup_delegate av_memdup; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_mul_q_delegate(AVRational @b, AVRational @c); + public static av_mul_q_delegate av_mul_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOutputFormat* av_muxer_iterate_delegate(void** @opaque); + public static av_muxer_iterate_delegate av_muxer_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_nearer_q_delegate(AVRational @q, AVRational @q1, AVRational @q2); + public static av_nearer_q_delegate av_nearer_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_new_packet_delegate(AVPacket* @pkt, int @size); + public static av_new_packet_delegate av_new_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVProgram* av_new_program_delegate(AVFormatContext* @s, int @id); + public static av_new_program_delegate av_new_program; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* av_opt_child_class_iterate_delegate(AVClass* @parent, void** @iter); + public static av_opt_child_class_iterate_delegate av_opt_child_class_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_opt_child_next_delegate(void* @obj, void* @prev); + public static av_opt_child_next_delegate av_opt_child_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_copy_delegate(void* @dest, void* @src); + public static av_opt_copy_delegate av_opt_copy; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_eval_double_delegate(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, double* @double_out); + public static av_opt_eval_double_delegate av_opt_eval_double; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_eval_flags_delegate(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, int* @flags_out); + public static av_opt_eval_flags_delegate av_opt_eval_flags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_eval_float_delegate(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, float* @float_out); + public static av_opt_eval_float_delegate av_opt_eval_float; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_eval_int_delegate(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, int* @int_out); + public static av_opt_eval_int_delegate av_opt_eval_int; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_eval_int64_delegate(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, long* @int64_out); + public static av_opt_eval_int64_delegate av_opt_eval_int64; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_eval_q_delegate(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, AVRational* @q_out); + public static av_opt_eval_q_delegate av_opt_eval_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOption* av_opt_find_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @unit, int @opt_flags, int @search_flags); + public static av_opt_find_delegate av_opt_find; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOption* av_opt_find2_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @unit, int @opt_flags, int @search_flags, void** @target_obj); + public static av_opt_find2_delegate av_opt_find2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_flag_is_set_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @field_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + 
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @flag_name); + public static av_opt_flag_is_set_delegate av_opt_flag_is_set; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_opt_free_delegate(void* @obj); + public static av_opt_free_delegate av_opt_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_opt_freep_ranges_delegate(AVOptionRanges** @ranges); + public static av_opt_freep_ranges_delegate av_opt_freep_ranges; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, byte** @out_val); + public static av_opt_get_delegate av_opt_get; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_channel_layout_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, long* @ch_layout); + public static av_opt_get_channel_layout_delegate av_opt_get_channel_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_chlayout_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVChannelLayout* @layout); + public static av_opt_get_chlayout_delegate av_opt_get_chlayout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_dict_val_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + 
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVDictionary** @out_val); + public static av_opt_get_dict_val_delegate av_opt_get_dict_val; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_double_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, double* @out_val); + public static av_opt_get_double_delegate av_opt_get_double; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_image_size_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, int* @w_out, int* @h_out); + public static av_opt_get_image_size_delegate av_opt_get_image_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_int_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, long* @out_val); + public static av_opt_get_int_delegate av_opt_get_int; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_key_value_delegate(byte** @ropts, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @pairs_sep, uint @flags, byte** @rkey, byte** 
@rval); + public static av_opt_get_key_value_delegate av_opt_get_key_value; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_pixel_fmt_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVPixelFormat* @out_fmt); + public static av_opt_get_pixel_fmt_delegate av_opt_get_pixel_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_q_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVRational* @out_val); + public static av_opt_get_q_delegate av_opt_get_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_sample_fmt_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVSampleFormat* @out_fmt); + public static av_opt_get_sample_fmt_delegate av_opt_get_sample_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_video_rate_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVRational* @out_val); + public static av_opt_get_video_rate_delegate av_opt_get_video_rate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_is_set_to_default_delegate(void* @obj, AVOption* @o); + public static av_opt_is_set_to_default_delegate av_opt_is_set_to_default; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_is_set_to_default_by_name_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags); + public static av_opt_is_set_to_default_by_name_delegate av_opt_is_set_to_default_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOption* av_opt_next_delegate(void* @obj, AVOption* @prev); + public static av_opt_next_delegate av_opt_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_opt_ptr_delegate(AVClass* @avclass, void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_opt_ptr_delegate av_opt_ptr; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_query_ranges_delegate(AVOptionRanges** @p0, void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, int @flags); + public static av_opt_query_ranges_delegate av_opt_query_ranges; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_query_ranges_default_delegate(AVOptionRanges** @p0, void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, int @flags); + public static av_opt_query_ranges_default_delegate av_opt_query_ranges_default; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_serialize_delegate(void* @obj, int @opt_flags, int @flags, byte** @buffer, byte 
@key_val_sep, byte @pairs_sep); + public static av_opt_serialize_delegate av_opt_serialize; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, int @search_flags); + public static av_opt_set_delegate av_opt_set; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_bin_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, byte* @val, int @size, int @search_flags); + public static av_opt_set_bin_delegate av_opt_set_bin; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_channel_layout_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, long @ch_layout, int @search_flags); + public static av_opt_set_channel_layout_delegate av_opt_set_channel_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_chlayout_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVChannelLayout* @layout, int @search_flags); + public static av_opt_set_chlayout_delegate av_opt_set_chlayout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void 
av_opt_set_defaults_delegate(void* @s); + public static av_opt_set_defaults_delegate av_opt_set_defaults; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_opt_set_defaults2_delegate(void* @s, int @mask, int @flags); + public static av_opt_set_defaults2_delegate av_opt_set_defaults2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_dict_delegate(void* @obj, AVDictionary** @options); + public static av_opt_set_dict_delegate av_opt_set_dict; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_dict_val_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVDictionary* @val, int @search_flags); + public static av_opt_set_dict_val_delegate av_opt_set_dict_val; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_dict2_delegate(void* @obj, AVDictionary** @options, int @search_flags); + public static av_opt_set_dict2_delegate av_opt_set_dict2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_double_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, double @val, int @search_flags); + public static av_opt_set_double_delegate av_opt_set_double; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_from_string_delegate(void* @ctx, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @opts, byte** @shorthand, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + 
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @pairs_sep); + public static av_opt_set_from_string_delegate av_opt_set_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_image_size_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @w, int @h, int @search_flags); + public static av_opt_set_image_size_delegate av_opt_set_image_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_int_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, long @val, int @search_flags); + public static av_opt_set_int_delegate av_opt_set_int; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_pixel_fmt_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVPixelFormat @fmt, int @search_flags); + public static av_opt_set_pixel_fmt_delegate av_opt_set_pixel_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_q_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVRational @val, int @search_flags); + public static av_opt_set_q_delegate av_opt_set_q; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_sample_fmt_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVSampleFormat @fmt, int @search_flags); + public static av_opt_set_sample_fmt_delegate av_opt_set_sample_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_video_rate_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVRational @val, int @search_flags); + public static av_opt_set_video_rate_delegate av_opt_set_video_rate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_show2_delegate(void* @obj, void* @av_log_obj, int @req_flags, int @rej_flags); + public static av_opt_show2_delegate av_opt_show2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOutputFormat* av_output_audio_device_next_delegate(AVOutputFormat* @d); + public static av_output_audio_device_next_delegate av_output_audio_device_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOutputFormat* av_output_video_device_next_delegate(AVOutputFormat* @d); + public static av_output_video_device_next_delegate av_output_video_device_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_add_side_data_delegate(AVPacket* @pkt, AVPacketSideDataType @type, byte* @data, ulong @size); + public static av_packet_add_side_data_delegate av_packet_add_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPacket* av_packet_alloc_delegate(); + public static av_packet_alloc_delegate av_packet_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + 
public delegate AVPacket* av_packet_clone_delegate(AVPacket* @src); + public static av_packet_clone_delegate av_packet_clone; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_copy_props_delegate(AVPacket* @dst, AVPacket* @src); + public static av_packet_copy_props_delegate av_packet_copy_props; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_packet_free_delegate(AVPacket** @pkt); + public static av_packet_free_delegate av_packet_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_packet_free_side_data_delegate(AVPacket* @pkt); + public static av_packet_free_side_data_delegate av_packet_free_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_from_data_delegate(AVPacket* @pkt, byte* @data, int @size); + public static av_packet_from_data_delegate av_packet_from_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_packet_get_side_data_delegate(AVPacket* @pkt, AVPacketSideDataType @type, ulong* @size); + public static av_packet_get_side_data_delegate av_packet_get_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_make_refcounted_delegate(AVPacket* @pkt); + public static av_packet_make_refcounted_delegate av_packet_make_refcounted; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_make_writable_delegate(AVPacket* @pkt); + public static av_packet_make_writable_delegate av_packet_make_writable; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_packet_move_ref_delegate(AVPacket* @dst, AVPacket* @src); + public static av_packet_move_ref_delegate av_packet_move_ref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_packet_new_side_data_delegate(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size); + public static 
av_packet_new_side_data_delegate av_packet_new_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_packet_pack_dictionary_delegate(AVDictionary* @dict, ulong* @size); + public static av_packet_pack_dictionary_delegate av_packet_pack_dictionary; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_ref_delegate(AVPacket* @dst, AVPacket* @src); + public static av_packet_ref_delegate av_packet_ref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_packet_rescale_ts_delegate(AVPacket* @pkt, AVRational @tb_src, AVRational @tb_dst); + public static av_packet_rescale_ts_delegate av_packet_rescale_ts; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_shrink_side_data_delegate(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size); + public static av_packet_shrink_side_data_delegate av_packet_shrink_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_packet_side_data_name_delegate(AVPacketSideDataType @type); + public static av_packet_side_data_name_delegate av_packet_side_data_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_unpack_dictionary_delegate(byte* @data, ulong @size, AVDictionary** @dict); + public static av_packet_unpack_dictionary_delegate av_packet_unpack_dictionary; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_packet_unref_delegate(AVPacket* @pkt); + public static av_packet_unref_delegate av_packet_unref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_parse_cpu_caps_delegate(uint* @flags, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif 
+ string @s); + public static av_parse_cpu_caps_delegate av_parse_cpu_caps; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_parser_close_delegate(AVCodecParserContext* @s); + public static av_parser_close_delegate av_parser_close; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecParserContext* av_parser_init_delegate(int @codec_id); + public static av_parser_init_delegate av_parser_init; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecParser* av_parser_iterate_delegate(void** @opaque); + public static av_parser_iterate_delegate av_parser_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_parser_parse2_delegate(AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size, long @pts, long @dts, long @pos); + public static av_parser_parse2_delegate av_parser_parse2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_pix_fmt_count_planes_delegate(AVPixelFormat @pix_fmt); + public static av_pix_fmt_count_planes_delegate av_pix_fmt_count_planes; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixFmtDescriptor* av_pix_fmt_desc_get_delegate(AVPixelFormat @pix_fmt); + public static av_pix_fmt_desc_get_delegate av_pix_fmt_desc_get; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixelFormat av_pix_fmt_desc_get_id_delegate(AVPixFmtDescriptor* @desc); + public static av_pix_fmt_desc_get_id_delegate av_pix_fmt_desc_get_id; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixFmtDescriptor* av_pix_fmt_desc_next_delegate(AVPixFmtDescriptor* @prev); + public static av_pix_fmt_desc_next_delegate av_pix_fmt_desc_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_pix_fmt_get_chroma_sub_sample_delegate(AVPixelFormat @pix_fmt, int* @h_shift, int* 
@v_shift); + public static av_pix_fmt_get_chroma_sub_sample_delegate av_pix_fmt_get_chroma_sub_sample; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixelFormat av_pix_fmt_swap_endianness_delegate(AVPixelFormat @pix_fmt); + public static av_pix_fmt_swap_endianness_delegate av_pix_fmt_swap_endianness; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_pkt_dump_log2_delegate(void* @avcl, int @level, AVPacket* @pkt, int @dump_payload, AVStream* @st); + public static av_pkt_dump_log2_delegate av_pkt_dump_log2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_pkt_dump2_delegate(_iobuf* @f, AVPacket* @pkt, int @dump_payload, AVStream* @st); + public static av_pkt_dump2_delegate av_pkt_dump2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_probe_input_buffer_delegate(AVIOContext* @pb, AVInputFormat** @fmt, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, void* @logctx, uint @offset, uint @max_probe_size); + public static av_probe_input_buffer_delegate av_probe_input_buffer; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_probe_input_buffer2_delegate(AVIOContext* @pb, AVInputFormat** @fmt, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, void* @logctx, uint @offset, uint @max_probe_size); + public static av_probe_input_buffer2_delegate av_probe_input_buffer2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVInputFormat* av_probe_input_format_delegate(AVProbeData* @pd, int @is_opened); + public static av_probe_input_format_delegate av_probe_input_format; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public 
delegate AVInputFormat* av_probe_input_format2_delegate(AVProbeData* @pd, int @is_opened, int* @score_max); + public static av_probe_input_format2_delegate av_probe_input_format2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVInputFormat* av_probe_input_format3_delegate(AVProbeData* @pd, int @is_opened, int* @score_ret); + public static av_probe_input_format3_delegate av_probe_input_format3; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_program_add_stream_index_delegate(AVFormatContext* @ac, int @progid, uint @idx); + public static av_program_add_stream_index_delegate av_program_add_stream_index; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint av_q2intfloat_delegate(AVRational @q); + public static av_q2intfloat_delegate av_q2intfloat; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_read_frame_delegate(AVFormatContext* @s, AVPacket* @pkt); + public static av_read_frame_delegate av_read_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_read_image_line_delegate(ushort* @dst, in byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component); + public static av_read_image_line_delegate av_read_image_line; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_read_image_line2_delegate(void* @dst, in byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component, int @dst_element_size); + public static av_read_image_line2_delegate av_read_image_line2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_read_pause_delegate(AVFormatContext* @s); + public static av_read_pause_delegate av_read_pause; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_read_play_delegate(AVFormatContext* @s); + public static 
av_read_play_delegate av_read_play; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_realloc_delegate(void* @ptr, ulong @size); + public static av_realloc_delegate av_realloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_realloc_array_delegate(void* @ptr, ulong @nmemb, ulong @size); + public static av_realloc_array_delegate av_realloc_array; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_realloc_f_delegate(void* @ptr, ulong @nelem, ulong @elsize); + public static av_realloc_f_delegate av_realloc_f; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_reallocp_delegate(void* @ptr, ulong @size); + public static av_reallocp_delegate av_reallocp; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_reallocp_array_delegate(void* @ptr, ulong @nmemb, ulong @size); + public static av_reallocp_array_delegate av_reallocp_array; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_reduce_delegate(int* @dst_num, int* @dst_den, long @num, long @den, long @max); + public static av_reduce_delegate av_reduce; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_rescale_delegate(long @a, long @b, long @c); + public static av_rescale_delegate av_rescale; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_rescale_delta_delegate(AVRational @in_tb, long @in_ts, AVRational @fs_tb, int @duration, long* @last, AVRational @out_tb); + public static av_rescale_delta_delegate av_rescale_delta; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_rescale_q_delegate(long @a, AVRational @bq, AVRational @cq); + public static av_rescale_q_delegate av_rescale_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_rescale_q_rnd_delegate(long @a, AVRational @bq, AVRational @cq, AVRounding @rnd); 
+ public static av_rescale_q_rnd_delegate av_rescale_q_rnd; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_rescale_rnd_delegate(long @a, long @b, long @c, AVRounding @rnd); + public static av_rescale_rnd_delegate av_rescale_rnd; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_sample_fmt_is_planar_delegate(AVSampleFormat @sample_fmt); + public static av_sample_fmt_is_planar_delegate av_sample_fmt_is_planar; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_samples_alloc_delegate(byte** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + public static av_samples_alloc_delegate av_samples_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_samples_alloc_array_and_samples_delegate(byte*** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + public static av_samples_alloc_array_and_samples_delegate av_samples_alloc_array_and_samples; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_samples_copy_delegate(byte** @dst, byte** @src, int @dst_offset, int @src_offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt); + public static av_samples_copy_delegate av_samples_copy; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_samples_fill_arrays_delegate(byte** @audio_data, int* @linesize, byte* @buf, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + public static av_samples_fill_arrays_delegate av_samples_fill_arrays; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_samples_get_buffer_size_delegate(int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + public static av_samples_get_buffer_size_delegate av_samples_get_buffer_size; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_samples_set_silence_delegate(byte** @audio_data, int @offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt); + public static av_samples_set_silence_delegate av_samples_set_silence; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_sdp_create_delegate(AVFormatContext** @ac, int @n_files, byte* @buf, int @size); + public static av_sdp_create_delegate av_sdp_create; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_seek_frame_delegate(AVFormatContext* @s, int @stream_index, long @timestamp, int @flags); + public static av_seek_frame_delegate av_seek_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_set_options_string_delegate(void* @ctx, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @opts, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @pairs_sep); + public static av_set_options_string_delegate av_set_options_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_shrink_packet_delegate(AVPacket* @pkt, int @size); + public static av_shrink_packet_delegate av_shrink_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_size_mult_delegate(ulong @a, ulong @b, ulong* @r); + public static av_size_mult_delegate av_size_mult; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_strdup_delegate( + #if NETSTANDARD2_1_OR_GREATER + 
[MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @s); + public static av_strdup_delegate av_strdup; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_stream_add_side_data_delegate(AVStream* @st, AVPacketSideDataType @type, byte* @data, ulong @size); + public static av_stream_add_side_data_delegate av_stream_add_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* av_stream_get_class_delegate(); + public static av_stream_get_class_delegate av_stream_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_stream_get_codec_timebase_delegate(AVStream* @st); + public static av_stream_get_codec_timebase_delegate av_stream_get_codec_timebase; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_stream_get_end_pts_delegate(AVStream* @st); + public static av_stream_get_end_pts_delegate av_stream_get_end_pts; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecParserContext* av_stream_get_parser_delegate(AVStream* @s); + public static av_stream_get_parser_delegate av_stream_get_parser; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_stream_get_side_data_delegate(AVStream* @stream, AVPacketSideDataType @type, ulong* @size); + public static av_stream_get_side_data_delegate av_stream_get_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_stream_new_side_data_delegate(AVStream* @stream, AVPacketSideDataType @type, ulong @size); + public static av_stream_new_side_data_delegate av_stream_new_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_strerror_delegate(int @errnum, byte* @errbuf, ulong @errbuf_size); + public static av_strerror_delegate av_strerror; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_strndup_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @s, ulong @len); + public static av_strndup_delegate av_strndup; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_sub_q_delegate(AVRational @b, AVRational @c); + public static av_sub_q_delegate av_sub_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_tempfile_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @prefix, byte** @filename, int @log_offset, void* @log_ctx); + public static av_tempfile_delegate av_tempfile; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_timecode_adjust_ntsc_framenum2_delegate(int @framenum, int @fps); + public static av_timecode_adjust_ntsc_framenum2_delegate av_timecode_adjust_ntsc_framenum2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_timecode_check_frame_rate_delegate(AVRational @rate); + public static av_timecode_check_frame_rate_delegate av_timecode_check_frame_rate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint av_timecode_get_smpte_delegate(AVRational @rate, int @drop, int @hh, int @mm, int @ss, int @ff); + public static av_timecode_get_smpte_delegate av_timecode_get_smpte; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint av_timecode_get_smpte_from_framenum_delegate(AVTimecode* @tc, int @framenum); + public static av_timecode_get_smpte_from_framenum_delegate av_timecode_get_smpte_from_framenum; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_timecode_init_delegate(AVTimecode* @tc, 
AVRational @rate, int @flags, int @frame_start, void* @log_ctx); + public static av_timecode_init_delegate av_timecode_init; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_timecode_init_from_components_delegate(AVTimecode* @tc, AVRational @rate, int @flags, int @hh, int @mm, int @ss, int @ff, void* @log_ctx); + public static av_timecode_init_from_components_delegate av_timecode_init_from_components; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_timecode_init_from_string_delegate(AVTimecode* @tc, AVRational @rate, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str, void* @log_ctx); + public static av_timecode_init_from_string_delegate av_timecode_init_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_timecode_make_mpeg_tc_string_delegate(byte* @buf, uint @tc25bit); + public static av_timecode_make_mpeg_tc_string_delegate av_timecode_make_mpeg_tc_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_timecode_make_smpte_tc_string_delegate(byte* @buf, uint @tcsmpte, int @prevent_df); + public static av_timecode_make_smpte_tc_string_delegate av_timecode_make_smpte_tc_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_timecode_make_smpte_tc_string2_delegate(byte* @buf, AVRational @rate, uint @tcsmpte, int @prevent_df, int @skip_field); + public static av_timecode_make_smpte_tc_string2_delegate av_timecode_make_smpte_tc_string2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_timecode_make_string_delegate(AVTimecode* @tc, byte* @buf, int @framenum); + public static av_timecode_make_string_delegate av_timecode_make_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void 
av_tree_destroy_delegate(AVTreeNode* @t); + public static av_tree_destroy_delegate av_tree_destroy; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_tree_enumerate_delegate(AVTreeNode* @t, void* @opaque, av_tree_enumerate_cmp_func @cmp, av_tree_enumerate_enu_func @enu); + public static av_tree_enumerate_delegate av_tree_enumerate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_tree_find_delegate(AVTreeNode* @root, void* @key, av_tree_find_cmp_func @cmp, ref void_ptr2 @next); + public static av_tree_find_delegate av_tree_find; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_tree_insert_delegate(AVTreeNode** @rootp, void* @key, av_tree_insert_cmp_func @cmp, AVTreeNode** @next); + public static av_tree_insert_delegate av_tree_insert; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVTreeNode* av_tree_node_alloc_delegate(); + public static av_tree_node_alloc_delegate av_tree_node_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_url_split_delegate(byte* @proto, int @proto_size, byte* @authorization, int @authorization_size, byte* @hostname, int @hostname_size, int* @port_ptr, byte* @path, int @path_size, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url); + public static av_url_split_delegate av_url_split; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_usleep_delegate(uint @usec); + public static av_usleep_delegate av_usleep; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_version_info_delegate(); + public static av_version_info_delegate av_version_info; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_vlog_delegate(void* @avcl, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl); + public static av_vlog_delegate av_vlog; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_write_frame_delegate(AVFormatContext* @s, AVPacket* @pkt); + public static av_write_frame_delegate av_write_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_write_image_line_delegate(ushort* @src, ref byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w); + public static av_write_image_line_delegate av_write_image_line; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_write_image_line2_delegate(void* @src, ref byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @src_element_size); + public static av_write_image_line2_delegate av_write_image_line2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_write_trailer_delegate(AVFormatContext* @s); + public static av_write_trailer_delegate av_write_trailer; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_write_uncoded_frame_delegate(AVFormatContext* @s, int @stream_index, AVFrame* @frame); + public static av_write_uncoded_frame_delegate av_write_uncoded_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_write_uncoded_frame_query_delegate(AVFormatContext* @s, int @stream_index); + public static av_write_uncoded_frame_query_delegate av_write_uncoded_frame_query; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint av_xiphlacing_delegate(byte* @s, uint @v); + public static av_xiphlacing_delegate av_xiphlacing; + 
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avcodec_align_dimensions_delegate(AVCodecContext* @s, int* @width, int* @height); + public static avcodec_align_dimensions_delegate avcodec_align_dimensions; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avcodec_align_dimensions2_delegate(AVCodecContext* @s, int* @width, int* @height, ref int8 @linesize_align); + public static avcodec_align_dimensions2_delegate avcodec_align_dimensions2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecContext* avcodec_alloc_context3_delegate(AVCodec* @codec); + public static avcodec_alloc_context3_delegate avcodec_alloc_context3; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVChromaLocation avcodec_chroma_pos_to_enum_delegate(int @xpos, int @ypos); + public static avcodec_chroma_pos_to_enum_delegate avcodec_chroma_pos_to_enum; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_close_delegate(AVCodecContext* @avctx); + public static avcodec_close_delegate avcodec_close; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avcodec_configuration_delegate(); + public static avcodec_configuration_delegate avcodec_configuration; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_decode_subtitle2_delegate(AVCodecContext* @avctx, AVSubtitle* @sub, int* @got_sub_ptr, AVPacket* @avpkt); + public static avcodec_decode_subtitle2_delegate avcodec_decode_subtitle2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_default_execute_delegate(AVCodecContext* @c, avcodec_default_execute_func_func @func, void* @arg, int* @ret, int @count, int @size); + public static avcodec_default_execute_delegate avcodec_default_execute; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_default_execute2_delegate(AVCodecContext* @c, avcodec_default_execute2_func_func @func, void* @arg, int* @ret, int @count); + public static avcodec_default_execute2_delegate avcodec_default_execute2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_default_get_buffer2_delegate(AVCodecContext* @s, AVFrame* @frame, int @flags); + public static avcodec_default_get_buffer2_delegate avcodec_default_get_buffer2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_default_get_encode_buffer_delegate(AVCodecContext* @s, AVPacket* @pkt, int @flags); + public static avcodec_default_get_encode_buffer_delegate avcodec_default_get_encode_buffer; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixelFormat avcodec_default_get_format_delegate(AVCodecContext* @s, AVPixelFormat* @fmt); + public static avcodec_default_get_format_delegate avcodec_default_get_format; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecDescriptor* avcodec_descriptor_get_delegate(AVCodecID @id); + public static avcodec_descriptor_get_delegate avcodec_descriptor_get; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecDescriptor* avcodec_descriptor_get_by_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avcodec_descriptor_get_by_name_delegate avcodec_descriptor_get_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecDescriptor* avcodec_descriptor_next_delegate(AVCodecDescriptor* @prev); + public static avcodec_descriptor_next_delegate avcodec_descriptor_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
avcodec_encode_subtitle_delegate(AVCodecContext* @avctx, byte* @buf, int @buf_size, AVSubtitle* @sub); + public static avcodec_encode_subtitle_delegate avcodec_encode_subtitle; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_enum_to_chroma_pos_delegate(int* @xpos, int* @ypos, AVChromaLocation @pos); + public static avcodec_enum_to_chroma_pos_delegate avcodec_enum_to_chroma_pos; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_fill_audio_frame_delegate(AVFrame* @frame, int @nb_channels, AVSampleFormat @sample_fmt, byte* @buf, int @buf_size, int @align); + public static avcodec_fill_audio_frame_delegate avcodec_fill_audio_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixelFormat avcodec_find_best_pix_fmt_of_list_delegate(AVPixelFormat* @pix_fmt_list, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr); + public static avcodec_find_best_pix_fmt_of_list_delegate avcodec_find_best_pix_fmt_of_list; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodec* avcodec_find_decoder_delegate(AVCodecID @id); + public static avcodec_find_decoder_delegate avcodec_find_decoder; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodec* avcodec_find_decoder_by_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avcodec_find_decoder_by_name_delegate avcodec_find_decoder_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodec* avcodec_find_encoder_delegate(AVCodecID @id); + public static avcodec_find_encoder_delegate avcodec_find_encoder; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodec* avcodec_find_encoder_by_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + 
[MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avcodec_find_encoder_by_name_delegate avcodec_find_encoder_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avcodec_flush_buffers_delegate(AVCodecContext* @avctx); + public static avcodec_flush_buffers_delegate avcodec_flush_buffers; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avcodec_free_context_delegate(AVCodecContext** @avctx); + public static avcodec_free_context_delegate avcodec_free_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* avcodec_get_class_delegate(); + public static avcodec_get_class_delegate avcodec_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* avcodec_get_frame_class_delegate(); + public static avcodec_get_frame_class_delegate avcodec_get_frame_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecHWConfig* avcodec_get_hw_config_delegate(AVCodec* @codec, int @index); + public static avcodec_get_hw_config_delegate avcodec_get_hw_config; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_get_hw_frames_parameters_delegate(AVCodecContext* @avctx, AVBufferRef* @device_ref, AVPixelFormat @hw_pix_fmt, AVBufferRef** @out_frames_ref); + public static avcodec_get_hw_frames_parameters_delegate avcodec_get_hw_frames_parameters; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avcodec_get_name_delegate(AVCodecID @id); + public static avcodec_get_name_delegate avcodec_get_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* avcodec_get_subtitle_rect_class_delegate(); + public static 
avcodec_get_subtitle_rect_class_delegate avcodec_get_subtitle_rect_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVMediaType avcodec_get_type_delegate(AVCodecID @codec_id); + public static avcodec_get_type_delegate avcodec_get_type; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_is_open_delegate(AVCodecContext* @s); + public static avcodec_is_open_delegate avcodec_is_open; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avcodec_license_delegate(); + public static avcodec_license_delegate avcodec_license; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_open2_delegate(AVCodecContext* @avctx, AVCodec* @codec, AVDictionary** @options); + public static avcodec_open2_delegate avcodec_open2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecParameters* avcodec_parameters_alloc_delegate(); + public static avcodec_parameters_alloc_delegate avcodec_parameters_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_parameters_copy_delegate(AVCodecParameters* @dst, AVCodecParameters* @src); + public static avcodec_parameters_copy_delegate avcodec_parameters_copy; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avcodec_parameters_free_delegate(AVCodecParameters** @par); + public static avcodec_parameters_free_delegate avcodec_parameters_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_parameters_from_context_delegate(AVCodecParameters* @par, AVCodecContext* @codec); + public static avcodec_parameters_from_context_delegate avcodec_parameters_from_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_parameters_to_context_delegate(AVCodecContext* @codec, 
AVCodecParameters* @par); + public static avcodec_parameters_to_context_delegate avcodec_parameters_to_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avcodec_pix_fmt_to_codec_tag_delegate(AVPixelFormat @pix_fmt); + public static avcodec_pix_fmt_to_codec_tag_delegate avcodec_pix_fmt_to_codec_tag; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avcodec_profile_name_delegate(AVCodecID @codec_id, int @profile); + public static avcodec_profile_name_delegate avcodec_profile_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_receive_frame_delegate(AVCodecContext* @avctx, AVFrame* @frame); + public static avcodec_receive_frame_delegate avcodec_receive_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_receive_packet_delegate(AVCodecContext* @avctx, AVPacket* @avpkt); + public static avcodec_receive_packet_delegate avcodec_receive_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_send_frame_delegate(AVCodecContext* @avctx, AVFrame* @frame); + public static avcodec_send_frame_delegate avcodec_send_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_send_packet_delegate(AVCodecContext* @avctx, AVPacket* @avpkt); + public static avcodec_send_packet_delegate avcodec_send_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avcodec_string_delegate(byte* @buf, int @buf_size, AVCodecContext* @enc, int @encode); + public static avcodec_string_delegate avcodec_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avcodec_version_delegate(); + public static avcodec_version_delegate avcodec_version; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
avdevice_app_to_dev_control_message_delegate(AVFormatContext* @s, AVAppToDevMessageType @type, void* @data, ulong @data_size); + public static avdevice_app_to_dev_control_message_delegate avdevice_app_to_dev_control_message; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avdevice_capabilities_create_delegate(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s, AVDictionary** @device_options); + public static avdevice_capabilities_create_delegate avdevice_capabilities_create; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avdevice_capabilities_free_delegate(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s); + public static avdevice_capabilities_free_delegate avdevice_capabilities_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avdevice_configuration_delegate(); + public static avdevice_configuration_delegate avdevice_configuration; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avdevice_dev_to_app_control_message_delegate(AVFormatContext* @s, AVDevToAppMessageType @type, void* @data, ulong @data_size); + public static avdevice_dev_to_app_control_message_delegate avdevice_dev_to_app_control_message; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avdevice_free_list_devices_delegate(AVDeviceInfoList** @device_list); + public static avdevice_free_list_devices_delegate avdevice_free_list_devices; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avdevice_license_delegate(); + public static avdevice_license_delegate avdevice_license; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avdevice_list_devices_delegate(AVFormatContext* @s, 
AVDeviceInfoList** @device_list); + public static avdevice_list_devices_delegate avdevice_list_devices; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avdevice_list_input_sources_delegate(AVInputFormat* @device, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list); + public static avdevice_list_input_sources_delegate avdevice_list_input_sources; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avdevice_list_output_sinks_delegate(AVOutputFormat* @device, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list); + public static avdevice_list_output_sinks_delegate avdevice_list_output_sinks; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avdevice_register_all_delegate(); + public static avdevice_register_all_delegate avdevice_register_all; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avdevice_version_delegate(); + public static avdevice_version_delegate avdevice_version; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_config_links_delegate(AVFilterContext* @filter); + public static avfilter_config_links_delegate avfilter_config_links; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avfilter_configuration_delegate(); + public static avfilter_configuration_delegate avfilter_configuration; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint 
avfilter_filter_pad_count_delegate(AVFilter* @filter, int @is_output); + public static avfilter_filter_pad_count_delegate avfilter_filter_pad_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avfilter_free_delegate(AVFilterContext* @filter); + public static avfilter_free_delegate avfilter_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFilter* avfilter_get_by_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avfilter_get_by_name_delegate avfilter_get_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* avfilter_get_class_delegate(); + public static avfilter_get_class_delegate avfilter_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFilterGraph* avfilter_graph_alloc_delegate(); + public static avfilter_graph_alloc_delegate avfilter_graph_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFilterContext* avfilter_graph_alloc_filter_delegate(AVFilterGraph* @graph, AVFilter* @filter, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avfilter_graph_alloc_filter_delegate avfilter_graph_alloc_filter; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_config_delegate(AVFilterGraph* @graphctx, void* @log_ctx); + public static avfilter_graph_config_delegate avfilter_graph_config; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_create_filter_delegate(AVFilterContext** @filt_ctx, AVFilter* @filt, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + 
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @args, void* @opaque, AVFilterGraph* @graph_ctx); + public static avfilter_graph_create_filter_delegate avfilter_graph_create_filter; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* avfilter_graph_dump_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @options); + public static avfilter_graph_dump_delegate avfilter_graph_dump; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avfilter_graph_free_delegate(AVFilterGraph** @graph); + public static avfilter_graph_free_delegate avfilter_graph_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFilterContext* avfilter_graph_get_filter_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avfilter_graph_get_filter_delegate avfilter_graph_get_filter; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_parse_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filters, AVFilterInOut* @inputs, AVFilterInOut* @outputs, void* @log_ctx); + public static avfilter_graph_parse_delegate avfilter_graph_parse; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
avfilter_graph_parse_ptr_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs, void* @log_ctx); + public static avfilter_graph_parse_ptr_delegate avfilter_graph_parse_ptr; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_parse2_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs); + public static avfilter_graph_parse2_delegate avfilter_graph_parse2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_queue_command_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @target, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, int @flags, double @ts); + public static avfilter_graph_queue_command_delegate avfilter_graph_queue_command; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_request_oldest_delegate(AVFilterGraph* @graph); + public static avfilter_graph_request_oldest_delegate avfilter_graph_request_oldest; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
avfilter_graph_send_command_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @target, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, byte* @res, int @res_len, int @flags); + public static avfilter_graph_send_command_delegate avfilter_graph_send_command; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avfilter_graph_set_auto_convert_delegate(AVFilterGraph* @graph, uint @flags); + public static avfilter_graph_set_auto_convert_delegate avfilter_graph_set_auto_convert; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_init_dict_delegate(AVFilterContext* @ctx, AVDictionary** @options); + public static avfilter_init_dict_delegate avfilter_init_dict; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_init_str_delegate(AVFilterContext* @ctx, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @args); + public static avfilter_init_str_delegate avfilter_init_str; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFilterInOut* avfilter_inout_alloc_delegate(); + public static avfilter_inout_alloc_delegate avfilter_inout_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avfilter_inout_free_delegate(AVFilterInOut** @inout); + public static avfilter_inout_free_delegate avfilter_inout_free; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_insert_filter_delegate(AVFilterLink* @link, AVFilterContext* @filt, uint @filt_srcpad_idx, uint @filt_dstpad_idx); + public static avfilter_insert_filter_delegate avfilter_insert_filter; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avfilter_license_delegate(); + public static avfilter_license_delegate avfilter_license; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_link_delegate(AVFilterContext* @src, uint @srcpad, AVFilterContext* @dst, uint @dstpad); + public static avfilter_link_delegate avfilter_link; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avfilter_link_free_delegate(AVFilterLink** @link); + public static avfilter_link_free_delegate avfilter_link_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_pad_count_delegate(AVFilterPad* @pads); + public static avfilter_pad_count_delegate avfilter_pad_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avfilter_pad_get_name_delegate(AVFilterPad* @pads, int @pad_idx); + public static avfilter_pad_get_name_delegate avfilter_pad_get_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVMediaType avfilter_pad_get_type_delegate(AVFilterPad* @pads, int @pad_idx); + public static avfilter_pad_get_type_delegate avfilter_pad_get_type; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_process_command_delegate(AVFilterContext* @filter, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + 
#endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, byte* @res, int @res_len, int @flags); + public static avfilter_process_command_delegate avfilter_process_command; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avfilter_version_delegate(); + public static avfilter_version_delegate avfilter_version; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFormatContext* avformat_alloc_context_delegate(); + public static avformat_alloc_context_delegate avformat_alloc_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_alloc_output_context2_delegate(AVFormatContext** @ctx, AVOutputFormat* @oformat, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @format_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename); + public static avformat_alloc_output_context2_delegate avformat_alloc_output_context2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avformat_close_input_delegate(AVFormatContext** @s); + public static avformat_close_input_delegate avformat_close_input; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avformat_configuration_delegate(); + public static avformat_configuration_delegate avformat_configuration; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_find_stream_info_delegate(AVFormatContext* @ic, AVDictionary** @options); + public 
static avformat_find_stream_info_delegate avformat_find_stream_info; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_flush_delegate(AVFormatContext* @s); + public static avformat_flush_delegate avformat_flush; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avformat_free_context_delegate(AVFormatContext* @s); + public static avformat_free_context_delegate avformat_free_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* avformat_get_class_delegate(); + public static avformat_get_class_delegate avformat_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecTag* avformat_get_mov_audio_tags_delegate(); + public static avformat_get_mov_audio_tags_delegate avformat_get_mov_audio_tags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecTag* avformat_get_mov_video_tags_delegate(); + public static avformat_get_mov_video_tags_delegate avformat_get_mov_video_tags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecTag* avformat_get_riff_audio_tags_delegate(); + public static avformat_get_riff_audio_tags_delegate avformat_get_riff_audio_tags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecTag* avformat_get_riff_video_tags_delegate(); + public static avformat_get_riff_video_tags_delegate avformat_get_riff_video_tags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_index_get_entries_count_delegate(AVStream* @st); + public static avformat_index_get_entries_count_delegate avformat_index_get_entries_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVIndexEntry* avformat_index_get_entry_delegate(AVStream* @st, int @idx); + public static avformat_index_get_entry_delegate avformat_index_get_entry; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate 
AVIndexEntry* avformat_index_get_entry_from_timestamp_delegate(AVStream* @st, long @wanted_timestamp, int @flags); + public static avformat_index_get_entry_from_timestamp_delegate avformat_index_get_entry_from_timestamp; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_init_output_delegate(AVFormatContext* @s, AVDictionary** @options); + public static avformat_init_output_delegate avformat_init_output; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avformat_license_delegate(); + public static avformat_license_delegate avformat_license; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_match_stream_specifier_delegate(AVFormatContext* @s, AVStream* @st, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @spec); + public static avformat_match_stream_specifier_delegate avformat_match_stream_specifier; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_network_deinit_delegate(); + public static avformat_network_deinit_delegate avformat_network_deinit; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_network_init_delegate(); + public static avformat_network_init_delegate avformat_network_init; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVStream* avformat_new_stream_delegate(AVFormatContext* @s, AVCodec* @c); + public static avformat_new_stream_delegate avformat_new_stream; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_open_input_delegate(AVFormatContext** @ps, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, 
MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, AVInputFormat* @fmt, AVDictionary** @options); + public static avformat_open_input_delegate avformat_open_input; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_query_codec_delegate(AVOutputFormat* @ofmt, AVCodecID @codec_id, int @std_compliance); + public static avformat_query_codec_delegate avformat_query_codec; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_queue_attached_pictures_delegate(AVFormatContext* @s); + public static avformat_queue_attached_pictures_delegate avformat_queue_attached_pictures; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_seek_file_delegate(AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags); + public static avformat_seek_file_delegate avformat_seek_file; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_transfer_internal_stream_timing_info_delegate(AVOutputFormat* @ofmt, AVStream* @ost, AVStream* @ist, AVTimebaseSource @copy_tb); + public static avformat_transfer_internal_stream_timing_info_delegate avformat_transfer_internal_stream_timing_info; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avformat_version_delegate(); + public static avformat_version_delegate avformat_version; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_write_header_delegate(AVFormatContext* @s, AVDictionary** @options); + public static avformat_write_header_delegate avformat_write_header; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_accept_delegate(AVIOContext* @s, AVIOContext** @c); + public static avio_accept_delegate avio_accept; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVIOContext* avio_alloc_context_delegate(byte* @buffer, int @buffer_size, int @write_flag, void* 
@opaque, avio_alloc_context_read_packet_func @read_packet, avio_alloc_context_write_packet_func @write_packet, avio_alloc_context_seek_func @seek); + public static avio_alloc_context_delegate avio_alloc_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_check_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @flags); + public static avio_check_delegate avio_check; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_close_delegate(AVIOContext* @s); + public static avio_close_delegate avio_close; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_close_dir_delegate(AVIODirContext** @s); + public static avio_close_dir_delegate avio_close_dir; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_close_dyn_buf_delegate(AVIOContext* @s, byte** @pbuffer); + public static avio_close_dyn_buf_delegate avio_close_dyn_buf; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_closep_delegate(AVIOContext** @s); + public static avio_closep_delegate avio_closep; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_context_free_delegate(AVIOContext** @s); + public static avio_context_free_delegate avio_context_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avio_enum_protocols_delegate(void** @opaque, int @output); + public static avio_enum_protocols_delegate avio_enum_protocols; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_feof_delegate(AVIOContext* @s); + public static avio_feof_delegate avio_feof; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: 
MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avio_find_protocol_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url); + public static avio_find_protocol_name_delegate avio_find_protocol_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_flush_delegate(AVIOContext* @s); + public static avio_flush_delegate avio_flush; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_free_directory_entry_delegate(AVIODirEntry** @entry); + public static avio_free_directory_entry_delegate avio_free_directory_entry; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_get_dyn_buf_delegate(AVIOContext* @s, byte** @pbuffer); + public static avio_get_dyn_buf_delegate avio_get_dyn_buf; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_get_str_delegate(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen); + public static avio_get_str_delegate avio_get_str; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_get_str16be_delegate(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen); + public static avio_get_str16be_delegate avio_get_str16be; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_get_str16le_delegate(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen); + public static avio_get_str16le_delegate avio_get_str16le; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_handshake_delegate(AVIOContext* @c); + public static avio_handshake_delegate avio_handshake; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_open_delegate(AVIOContext** @s, + #if NETSTANDARD2_1_OR_GREATER + 
[MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @flags); + public static avio_open_delegate avio_open; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_open_dir_delegate(AVIODirContext** @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, AVDictionary** @options); + public static avio_open_dir_delegate avio_open_dir; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_open_dyn_buf_delegate(AVIOContext** @s); + public static avio_open_dyn_buf_delegate avio_open_dyn_buf; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_open2_delegate(AVIOContext** @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options); + public static avio_open2_delegate avio_open2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_pause_delegate(AVIOContext* @h, int @pause); + public static avio_pause_delegate avio_pause; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_print_string_array_delegate(AVIOContext* @s, byte*[] @strings); + public static avio_print_string_array_delegate avio_print_string_array; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_printf_delegate(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt); + public static avio_printf_delegate avio_printf; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* avio_protocol_get_class_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avio_protocol_get_class_delegate avio_protocol_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_put_str_delegate(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + public static avio_put_str_delegate avio_put_str; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_put_str16be_delegate(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + public static avio_put_str16be_delegate avio_put_str16be; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_put_str16le_delegate(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + public static avio_put_str16le_delegate avio_put_str16le; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_r8_delegate(AVIOContext* @s); + public static avio_r8_delegate avio_r8; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avio_rb16_delegate(AVIOContext* @s); + public static avio_rb16_delegate avio_rb16; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avio_rb24_delegate(AVIOContext* @s); + public static avio_rb24_delegate avio_rb24; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avio_rb32_delegate(AVIOContext* @s); + public static avio_rb32_delegate avio_rb32; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong avio_rb64_delegate(AVIOContext* @s); + public static avio_rb64_delegate avio_rb64; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_read_delegate(AVIOContext* @s, byte* @buf, int @size); + public static avio_read_delegate avio_read; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_read_dir_delegate(AVIODirContext* @s, AVIODirEntry** @next); + public static avio_read_dir_delegate avio_read_dir; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_read_partial_delegate(AVIOContext* @s, byte* @buf, int @size); + public static avio_read_partial_delegate avio_read_partial; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_read_to_bprint_delegate(AVIOContext* @h, AVBPrint* @pb, ulong @max_size); + public static avio_read_to_bprint_delegate avio_read_to_bprint; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avio_rl16_delegate(AVIOContext* @s); + public static avio_rl16_delegate avio_rl16; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avio_rl24_delegate(AVIOContext* @s); + public static avio_rl24_delegate avio_rl24; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avio_rl32_delegate(AVIOContext* @s); + public static avio_rl32_delegate avio_rl32; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong avio_rl64_delegate(AVIOContext* @s); + public static avio_rl64_delegate avio_rl64; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long avio_seek_delegate(AVIOContext* @s, long @offset, int @whence); + public static avio_seek_delegate avio_seek; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long avio_seek_time_delegate(AVIOContext* @h, int @stream_index, long @timestamp, int @flags); + public static avio_seek_time_delegate avio_seek_time; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long avio_size_delegate(AVIOContext* @s); + public static avio_size_delegate avio_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long avio_skip_delegate(AVIOContext* @s, long @offset); + public static avio_skip_delegate avio_skip; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_vprintf_delegate(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @ap); + public static avio_vprintf_delegate avio_vprintf; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_w8_delegate(AVIOContext* @s, int @b); + public static avio_w8_delegate avio_w8; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_wb16_delegate(AVIOContext* @s, uint @val); + public static avio_wb16_delegate avio_wb16; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_wb24_delegate(AVIOContext* @s, uint @val); + public static avio_wb24_delegate avio_wb24; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_wb32_delegate(AVIOContext* @s, uint @val); + public static avio_wb32_delegate avio_wb32; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_wb64_delegate(AVIOContext* @s, ulong @val); + public static avio_wb64_delegate avio_wb64; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_wl16_delegate(AVIOContext* @s, uint @val); + public static avio_wl16_delegate avio_wl16; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_wl24_delegate(AVIOContext* @s, uint @val); + public static avio_wl24_delegate avio_wl24; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_wl32_delegate(AVIOContext* @s, uint @val); + public static avio_wl32_delegate avio_wl32; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_wl64_delegate(AVIOContext* @s, ulong @val); + public static avio_wl64_delegate avio_wl64; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_write_delegate(AVIOContext* @s, byte* @buf, int @size); + public static avio_write_delegate avio_write; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_write_marker_delegate(AVIOContext* @s, long @time, AVIODataMarkerType @type); + public static avio_write_marker_delegate avio_write_marker; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avsubtitle_free_delegate(AVSubtitle* @sub); + public static avsubtitle_free_delegate avsubtitle_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avutil_configuration_delegate(); + public static avutil_configuration_delegate avutil_configuration; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avutil_license_delegate(); + public static avutil_license_delegate avutil_license; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avutil_version_delegate(); + public static avutil_version_delegate avutil_version; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate 
string postproc_configuration_delegate(); + public static postproc_configuration_delegate postproc_configuration; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string postproc_license_delegate(); + public static postproc_license_delegate postproc_license; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint postproc_version_delegate(); + public static postproc_version_delegate postproc_version; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void pp_free_context_delegate(void* @ppContext); + public static pp_free_context_delegate pp_free_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void pp_free_mode_delegate(void* @mode); + public static pp_free_mode_delegate pp_free_mode; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* pp_get_context_delegate(int @width, int @height, int @flags); + public static pp_get_context_delegate pp_get_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* pp_get_mode_by_name_and_quality_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @quality); + public static pp_get_mode_by_name_and_quality_delegate pp_get_mode_by_name_and_quality; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void pp_postprocess_delegate(in byte_ptr3 @src, in int3 @srcStride, ref byte_ptr3 @dst, in int3 @dstStride, int @horizontalSize, int @verticalSize, sbyte* @QP_store, int @QP_stride, void* @mode, void* @ppContext, int @pict_type); + public static pp_postprocess_delegate pp_postprocess; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate SwrContext* swr_alloc_delegate(); + public static 
swr_alloc_delegate swr_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate SwrContext* swr_alloc_set_opts_delegate(SwrContext* @s, long @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, long @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx); + public static swr_alloc_set_opts_delegate swr_alloc_set_opts; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_alloc_set_opts2_delegate(SwrContext** @ps, AVChannelLayout* @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, AVChannelLayout* @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx); + public static swr_alloc_set_opts2_delegate swr_alloc_set_opts2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_build_matrix_delegate(ulong @in_layout, ulong @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @rematrix_maxval, double @rematrix_volume, double* @matrix, int @stride, AVMatrixEncoding @matrix_encoding, void* @log_ctx); + public static swr_build_matrix_delegate swr_build_matrix; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_build_matrix2_delegate(AVChannelLayout* @in_layout, AVChannelLayout* @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @maxval, double @rematrix_volume, double* @matrix, long @stride, AVMatrixEncoding @matrix_encoding, void* @log_context); + public static swr_build_matrix2_delegate swr_build_matrix2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void swr_close_delegate(SwrContext* @s); + public static swr_close_delegate swr_close; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_config_frame_delegate(SwrContext* @swr, AVFrame* @out, AVFrame* @in); + public static swr_config_frame_delegate 
swr_config_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_convert_delegate(SwrContext* @s, byte** @out, int @out_count, byte** @in, int @in_count); + public static swr_convert_delegate swr_convert; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_convert_frame_delegate(SwrContext* @swr, AVFrame* @output, AVFrame* @input); + public static swr_convert_frame_delegate swr_convert_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_drop_output_delegate(SwrContext* @s, int @count); + public static swr_drop_output_delegate swr_drop_output; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void swr_free_delegate(SwrContext** @s); + public static swr_free_delegate swr_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* swr_get_class_delegate(); + public static swr_get_class_delegate swr_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long swr_get_delay_delegate(SwrContext* @s, long @base); + public static swr_get_delay_delegate swr_get_delay; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_get_out_samples_delegate(SwrContext* @s, int @in_samples); + public static swr_get_out_samples_delegate swr_get_out_samples; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_init_delegate(SwrContext* @s); + public static swr_init_delegate swr_init; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_inject_silence_delegate(SwrContext* @s, int @count); + public static swr_inject_silence_delegate swr_inject_silence; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_is_initialized_delegate(SwrContext* @s); + public static swr_is_initialized_delegate swr_is_initialized; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long 
swr_next_pts_delegate(SwrContext* @s, long @pts); + public static swr_next_pts_delegate swr_next_pts; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_set_channel_mapping_delegate(SwrContext* @s, int* @channel_map); + public static swr_set_channel_mapping_delegate swr_set_channel_mapping; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_set_compensation_delegate(SwrContext* @s, int @sample_delta, int @compensation_distance); + public static swr_set_compensation_delegate swr_set_compensation; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int swr_set_matrix_delegate(SwrContext* @s, double* @matrix, int @stride); + public static swr_set_matrix_delegate swr_set_matrix; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string swresample_configuration_delegate(); + public static swresample_configuration_delegate swresample_configuration; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string swresample_license_delegate(); + public static swresample_license_delegate swresample_license; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint swresample_version_delegate(); + public static swresample_version_delegate swresample_version; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate SwsContext* sws_alloc_context_delegate(); + public static sws_alloc_context_delegate sws_alloc_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate SwsVector* sws_allocVec_delegate(int @length); + public static sws_allocVec_delegate sws_allocVec; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void sws_convertPalette8ToPacked24_delegate(byte* @src, 
byte* @dst, int @num_pixels, byte* @palette); + public static sws_convertPalette8ToPacked24_delegate sws_convertPalette8ToPacked24; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void sws_convertPalette8ToPacked32_delegate(byte* @src, byte* @dst, int @num_pixels, byte* @palette); + public static sws_convertPalette8ToPacked32_delegate sws_convertPalette8ToPacked32; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void sws_frame_end_delegate(SwsContext* @c); + public static sws_frame_end_delegate sws_frame_end; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int sws_frame_start_delegate(SwsContext* @c, AVFrame* @dst, AVFrame* @src); + public static sws_frame_start_delegate sws_frame_start; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void sws_freeContext_delegate(SwsContext* @swsContext); + public static sws_freeContext_delegate sws_freeContext; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void sws_freeFilter_delegate(SwsFilter* @filter); + public static sws_freeFilter_delegate sws_freeFilter; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void sws_freeVec_delegate(SwsVector* @a); + public static sws_freeVec_delegate sws_freeVec; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* sws_get_class_delegate(); + public static sws_get_class_delegate sws_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate SwsContext* sws_getCachedContext_delegate(SwsContext* @context, int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param); + public static sws_getCachedContext_delegate sws_getCachedContext; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int* sws_getCoefficients_delegate(int @colorspace); + public static 
sws_getCoefficients_delegate sws_getCoefficients; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int sws_getColorspaceDetails_delegate(SwsContext* @c, int** @inv_table, int* @srcRange, int** @table, int* @dstRange, int* @brightness, int* @contrast, int* @saturation); + public static sws_getColorspaceDetails_delegate sws_getColorspaceDetails; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate SwsContext* sws_getContext_delegate(int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param); + public static sws_getContext_delegate sws_getContext; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate SwsFilter* sws_getDefaultFilter_delegate(float @lumaGBlur, float @chromaGBlur, float @lumaSharpen, float @chromaSharpen, float @chromaHShift, float @chromaVShift, int @verbose); + public static sws_getDefaultFilter_delegate sws_getDefaultFilter; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate SwsVector* sws_getGaussianVec_delegate(double @variance, double @quality); + public static sws_getGaussianVec_delegate sws_getGaussianVec; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int sws_init_context_delegate(SwsContext* @sws_context, SwsFilter* @srcFilter, SwsFilter* @dstFilter); + public static sws_init_context_delegate sws_init_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int sws_isSupportedEndiannessConversion_delegate(AVPixelFormat @pix_fmt); + public static sws_isSupportedEndiannessConversion_delegate sws_isSupportedEndiannessConversion; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int sws_isSupportedInput_delegate(AVPixelFormat @pix_fmt); + public static sws_isSupportedInput_delegate sws_isSupportedInput; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate 
int sws_isSupportedOutput_delegate(AVPixelFormat @pix_fmt); + public static sws_isSupportedOutput_delegate sws_isSupportedOutput; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void sws_normalizeVec_delegate(SwsVector* @a, double @height); + public static sws_normalizeVec_delegate sws_normalizeVec; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int sws_receive_slice_delegate(SwsContext* @c, uint @slice_start, uint @slice_height); + public static sws_receive_slice_delegate sws_receive_slice; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint sws_receive_slice_alignment_delegate(SwsContext* @c); + public static sws_receive_slice_alignment_delegate sws_receive_slice_alignment; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int sws_scale_delegate(SwsContext* @c, byte*[] @srcSlice, int[] @srcStride, int @srcSliceY, int @srcSliceH, byte*[] @dst, int[] @dstStride); + public static sws_scale_delegate sws_scale; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int sws_scale_frame_delegate(SwsContext* @c, AVFrame* @dst, AVFrame* @src); + public static sws_scale_frame_delegate sws_scale_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void sws_scaleVec_delegate(SwsVector* @a, double @scalar); + public static sws_scaleVec_delegate sws_scaleVec; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int sws_send_slice_delegate(SwsContext* @c, uint @slice_start, uint @slice_height); + public static sws_send_slice_delegate sws_send_slice; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int sws_setColorspaceDetails_delegate(SwsContext* @c, in int4 @inv_table, int @srcRange, in int4 @table, int @dstRange, int @brightness, int @contrast, int @saturation); + public static sws_setColorspaceDetails_delegate sws_setColorspaceDetails; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string swscale_configuration_delegate(); + public static swscale_configuration_delegate swscale_configuration; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string swscale_license_delegate(); + public static swscale_license_delegate swscale_license; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint swscale_version_delegate(); + public static swscale_version_delegate swscale_version; + +} diff --git a/FFmpeg.AutoGen.Bindings.DynamicallyLinked/FFmpeg.AutoGen.Bindings.DynamicallyLinked.csproj b/FFmpeg.AutoGen.Bindings.DynamicallyLinked/FFmpeg.AutoGen.Bindings.DynamicallyLinked.csproj new file mode 100644 index 00000000..b2e3912e --- /dev/null +++ b/FFmpeg.AutoGen.Bindings.DynamicallyLinked/FFmpeg.AutoGen.Bindings.DynamicallyLinked.csproj @@ -0,0 +1,35 @@ + + + + netstandard2.1;netstandard2.0;net45 + FFmpeg auto generated unsafe bindings for C#/.NET and Mono. 
Dynamicly Linked implementation + true + + + + True + 108;169;612;618;1573;1591;1701;1702;1705 + false + + bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml + + + + + true + snupkg + + + + + + + + + + + + + + + diff --git a/FFmpeg.AutoGen.Bindings.DynamicallyLinked/generated/DynamicallyLinkedBindings.g.cs b/FFmpeg.AutoGen.Bindings.DynamicallyLinked/generated/DynamicallyLinkedBindings.g.cs new file mode 100644 index 00000000..1409b588 --- /dev/null +++ b/FFmpeg.AutoGen.Bindings.DynamicallyLinked/generated/DynamicallyLinkedBindings.g.cs @@ -0,0 +1,5774 @@ +using System; +using System.Runtime.InteropServices; +using FFmpeg.AutoGen.Abstractions; + +namespace FFmpeg.AutoGen.Bindings.DynamicallyLinked; + +public static unsafe partial class DynamicallyLinkedBindings +{ + /// Create an AVABufferSinkParams structure. + [Obsolete()] + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVABufferSinkParams* av_abuffersink_params_alloc(); + + /// Add an index entry into a sorted list. Update the entry if the list already contains it. + /// timestamp in the time base of the given stream + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_add_index_entry(AVStream* @st, long @pos, long @timestamp, int @size, int @distance, int @flags); + + /// Add two rationals. + /// First rational + /// Second rational + /// b+c + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_add_q(AVRational @b, AVRational @c); + + /// Add a value to a timestamp. + /// Input timestamp time base + /// Input timestamp + /// Time base of `inc` + /// Value to be added + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_add_stable(AVRational @ts_tb, long @ts, AVRational @inc_tb, long @inc); + + /// Read data and append it to the current content of the AVPacket. 
If pkt->size is 0 this is identical to av_get_packet. Note that this uses av_grow_packet and thus involves a realloc which is inefficient. Thus this function should only be used when there is no reasonable way to know (an upper bound of) the final size. + /// associated IO context + /// packet + /// amount of data to read + /// >0 (read size) if OK, AVERROR_xxx otherwise, previous data will not be lost even if an error occurs. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_append_packet(AVIOContext* @s, AVPacket* @pkt, int @size); + + /// Allocate an AVAudioFifo. + /// sample format + /// number of channels + /// initial allocation size, in samples + /// newly allocated AVAudioFifo, or NULL on error + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVAudioFifo* av_audio_fifo_alloc(AVSampleFormat @sample_fmt, int @channels, int @nb_samples); + + /// Drain data from an AVAudioFifo. + /// AVAudioFifo to drain + /// number of samples to drain + /// 0 if OK, or negative AVERROR code on failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_drain(AVAudioFifo* @af, int @nb_samples); + + /// Free an AVAudioFifo. + /// AVAudioFifo to free + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_audio_fifo_free(AVAudioFifo* @af); + + /// Peek data from an AVAudioFifo. + /// AVAudioFifo to read from + /// audio data plane pointers + /// number of samples to peek + /// number of samples actually peek, or negative AVERROR code on failure. The number of samples actually peek will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_peek(AVAudioFifo* @af, void** @data, int @nb_samples); + + /// Peek data from an AVAudioFifo. + /// AVAudioFifo to read from + /// audio data plane pointers + /// number of samples to peek + /// offset from current read position + /// number of samples actually peek, or negative AVERROR code on failure. The number of samples actually peek will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_peek_at(AVAudioFifo* @af, void** @data, int @nb_samples, int @offset); + + /// Read data from an AVAudioFifo. + /// AVAudioFifo to read from + /// audio data plane pointers + /// number of samples to read + /// number of samples actually read, or negative AVERROR code on failure. The number of samples actually read will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_read(AVAudioFifo* @af, void** @data, int @nb_samples); + + /// Reallocate an AVAudioFifo. + /// AVAudioFifo to reallocate + /// new allocation size, in samples + /// 0 if OK, or negative AVERROR code on failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_realloc(AVAudioFifo* @af, int @nb_samples); + + /// Reset the AVAudioFifo buffer. + /// AVAudioFifo to reset + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_audio_fifo_reset(AVAudioFifo* @af); + + /// Get the current number of samples in the AVAudioFifo available for reading. 
+ /// the AVAudioFifo to query + /// number of samples available for reading + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_size(AVAudioFifo* @af); + + /// Get the current number of samples in the AVAudioFifo available for writing. + /// the AVAudioFifo to query + /// number of samples available for writing + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_space(AVAudioFifo* @af); + + /// Write data to an AVAudioFifo. + /// AVAudioFifo to write to + /// audio data plane pointers + /// number of samples to write + /// number of samples actually written, or negative AVERROR code on failure. If successful, the number of samples actually written will always be nb_samples. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_write(AVAudioFifo* @af, void** @data, int @nb_samples); + + /// Append a description of a channel layout to a bprint buffer. + [Obsolete("use av_channel_layout_describe()")] + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_bprint_channel_layout(AVBPrint* @bp, int @nb_channels, ulong @channel_layout); + + /// Allocate a context for a given bitstream filter. The caller must fill in the context parameters as described in the documentation and then call av_bsf_init() before sending any data to the filter. + /// the filter for which to allocate an instance. + /// a pointer into which the pointer to the newly-allocated context will be written. It must be freed with av_bsf_free() after the filtering is done. + /// 0 on success, a negative AVERROR code on failure + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_alloc(AVBitStreamFilter* @filter, AVBSFContext** @ctx); + + /// Reset the internal bitstream filter state. Should be called e.g. when seeking. 
+ [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_bsf_flush(AVBSFContext* @ctx); + + /// Free a bitstream filter context and everything associated with it; write NULL into the supplied pointer. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_bsf_free(AVBSFContext** @ctx); + + /// Returns a bitstream filter with the specified name or NULL if no such bitstream filter exists. + /// a bitstream filter with the specified name or NULL if no such bitstream filter exists. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBitStreamFilter* av_bsf_get_by_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Get the AVClass for AVBSFContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* av_bsf_get_class(); + + /// Get null/pass-through bitstream filter. + /// Pointer to be set to new instance of pass-through bitstream filter + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_get_null_filter(AVBSFContext** @bsf); + + /// Prepare the filter for use, after all the parameters and options have been set. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_init(AVBSFContext* @ctx); + + /// Iterate over all registered bitstream filters. + /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. 
+ /// the next registered bitstream filter or NULL when the iteration is finished + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBitStreamFilter* av_bsf_iterate(void** @opaque); + + /// Allocate empty list of bitstream filters. The list must be later freed by av_bsf_list_free() or finalized by av_bsf_list_finalize(). + /// Pointer to on success, NULL in case of failure + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBSFList* av_bsf_list_alloc(); + + /// Append bitstream filter to the list of bitstream filters. + /// List to append to + /// Filter context to be appended + /// >=0 on success, negative AVERROR in case of failure + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_list_append(AVBSFList* @lst, AVBSFContext* @bsf); + + /// Construct new bitstream filter context given it's name and options and append it to the list of bitstream filters. + /// List to append to + /// Name of the bitstream filter + /// Options for the bitstream filter, can be set to NULL + /// >=0 on success, negative AVERROR in case of failure + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_list_append2(AVBSFList* @lst, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @bsf_name, AVDictionary** @options); + + /// Finalize list of bitstream filters. + /// Filter list structure to be transformed + /// Pointer to be set to newly created structure representing the chain of bitstream filters + /// >=0 on success, negative AVERROR in case of failure + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_list_finalize(AVBSFList** @lst, AVBSFContext** @bsf); + + /// Free list of bitstream filters. 
+ /// Pointer to pointer returned by av_bsf_list_alloc() + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_bsf_list_free(AVBSFList** @lst); + + /// Parse string describing list of bitstream filters and create single AVBSFContext describing the whole chain of bitstream filters. Resulting AVBSFContext can be treated as any other AVBSFContext freshly allocated by av_bsf_alloc(). + /// String describing chain of bitstream filters in format `bsf1[=opt1=val1:opt2=val2][,bsf2]` + /// Pointer to be set to newly created structure representing the chain of bitstream filters + /// >=0 on success, negative AVERROR in case of failure + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_list_parse_str( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str, AVBSFContext** @bsf); + + /// Retrieve a filtered packet. + /// this struct will be filled with the contents of the filtered packet. It is owned by the caller and must be freed using av_packet_unref() when it is no longer needed. This parameter should be "clean" (i.e. freshly allocated with av_packet_alloc() or unreffed with av_packet_unref()) when this function is called. If this function returns successfully, the contents of pkt will be completely overwritten by the returned data. On failure, pkt is not touched. + /// - 0 on success. - AVERROR(EAGAIN) if more packets need to be sent to the filter (using av_bsf_send_packet()) to get more output. - AVERROR_EOF if there will be no further output from the filter. - Another negative AVERROR value if an error occurs. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_receive_packet(AVBSFContext* @ctx, AVPacket* @pkt); + + /// Submit a packet for filtering. + /// the packet to filter. 
The bitstream filter will take ownership of the packet and reset the contents of pkt. pkt is not touched if an error occurs. If pkt is empty (i.e. NULL, or pkt->data is NULL and pkt->side_data_elems zero), it signals the end of the stream (i.e. no more non-empty packets will be sent; sending more empty packets does nothing) and will cause the filter to output any packets it may have buffered internally. + /// - 0 on success. - AVERROR(EAGAIN) if packets need to be retrieved from the filter (using av_bsf_receive_packet()) before new input can be consumed. - Another negative AVERROR value if an error occurs. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_send_packet(AVBSFContext* @ctx, AVPacket* @pkt); + + /// Allocate an AVBuffer of the given size using av_malloc(). + /// an AVBufferRef of given size or NULL when out of memory + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_buffer_alloc(ulong @size); + + /// Same as av_buffer_alloc(), except the returned buffer will be initialized to zero. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_buffer_allocz(ulong @size); + + /// Create an AVBuffer from an existing array. + /// data array + /// size of data in bytes + /// a callback for freeing this buffer's data + /// parameter to be got for processing or passed to free + /// a combination of AV_BUFFER_FLAG_* + /// an AVBufferRef referring to data on success, NULL on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_buffer_create(byte* @data, ulong @size, av_buffer_create_free_func @free, void* @opaque, int @flags); + + /// Default free callback, which calls av_free() on the buffer data. This function is meant to be passed to av_buffer_create(), not called directly. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_buffer_default_free(void* @opaque, byte* @data); + + /// Returns the opaque parameter set by av_buffer_create. + /// the opaque parameter set by av_buffer_create. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_buffer_get_opaque(AVBufferRef* @buf); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffer_get_ref_count(AVBufferRef* @buf); + + /// Returns 1 if the caller may write to the data referred to by buf (which is true if and only if buf is the only reference to the underlying AVBuffer). Return 0 otherwise. A positive answer is valid until av_buffer_ref() is called on buf. + /// 1 if the caller may write to the data referred to by buf (which is true if and only if buf is the only reference to the underlying AVBuffer). Return 0 otherwise. A positive answer is valid until av_buffer_ref() is called on buf. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffer_is_writable(AVBufferRef* @buf); + + /// Create a writable reference from a given buffer reference, avoiding data copy if possible. + /// buffer reference to make writable. On success, buf is either left untouched, or it is unreferenced and a new writable AVBufferRef is written in its place. On failure, buf is left untouched. + /// 0 on success, a negative AVERROR on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffer_make_writable(AVBufferRef** @buf); + + /// Query the original opaque parameter of an allocated buffer in the pool. + /// a buffer reference to a buffer returned by av_buffer_pool_get. + /// the opaque parameter set by the buffer allocator function of the buffer pool. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_buffer_pool_buffer_get_opaque(AVBufferRef* @ref); + + /// Allocate a new AVBuffer, reusing an old buffer from the pool when available. This function may be called simultaneously from multiple threads. + /// a reference to the new buffer on success, NULL on error. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_buffer_pool_get(AVBufferPool* @pool); + + /// Allocate and initialize a buffer pool. + /// size of each buffer in this pool + /// a function that will be used to allocate new buffers when the pool is empty. May be NULL, then the default allocator will be used (av_buffer_alloc()). + /// newly created buffer pool on success, NULL on error. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferPool* av_buffer_pool_init(ulong @size, av_buffer_pool_init_alloc_func @alloc); + + /// Allocate and initialize a buffer pool with a more complex allocator. + /// size of each buffer in this pool + /// arbitrary user data used by the allocator + /// a function that will be used to allocate new buffers when the pool is empty. May be NULL, then the default allocator will be used (av_buffer_alloc()). + /// a function that will be called immediately before the pool is freed. I.e. after av_buffer_pool_uninit() is called by the caller and all the frames are returned to the pool and freed. It is intended to uninitialize the user opaque data. May be NULL. + /// newly created buffer pool on success, NULL on error. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferPool* av_buffer_pool_init2(ulong @size, void* @opaque, av_buffer_pool_init2_alloc_func @alloc, av_buffer_pool_init2_pool_free_func @pool_free); + + /// Mark the pool as being available for freeing. 
It will actually be freed only once all the allocated buffers associated with the pool are released. Thus it is safe to call this function while some of the allocated buffers are still in use. + /// pointer to the pool to be freed. It will be set to NULL. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_buffer_pool_uninit(AVBufferPool** @pool); + + /// Reallocate a given buffer. + /// a buffer reference to reallocate. On success, buf will be unreferenced and a new reference with the required size will be written in its place. On failure buf will be left untouched. *buf may be NULL, then a new buffer is allocated. + /// required new buffer size. + /// 0 on success, a negative AVERROR on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffer_realloc(AVBufferRef** @buf, ulong @size); + + /// Create a new reference to an AVBuffer. + /// a new AVBufferRef referring to the same AVBuffer as buf or NULL on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_buffer_ref(AVBufferRef* @buf); + + /// Ensure dst refers to the same data as src. + /// Pointer to either a valid buffer reference or NULL. On success, this will point to a buffer reference equivalent to src. On failure, dst will be left untouched. + /// A buffer reference to replace dst with. May be NULL, then this function is equivalent to av_buffer_unref(dst). + /// 0 on success AVERROR(ENOMEM) on memory allocation failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffer_replace(AVBufferRef** @dst, AVBufferRef* @src); + + /// Free a given reference and automatically free the buffer if there are no more references to it. + /// the reference to be freed. The pointer is set to NULL on return. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_buffer_unref(AVBufferRef** @buf); + + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_ch_layout(AVFilterContext* @ctx, AVChannelLayout* @ch_layout); + + [Obsolete()] + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern ulong av_buffersink_get_channel_layout(AVFilterContext* @ctx); + + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_channels(AVFilterContext* @ctx); + + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_format(AVFilterContext* @ctx); + + /// Get a frame with filtered data from sink and put it in frame. + /// pointer to a context of a buffersink or abuffersink AVFilter. + /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() + /// - >= 0 if a frame was successfully returned. - AVERROR(EAGAIN) if no frames are available at this point; more input frames must be added to the filtergraph to get more output. - AVERROR_EOF if there will be no more output frames on this sink. - A different negative AVERROR code in other failure cases. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_frame(AVFilterContext* @ctx, AVFrame* @frame); + + /// Get a frame with filtered data from sink and put it in frame. + /// pointer to a buffersink or abuffersink filter context. + /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() + /// a combination of AV_BUFFERSINK_FLAG_* flags + /// >= 0 for success, a negative AVERROR code for failure. 
+ [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_frame_flags(AVFilterContext* @ctx, AVFrame* @frame, int @flags); + + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_buffersink_get_frame_rate(AVFilterContext* @ctx); + + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_h(AVFilterContext* @ctx); + + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_buffersink_get_hw_frames_ctx(AVFilterContext* @ctx); + + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_buffersink_get_sample_aspect_ratio(AVFilterContext* @ctx); + + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_sample_rate(AVFilterContext* @ctx); + + /// Same as av_buffersink_get_frame(), but with the ability to specify the number of samples read. This function is less efficient than av_buffersink_get_frame(), because it copies the data around. + /// pointer to a context of the abuffersink AVFilter. + /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() frame will contain exactly nb_samples audio samples, except at the end of stream, when it can contain less than nb_samples. + /// The return codes have the same meaning as for av_buffersink_get_frame(). 
+ [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_samples(AVFilterContext* @ctx, AVFrame* @frame, int @nb_samples); + + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_buffersink_get_time_base(AVFilterContext* @ctx); + + /// Get the properties of the stream @{ + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVMediaType av_buffersink_get_type(AVFilterContext* @ctx); + + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_w(AVFilterContext* @ctx); + + /// Create an AVBufferSinkParams structure. + [Obsolete()] + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferSinkParams* av_buffersink_params_alloc(); + + /// Set the frame size for an audio buffer sink. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_buffersink_set_frame_size(AVFilterContext* @ctx, uint @frame_size); + + /// Add a frame to the buffer source. + /// an instance of the buffersrc filter + /// frame to be added. If the frame is reference counted, this function will take ownership of the reference(s) and reset the frame. Otherwise the frame data will be copied. If this function returns an error, the input frame is not touched. + /// 0 on success, a negative AVERROR on error. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersrc_add_frame(AVFilterContext* @ctx, AVFrame* @frame); + + /// Add a frame to the buffer source. 
+ /// pointer to a buffer source context + /// a frame, or NULL to mark EOF + /// a combination of AV_BUFFERSRC_FLAG_* + /// >= 0 in case of success, a negative AVERROR code in case of failure + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersrc_add_frame_flags(AVFilterContext* @buffer_src, AVFrame* @frame, int @flags); + + /// Close the buffer source after EOF. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersrc_close(AVFilterContext* @ctx, long @pts, uint @flags); + + /// Get the number of failed requests. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_buffersrc_get_nb_failed_requests(AVFilterContext* @buffer_src); + + /// Allocate a new AVBufferSrcParameters instance. It should be freed by the caller with av_free(). + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferSrcParameters* av_buffersrc_parameters_alloc(); + + /// Initialize the buffersrc or abuffersrc filter with the provided parameters. This function may be called multiple times, the later calls override the previous ones. Some of the parameters may also be set through AVOptions, then whatever method is used last takes precedence. + /// an instance of the buffersrc or abuffersrc filter + /// the stream parameters. The frames later passed to this filter must conform to those parameters. All the allocated fields in param remain owned by the caller, libavfilter will make internal copies or references when necessary. + /// 0 on success, a negative AVERROR code on failure. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersrc_parameters_set(AVFilterContext* @ctx, AVBufferSrcParameters* @param); + + /// Add a frame to the buffer source. + /// an instance of the buffersrc filter + /// frame to be added. 
If the frame is reference counted, this function will make a new reference to it. Otherwise the frame data will be copied. + /// 0 on success, a negative AVERROR on error + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersrc_write_frame(AVFilterContext* @ctx, AVFrame* @frame); + + /// Allocate a memory block for an array with av_mallocz(). + /// Number of elements + /// Size of the single element + /// Pointer to the allocated block, or `NULL` if the block cannot be allocated + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_calloc(ulong @nmemb, ulong @size); + + /// Get a human readable string describing a given channel. + /// pre-allocated buffer where to put the generated string + /// size in bytes of the buffer. + /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_description(byte* @buf, ulong @buf_size, AVChannel @channel); + + /// bprint variant of av_channel_description(). + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_channel_description_bprint(AVBPrint* @bp, AVChannel @channel_id); + + /// This is the inverse function of av_channel_name(). + /// the channel with the given name AV_CHAN_NONE when name does not identify a known channel + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVChannel av_channel_from_string( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Get the channel with the given index in a channel layout. 
+ /// input channel layout + /// channel with the index idx in channel_layout on success or AV_CHAN_NONE on failure (if idx is not valid or the channel order is unspecified) + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVChannel av_channel_layout_channel_from_index(AVChannelLayout* @channel_layout, uint @idx); + + /// Get a channel described by the given string. + /// input channel layout + /// a channel described by the given string in channel_layout on success or AV_CHAN_NONE on failure (if the string is not valid or the channel order is unspecified) + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVChannel av_channel_layout_channel_from_string(AVChannelLayout* @channel_layout, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Check whether a channel layout is valid, i.e. can possibly describe audio data. + /// input channel layout + /// 1 if channel_layout is valid, 0 otherwise. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_check(AVChannelLayout* @channel_layout); + + /// Check whether two channel layouts are semantically the same, i.e. the same channels are present on the same positions in both. + /// input channel layout + /// input channel layout + /// 0 if chl and chl1 are equal, 1 if they are not equal. A negative AVERROR code if one or both are invalid. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_compare(AVChannelLayout* @chl, AVChannelLayout* @chl1); + + /// Make a copy of a channel layout. This differs from just assigning src to dst in that it allocates and copies the map for AV_CHANNEL_ORDER_CUSTOM. 
+ /// destination channel layout + /// source channel layout + /// 0 on success, a negative AVERROR on error. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_copy(AVChannelLayout* @dst, AVChannelLayout* @src); + + /// Get the default channel layout for a given number of channels. + /// number of channels + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_channel_layout_default(AVChannelLayout* @ch_layout, int @nb_channels); + + /// Get a human-readable string describing the channel layout properties. The string will be in the same format that is accepted by av_channel_layout_from_string(), allowing to rebuild the same channel layout, except for opaque pointers. + /// channel layout to be described + /// pre-allocated buffer where to put the generated string + /// size in bytes of the buffer. + /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_describe(AVChannelLayout* @channel_layout, byte* @buf, ulong @buf_size); + + /// bprint variant of av_channel_layout_describe(). + /// 0 on success, or a negative AVERROR value on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_describe_bprint(AVChannelLayout* @channel_layout, AVBPrint* @bp); + + /// Get the channel with the given index in channel_layout. + [Obsolete("use av_channel_layout_channel_from_index()")] + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern ulong av_channel_layout_extract_channel(ulong @channel_layout, int @index); + + /// Initialize a native channel layout from a bitmask indicating which channels are present. 
+ /// the layout structure to be initialized + /// bitmask describing the channel layout + /// 0 on success AVERROR(EINVAL) for invalid mask values + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_from_mask(AVChannelLayout* @channel_layout, ulong @mask); + + /// Initialize a channel layout from a given string description. The input string can be represented by: - the formal channel layout name (returned by av_channel_layout_describe()) - single or multiple channel names (returned by av_channel_name(), eg. "FL", or concatenated with "+", each optionally containing a custom name after a "", eg. "FL+FR+LFE") - a decimal or hexadecimal value of a native channel layout (eg. "4" or "0x4") - the number of channels with default layout (eg. "4c") - the number of unordered channels (eg. "4C" or "4 channels") - the ambisonic order followed by optional non-diegetic channels (eg. "ambisonic 2+stereo") + /// input channel layout + /// string describing the channel layout + /// 0 channel layout was detected, AVERROR_INVALIDDATA otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_from_string(AVChannelLayout* @channel_layout, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + + /// Get the index of a given channel in a channel layout. In case multiple channels are found, only the first match will be returned. + /// input channel layout + /// index of channel in channel_layout on success or a negative number if channel is not present in channel_layout. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_index_from_channel(AVChannelLayout* @channel_layout, AVChannel @channel); + + /// Get the index in a channel layout of a channel described by the given string. In case multiple channels are found, only the first match will be returned. + /// input channel layout + /// a channel index described by the given string, or a negative AVERROR value. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_index_from_string(AVChannelLayout* @channel_layout, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Iterate over all standard channel layouts. + /// a pointer where libavutil will store the iteration state. Must point to NULL to start the iteration. + /// the standard channel layout or NULL when the iteration is finished + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVChannelLayout* av_channel_layout_standard(void** @opaque); + + /// Find out what channels from a given set are present in a channel layout, without regard for their positions. + /// input channel layout + /// a combination of AV_CH_* representing a set of channels + /// a bitfield representing all the channels from mask that are present in channel_layout + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern ulong av_channel_layout_subset(AVChannelLayout* @channel_layout, ulong @mask); + + /// Free any allocated data in the channel layout and reset the channel count to 0. 
+ /// the layout structure to be uninitialized + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_channel_layout_uninit(AVChannelLayout* @channel_layout); + + /// Get a human readable string in an abbreviated form describing a given channel. This is the inverse function of av_channel_from_string(). + /// pre-allocated buffer where to put the generated string + /// size in bytes of the buffer. + /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_name(byte* @buf, ulong @buf_size, AVChannel @channel); + + /// bprint variant of av_channel_name(). + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_channel_name_bprint(AVBPrint* @bp, AVChannel @channel_id); + + /// Returns the AVChromaLocation value for name or an AVError if not found. + /// the AVChromaLocation value for name or an AVError if not found. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_chroma_location_from_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Returns the name for provided chroma location or NULL if unknown. + /// the name for provided chroma location or NULL if unknown. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_chroma_location_name(AVChromaLocation @location); + + /// Get the AVCodecID for the given codec tag tag. If no codec id is found returns AV_CODEC_ID_NONE. 
+ /// list of supported codec_id-codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + /// codec tag to match to a codec ID + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecID av_codec_get_id(AVCodecTag** @tags, uint @tag); + + /// Get the codec tag for the given codec id id. If no codec tag is found returns 0. + /// list of supported codec_id-codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + /// codec ID to match to a codec tag + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_codec_get_tag(AVCodecTag** @tags, AVCodecID @id); + + /// Get the codec tag for the given codec id. + /// list of supported codec_id - codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + /// codec id that should be searched for in the list + /// A pointer to the found tag + /// 0 if id was not found in tags, > 0 if it was found + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_codec_get_tag2(AVCodecTag** @tags, AVCodecID @id, uint* @tag); + + /// Returns a non-zero number if codec is a decoder, zero otherwise + /// a non-zero number if codec is a decoder, zero otherwise + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_codec_is_decoder(AVCodec* @codec); + + /// Returns a non-zero number if codec is an encoder, zero otherwise + /// a non-zero number if codec is an encoder, zero otherwise + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_codec_is_encoder(AVCodec* @codec); + + /// Iterate over all registered codecs. + /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. 
+ /// the next registered codec or NULL when the iteration is finished + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodec* av_codec_iterate(void** @opaque); + + /// Returns the AVColorPrimaries value for name or an AVError if not found. + /// the AVColorPrimaries value for name or an AVError if not found. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_color_primaries_from_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Returns the name for provided color primaries or NULL if unknown. + /// the name for provided color primaries or NULL if unknown. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_color_primaries_name(AVColorPrimaries @primaries); + + /// Returns the AVColorRange value for name or an AVError if not found. + /// the AVColorRange value for name or an AVError if not found. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_color_range_from_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Returns the name for provided color range or NULL if unknown. + /// the name for provided color range or NULL if unknown. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_color_range_name(AVColorRange @range); + + /// Returns the AVColorSpace value for name or an AVError if not found. 
+ /// the AVColorSpace value for name or an AVError if not found. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_color_space_from_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Returns the name for provided color space or NULL if unknown. + /// the name for provided color space or NULL if unknown. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_color_space_name(AVColorSpace @space); + + /// Returns the AVColorTransferCharacteristic value for name or an AVError if not found. + /// the AVColorTransferCharacteristic value for name or an AVError if not found. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_color_transfer_from_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Returns the name for provided color transfer or NULL if unknown. + /// the name for provided color transfer or NULL if unknown. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_color_transfer_name(AVColorTransferCharacteristic @transfer); + + /// Compare the remainders of two integer operands divided by a common divisor. 
+ /// Divisor; must be a power of 2 + /// - a negative value if `a % mod < b % mod` - a positive value if `a % mod > b % mod` - zero if `a % mod == b % mod` + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_compare_mod(ulong @a, ulong @b, ulong @mod); + + /// Compare two timestamps each in its own time base. + /// One of the following values: - -1 if `ts_a` is before `ts_b` - 1 if `ts_a` is after `ts_b` - 0 if they represent the same position + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_compare_ts(long @ts_a, AVRational @tb_a, long @ts_b, AVRational @tb_b); + + /// Allocate an AVContentLightMetadata structure and set its fields to default values. The resulting struct can be freed using av_freep(). + /// An AVContentLightMetadata filled with default values or NULL on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVContentLightMetadata* av_content_light_metadata_alloc(ulong* @size); + + /// Allocate a complete AVContentLightMetadata and add it to the frame. + /// The frame which side data is added to. + /// The AVContentLightMetadata structure to be filled by caller. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVContentLightMetadata* av_content_light_metadata_create_side_data(AVFrame* @frame); + + /// Allocate a CPB properties structure and initialize its fields to default values. + /// if non-NULL, the size of the allocated struct will be written here. This is useful for embedding it in side data. + /// the newly allocated struct or NULL on failure + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCPBProperties* av_cpb_properties_alloc(ulong* @size); + + /// Returns the number of logical CPU cores present. + /// the number of logical CPU cores present. 
// NOTE(review): auto-generated P/Invoke bindings (FFmpeg.AutoGen generator output).
// Do not hand-edit the declarations — regenerate instead. Comments below restore the
// XML documentation structure that was lost when this chunk was flattened.

[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_cpu_count();

/// <summary>Overrides cpu count detection and forces the specified count. Count &lt; 1 disables forcing of specific count.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_cpu_force_count(int @count);

/// <summary>Get the maximum data alignment that may be required by FFmpeg.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern ulong av_cpu_max_align();

/// <summary>Convert a double precision floating point number to a rational.</summary>
/// <param name="d">`double` to convert</param>
/// <param name="max">Maximum allowed numerator and denominator</param>
/// <returns>`d` in AVRational form</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVRational av_d2q(double @d, int @max);

/// <summary>Allocate an AVD3D11VAContext.</summary>
/// <returns>Newly-allocated AVD3D11VAContext or NULL on failure.</returns>
[DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)]
public static extern AVD3D11VAContext* av_d3d11va_alloc_context();

[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVClassCategory av_default_get_category(void* @ptr);

/// <summary>Return the context name</summary>
/// <param name="ctx">The AVClass context</param>
/// <returns>The AVClass class_name</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string av_default_item_name(void* @ctx);

/// <summary>Iterate over all registered demuxers.</summary>
/// <param name="opaque">a pointer where libavformat will store the iteration state. Must point to NULL to start the iteration.</param>
/// <returns>the next registered demuxer or NULL when the iteration is finished</returns>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern AVInputFormat* av_demuxer_iterate(void** @opaque);

/// <summary>Copy entries from one AVDictionary struct into another.</summary>
/// <param name="dst">pointer to a pointer to a AVDictionary struct. If *dst is NULL, this function will allocate a struct for you and put it in *dst</param>
/// <param name="src">pointer to source AVDictionary struct</param>
/// <param name="flags">flags to use when setting entries in *dst</param>
/// <returns>0 on success, negative AVERROR code on failure. If dst was allocated by this function, callers should free the associated memory.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_dict_copy(AVDictionary** @dst, AVDictionary* @src, int @flags);

/// <summary>Get number of entries in dictionary.</summary>
/// <param name="m">dictionary</param>
/// <returns>number of entries in dictionary</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_dict_count(AVDictionary* @m);

/// <summary>Free all the memory allocated for an AVDictionary struct and all keys and values.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_dict_free(AVDictionary** @m);

/// <summary>Get a dictionary entry with matching key.</summary>
/// <param name="key">matching key</param>
/// <param name="prev">Set to the previous matching element to find the next. If set to NULL the first matching element is returned.</param>
/// <param name="flags">a collection of AV_DICT_* flags controlling how the entry is retrieved</param>
/// <returns>found entry or NULL in case no matching entry was found in the dictionary</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVDictionaryEntry* av_dict_get(AVDictionary* @m,
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @key, AVDictionaryEntry* @prev, int @flags);

/// <summary>Get dictionary entries as a string.</summary>
/// <param name="m">dictionary</param>
/// <param name="buffer">Pointer to buffer that will be allocated with string containing entries. Buffer must be freed by the caller when is no longer needed.</param>
/// <param name="key_val_sep">character used to separate key from value</param>
/// <param name="pairs_sep">character used to separate two pairs from each other</param>
/// <returns>&gt;= 0 on success, negative on error</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_dict_get_string(AVDictionary* @m, byte** @buffer, byte @key_val_sep, byte @pairs_sep);

/// <summary>Parse the key/value pairs list and add the parsed entries to a dictionary.</summary>
/// <param name="key_val_sep">a 0-terminated list of characters used to separate key from value</param>
/// <param name="pairs_sep">a 0-terminated list of characters used to separate two pairs from each other</param>
/// <param name="flags">flags to use when adding to dictionary. AV_DICT_DONT_STRDUP_KEY and AV_DICT_DONT_STRDUP_VAL are ignored since the key/value tokens will always be duplicated.</param>
/// <returns>0 on success, negative AVERROR code on failure</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_dict_parse_string(AVDictionary** @pm,
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @str,
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @key_val_sep,
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @pairs_sep, int @flags);

/// <summary>Set the given entry in *pm, overwriting an existing entry.</summary>
/// <param name="pm">pointer to a pointer to a dictionary struct. If *pm is NULL a dictionary struct is allocated and put in *pm.</param>
/// <param name="key">entry key to add to *pm (will either be av_strduped or added as a new key depending on flags)</param>
/// <param name="value">entry value to add to *pm (will be av_strduped or added as a new key depending on flags). Passing a NULL value will cause an existing entry to be deleted.</param>
/// <returns>&gt;= 0 on success otherwise an error code &lt; 0</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_dict_set(AVDictionary** @pm,
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @key,
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @value, int @flags);

/// <summary>Convenience wrapper for av_dict_set that converts the value to a string and stores it.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_dict_set_int(AVDictionary** @pm,
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @key, long @value, int @flags);

/// <summary>Returns The AV_DISPOSITION_* flag corresponding to disp or a negative error code if disp does not correspond to a known stream disposition.</summary>
/// <returns>The AV_DISPOSITION_* flag corresponding to disp or a negative error code if disp does not correspond to a known stream disposition.</returns>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_disposition_from_string(
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @disp);

/// <summary>Returns The string description corresponding to the lowest set bit in disposition. NULL when the lowest set bit does not correspond to a known disposition or when disposition is 0.</summary>
/// <param name="disposition">a combination of AV_DISPOSITION_* values</param>
/// <returns>The string description corresponding to the lowest set bit in disposition. NULL when the lowest set bit does not correspond to a known disposition or when disposition is 0.</returns>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string av_disposition_to_string(int @disposition);

/// <summary>Divide one rational by another.</summary>
/// <param name="b">First rational</param>
/// <param name="c">Second rational</param>
/// <returns>b/c</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVRational av_div_q(AVRational @b, AVRational @c);

/// <summary>Print detailed information about the input or output format, such as duration, bitrate, streams, container, programs, metadata, side data, codec and time base.</summary>
/// <param name="ic">the context to analyze</param>
/// <param name="index">index of the stream to dump information about</param>
/// <param name="url">the URL to print, such as source or destination file</param>
/// <param name="is_output">Select whether the specified context is an input(0) or output(1)</param>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_dump_format(AVFormatContext* @ic, int @index,
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @url, int @is_output);

/// <summary>Allocate an AVDynamicHDRPlus structure and set its fields to default values. The resulting struct can be freed using av_freep().</summary>
/// <returns>An AVDynamicHDRPlus filled with default values or NULL on failure.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVDynamicHDRPlus* av_dynamic_hdr_plus_alloc(ulong* @size);

/// <summary>Allocate a complete AVDynamicHDRPlus and add it to the frame.</summary>
/// <param name="frame">The frame which side data is added to.</param>
/// <returns>The AVDynamicHDRPlus structure to be filled by caller or NULL on failure.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVDynamicHDRPlus* av_dynamic_hdr_plus_create_side_data(AVFrame* @frame);

/// <summary>Add the pointer to an element to a dynamic array.</summary>
/// <param name="tab_ptr">Pointer to the array to grow</param>
/// <param name="nb_ptr">Pointer to the number of elements in the array</param>
/// <param name="elem">Element to add</param>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_dynarray_add(void* @tab_ptr, int* @nb_ptr, void* @elem);

/// <summary>Add an element to a dynamic array.</summary>
/// <returns>&gt;=0 on success, negative otherwise</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_dynarray_add_nofree(void* @tab_ptr, int* @nb_ptr, void* @elem);

/// <summary>Add an element of size `elem_size` to a dynamic array.</summary>
/// <param name="tab_ptr">Pointer to the array to grow</param>
/// <param name="nb_ptr">Pointer to the number of elements in the array</param>
/// <param name="elem_size">Size in bytes of an element in the array</param>
/// <param name="elem_data">Pointer to the data of the element to add. If `NULL`, the space of the newly added element is allocated but left uninitialized.</param>
/// <returns>Pointer to the data of the element to copy in the newly allocated space</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void* av_dynarray2_add(void** @tab_ptr, int* @nb_ptr, ulong @elem_size, byte* @elem_data);

/// <summary>Allocate a buffer, reusing the given one if large enough.</summary>
/// <param name="ptr">Pointer to pointer to an already allocated buffer. `*ptr` will be overwritten with pointer to new buffer on success or `NULL` on failure</param>
/// <param name="size">Pointer to the size of buffer `*ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure.</param>
/// <param name="min_size">Desired minimal size of buffer `*ptr`</param>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_fast_malloc(void* @ptr, uint* @size, ulong @min_size);

/// <summary>Allocate and clear a buffer, reusing the given one if large enough.</summary>
/// <param name="ptr">Pointer to pointer to an already allocated buffer. `*ptr` will be overwritten with pointer to new buffer on success or `NULL` on failure</param>
/// <param name="size">Pointer to the size of buffer `*ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure.</param>
/// <param name="min_size">Desired minimal size of buffer `*ptr`</param>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_fast_mallocz(void* @ptr, uint* @size, ulong @min_size);

/// <summary>Same behaviour av_fast_malloc but the buffer has additional AV_INPUT_BUFFER_PADDING_SIZE at the end which will always be 0.</summary>
[DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_fast_padded_malloc(void* @ptr, uint* @size, ulong @min_size);

/// <summary>Same behaviour av_fast_padded_malloc except that buffer will always be 0-initialized after call.</summary>
[DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_fast_padded_mallocz(void* @ptr, uint* @size, ulong @min_size);

/// <summary>Reallocate the given buffer if it is not large enough, otherwise do nothing.</summary>
/// <param name="ptr">Already allocated buffer, or `NULL`</param>
/// <param name="size">Pointer to the size of buffer `ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure.</param>
/// <param name="min_size">Desired minimal size of buffer `ptr`</param>
/// <returns>`ptr` if the buffer is large enough, a pointer to newly reallocated buffer if the buffer was not large enough, or `NULL` in case of error</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void* av_fast_realloc(void* @ptr, uint* @size, ulong @min_size);

/// <summary>Read the file with name filename, and put its content in a newly allocated buffer or map it with mmap() when available. In case of success set *bufptr to the read or mmapped buffer, and *size to the size in bytes of the buffer in *bufptr. Unlike mmap this function succeeds with zero sized files, in this case *bufptr will be set to NULL and *size will be set to 0. The returned buffer must be released with av_file_unmap().</summary>
/// <param name="log_offset">loglevel offset used for logging</param>
/// <param name="log_ctx">context used for logging</param>
/// <returns>a non negative number in case of success, a negative value corresponding to an AVERROR error code in case of failure</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_file_map(
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @filename, byte** @bufptr, ulong* @size, int @log_offset, void* @log_ctx);

/// <summary>Unmap or free the buffer bufptr created by av_file_map().</summary>
/// <param name="size">size in bytes of bufptr, must be the same as returned by av_file_map()</param>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_file_unmap(byte* @bufptr, ulong @size);

/// <summary>Check whether filename actually is a numbered sequence generator.</summary>
/// <param name="filename">possible numbered sequence string</param>
/// <returns>1 if a valid numbered sequence string, 0 otherwise</returns>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_filename_number_test(
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @filename);

/// <summary>Iterate over all registered filters.</summary>
/// <param name="opaque">a pointer where libavfilter will store the iteration state. Must point to NULL to start the iteration.</param>
/// <returns>the next registered filter or NULL when the iteration is finished</returns>
[DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)]
public static extern AVFilter* av_filter_iterate(void** @opaque);

/// <summary>Compute what kind of losses will occur when converting from one specific pixel format to another. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. These losses can involve loss of chroma, but also loss of resolution, loss of color depth, loss due to the color space conversion, loss of the alpha bits or loss due to color quantization. av_get_fix_fmt_loss() informs you about the various types of losses which will occur when converting from one pixel format to another.</summary>
/// <param name="src_pix_fmt">source pixel format</param>
/// <param name="has_alpha">Whether the source pixel format alpha channel is used.</param>
/// <returns>Combination of flags informing you what kind of losses will occur (maximum loss for an invalid dst_pix_fmt).</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVPixelFormat av_find_best_pix_fmt_of_2(AVPixelFormat @dst_pix_fmt1, AVPixelFormat @dst_pix_fmt2, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr);

/// <summary>Find the "best" stream in the file. The best stream is determined according to various heuristics as the most likely to be what the user expects. If the decoder parameter is non-NULL, av_find_best_stream will find the default decoder for the stream's codec; streams for which no decoder can be found are ignored.</summary>
/// <param name="ic">media file handle</param>
/// <param name="type">stream type: video, audio, subtitles, etc.</param>
/// <param name="wanted_stream_nb">user-requested stream number, or -1 for automatic selection</param>
/// <param name="related_stream">try to find a stream related (eg. in the same program) to this one, or -1 if none</param>
/// <param name="decoder_ret">if non-NULL, returns the decoder for the selected stream</param>
/// <param name="flags">flags; none are currently defined</param>
/// <returns>the non-negative stream number in case of success, AVERROR_STREAM_NOT_FOUND if no stream with the requested type could be found, AVERROR_DECODER_NOT_FOUND if streams were found but no decoder</returns>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_find_best_stream(AVFormatContext* @ic, AVMediaType @type, int @wanted_stream_nb, int @related_stream, AVCodec** @decoder_ret, int @flags);

[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_find_default_stream_index(AVFormatContext* @s);

/// <summary>Find AVInputFormat based on the short name of the input format.</summary>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern AVInputFormat* av_find_input_format(
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @short_name);

/// <summary>Find the value in a list of rationals nearest a given reference rational.</summary>
/// <param name="q">Reference rational</param>
/// <param name="q_list">Array of rationals terminated by `{0, 0}`</param>
/// <returns>Index of the nearest value found in the array</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_find_nearest_q_idx(AVRational @q, AVRational* @q_list);

/// <summary>Find the programs which belong to a given stream.</summary>
/// <param name="ic">media file handle</param>
/// <param name="last">the last found program, the search will start after this program, or from the beginning if it is NULL</param>
/// <param name="s">stream index</param>
/// <returns>the next program which belongs to s, NULL if no program is found or the last program is not among the programs of ic.</returns>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern AVProgram* av_find_program_from_stream(AVFormatContext* @ic, AVProgram* @last, int @s);

/// <summary>Returns the method used to set ctx-&gt;duration.</summary>
/// <returns>AVFMT_DURATION_FROM_PTS, AVFMT_DURATION_FROM_STREAM, or AVFMT_DURATION_FROM_BITRATE.</returns>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern AVDurationEstimationMethod av_fmt_ctx_get_duration_estimation_method(AVFormatContext* @ctx);

/// <summary>Open a file using a UTF-8 filename. The API of this function matches POSIX fopen(), errors are returned through errno.</summary>
[Obsolete("Avoid using it, as on Windows, the FILE* allocated by this function may be allocated with a different CRT than the caller who uses the FILE*. No replacement provided in public API.")]
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern _iobuf* av_fopen_utf8(
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @path,
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @mode);

/// <summary>Disables cpu detection and forces the specified flags. -1 is a special case that disables forcing of specific flags.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_force_cpu_flags(int @flags);

/// <summary>This function will cause global side data to be injected in the next packet of each stream as well as after any subsequent seek.</summary>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_format_inject_global_side_data(AVFormatContext* @s);

/// <summary>Fill the provided buffer with a string containing a FourCC (four-character code) representation.</summary>
/// <param name="buf">a buffer with size in bytes of at least AV_FOURCC_MAX_STRING_SIZE</param>
/// <param name="fourcc">the fourcc to represent</param>
/// <returns>the buffer in input</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern byte* av_fourcc_make_string(byte* @buf, uint @fourcc);

/// <summary>Allocate an AVFrame and set its fields to default values. The resulting struct must be freed using av_frame_free().</summary>
/// <returns>An AVFrame filled with default values or NULL on failure.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVFrame* av_frame_alloc();

/// <summary>Crop the given video AVFrame according to its crop_left/crop_top/crop_right/ crop_bottom fields. If cropping is successful, the function will adjust the data pointers and the width/height fields, and set the crop fields to 0.</summary>
/// <param name="frame">the frame which should be cropped</param>
/// <param name="flags">Some combination of AV_FRAME_CROP_* flags, or 0.</param>
/// <returns>&gt;= 0 on success, a negative AVERROR on error. If the cropping fields were invalid, AVERROR(ERANGE) is returned, and nothing is changed.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_frame_apply_cropping(AVFrame* @frame, int @flags);

/// <summary>Create a new frame that references the same data as src.</summary>
/// <returns>newly created AVFrame on success, NULL on error.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVFrame* av_frame_clone(AVFrame* @src);

/// <summary>Copy the frame data from src to dst.</summary>
/// <returns>&gt;= 0 on success, a negative AVERROR on error.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_frame_copy(AVFrame* @dst, AVFrame* @src);

/// <summary>Copy only "metadata" fields from src to dst.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_frame_copy_props(AVFrame* @dst, AVFrame* @src);

/// <summary>Free the frame and any dynamically allocated objects in it, e.g. extended_data. If the frame is reference counted, it will be unreferenced first.</summary>
/// <param name="frame">frame to be freed. The pointer will be set to NULL.</param>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_frame_free(AVFrame** @frame);

/// <summary>Allocate new buffer(s) for audio or video data.</summary>
/// <param name="frame">frame in which to store the new buffers.</param>
/// <param name="align">Required buffer size alignment. If equal to 0, alignment will be chosen automatically for the current CPU. It is highly recommended to pass 0 here unless you know what you are doing.</param>
/// <returns>0 on success, a negative AVERROR on error.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_frame_get_buffer(AVFrame* @frame, int @align);

/// <summary>Get the buffer reference a given data plane is stored in.</summary>
/// <param name="plane">index of the data plane of interest in frame-&gt;extended_data.</param>
/// <returns>the buffer reference that contains the plane or NULL if the input frame is not valid.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVBufferRef* av_frame_get_plane_buffer(AVFrame* @frame, int @plane);

/// <summary>Returns a pointer to the side data of a given type on success, NULL if there is no side data with such type in this frame.</summary>
/// <returns>a pointer to the side data of a given type on success, NULL if there is no side data with such type in this frame.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVFrameSideData* av_frame_get_side_data(AVFrame* @frame, AVFrameSideDataType @type);

/// <summary>Check if the frame data is writable.</summary>
/// <returns>A positive value if the frame data is writable (which is true if and only if each of the underlying buffers has only one reference, namely the one stored in this frame). Return 0 otherwise.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_frame_is_writable(AVFrame* @frame);

/// <summary>Ensure that the frame data is writable, avoiding data copy if possible.</summary>
/// <returns>0 on success, a negative AVERROR on error.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_frame_make_writable(AVFrame* @frame);

/// <summary>Move everything contained in src to dst and reset src.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_frame_move_ref(AVFrame* @dst, AVFrame* @src);

/// <summary>Add a new side data to a frame.</summary>
/// <param name="frame">a frame to which the side data should be added</param>
/// <param name="type">type of the added side data</param>
/// <param name="size">size of the side data</param>
/// <returns>newly added side data on success, NULL on error</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVFrameSideData* av_frame_new_side_data(AVFrame* @frame, AVFrameSideDataType @type, ulong @size);

/// <summary>Add a new side data to a frame from an existing AVBufferRef</summary>
/// <param name="frame">a frame to which the side data should be added</param>
/// <param name="type">the type of the added side data</param>
/// <param name="buf">an AVBufferRef to add as side data. The ownership of the reference is transferred to the frame.</param>
/// <returns>newly added side data on success, NULL on error. On failure the frame is unchanged and the AVBufferRef remains owned by the caller.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVFrameSideData* av_frame_new_side_data_from_buf(AVFrame* @frame, AVFrameSideDataType @type, AVBufferRef* @buf);

/// <summary>Set up a new reference to the data described by the source frame.</summary>
/// <returns>0 on success, a negative AVERROR on error</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_frame_ref(AVFrame* @dst, AVFrame* @src);

/// <summary>Remove and free all side data instances of the given type.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_frame_remove_side_data(AVFrame* @frame, AVFrameSideDataType @type);

/// <summary>Returns a string identifying the side data type</summary>
/// <returns>a string identifying the side data type</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string av_frame_side_data_name(AVFrameSideDataType @type);

/// <summary>Unreference all the buffers referenced by frame and reset the frame fields.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_frame_unref(AVFrame* @frame);

/// <summary>Free a memory block which has been allocated with a function of av_malloc() or av_realloc() family.</summary>
/// <param name="ptr">Pointer to the memory block which should be freed.</param>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_free(void* @ptr);

/// <summary>Free a memory block which has been allocated with a function of av_malloc() or av_realloc() family, and set the pointer pointing to it to `NULL`.</summary>
/// <param name="ptr">Pointer to the pointer to the memory block which should be freed</param>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_freep(void* @ptr);

/// <summary>Compute the greatest common divisor of two integer operands.</summary>
/// <returns>GCD of a and b up to sign; if a &gt;= 0 and b &gt;= 0, return value is &gt;= 0; if a == 0 and b == 0, returns 0.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern long av_gcd(long @a, long @b);

/// <summary>Return the best rational so that a and b are multiple of it. If the resulting denominator is larger than max_den, return def.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVRational av_gcd_q(AVRational @a, AVRational @b, int @max_den, AVRational @def);

/// <summary>Return the planar&lt;-&gt;packed alternative form of the given sample format, or AV_SAMPLE_FMT_NONE on error. If the passed sample_fmt is already in the requested planar/packed format, the format returned is the same as the input.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVSampleFormat av_get_alt_sample_fmt(AVSampleFormat @sample_fmt, int @planar);

/// <summary>Return audio frame duration.</summary>
/// <param name="avctx">codec context</param>
/// <param name="frame_bytes">size of the frame, or 0 if unknown</param>
/// <returns>frame duration, in samples, if known. 0 if not able to determine.</returns>
[DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_audio_frame_duration(AVCodecContext* @avctx, int @frame_bytes);

/// <summary>This function is the same as av_get_audio_frame_duration(), except it works with AVCodecParameters instead of an AVCodecContext.</summary>
[DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_audio_frame_duration2(AVCodecParameters* @par, int @frame_bytes);

/// <summary>Return the number of bits per pixel used by the pixel format described by pixdesc. Note that this is not the same as the number of bits per sample.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_bits_per_pixel(AVPixFmtDescriptor* @pixdesc);

/// <summary>Return codec bits per sample.</summary>
/// <param name="codec_id">the codec</param>
/// <returns>Number of bits per sample or zero if unknown for the given codec.</returns>
[DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_bits_per_sample(AVCodecID @codec_id);

/// <summary>Return number of bytes per sample.</summary>
/// <param name="sample_fmt">the sample format</param>
/// <returns>number of bytes per sample or zero if unknown for the given sample format</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_bytes_per_sample(AVSampleFormat @sample_fmt);

/// <summary>Get the description of a given channel.</summary>
/// <param name="channel">a channel layout with a single channel</param>
/// <returns>channel description on success, NULL on error</returns>
[Obsolete("use av_channel_description()")]
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string av_get_channel_description(ulong @channel);

/// <summary>Return a channel layout id that matches name, or 0 if no match is found.</summary>
[Obsolete("use av_channel_layout_from_string()")]
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern ulong av_get_channel_layout(
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @name);

/// <summary>Get the index of a channel in channel_layout.</summary>
/// <param name="channel">a channel layout describing exactly one channel which must be present in channel_layout.</param>
/// <returns>index of channel in channel_layout on success, a negative AVERROR on error.</returns>
[Obsolete("use av_channel_layout_index_from_channel()")]
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_channel_layout_channel_index(ulong @channel_layout, ulong @channel);

/// <summary>Return the number of channels in the channel layout.</summary>
[Obsolete("use AVChannelLayout.nb_channels")]
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_channel_layout_nb_channels(ulong @channel_layout);

/// <summary>Return a description of a channel layout. If nb_channels is &lt;= 0, it is guessed from the channel_layout.</summary>
/// <param name="buf">put here the string containing the channel layout</param>
/// <param name="buf_size">size in bytes of the buffer</param>
[Obsolete("use av_channel_layout_describe()")]
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern void av_get_channel_layout_string(byte* @buf, int @buf_size, int @nb_channels, ulong @channel_layout);

/// <summary>Get the name of a given channel.</summary>
/// <returns>channel name on success, NULL on error.</returns>
[Obsolete("use av_channel_name()")]
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string av_get_channel_name(ulong @channel);

/// <summary>Get the name of a colorspace.</summary>
/// <returns>a static string identifying the colorspace; can be NULL.</returns>
[Obsolete("use av_color_space_name()")]
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string av_get_colorspace_name(AVColorSpace @val);

/// <summary>Return the flags which specify extensions supported by the CPU. The returned value is affected by av_force_cpu_flags() if that was used before. So av_get_cpu_flags() can easily be used in an application to detect the enabled cpu flags.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_cpu_flags();

/// <summary>Return default channel layout for a given number of channels.</summary>
[Obsolete("use av_channel_layout_default()")]
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern long av_get_default_channel_layout(int @nb_channels);

/// <summary>Return codec bits per sample. Only return non-zero if the bits per sample is exactly correct, not an approximation.</summary>
/// <param name="codec_id">the codec</param>
/// <returns>Number of bits per sample or zero if unknown for the given codec.</returns>
[DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_exact_bits_per_sample(AVCodecID @codec_id);

/// <summary>Return a channel layout and the number of channels based on the specified name.</summary>
/// <param name="name">channel layout specification string</param>
/// <param name="channel_layout">parsed channel layout (0 if unknown)</param>
/// <param name="nb_channels">number of channels</param>
/// <returns>0 on success, AVERROR(EINVAL) if the parsing fails.</returns>
[Obsolete("use av_channel_layout_from_string()")]
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_extended_channel_layout(
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @name, ulong* @channel_layout, int* @nb_channels);

[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_frame_filename(byte* @buf, int @buf_size,
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @path, int @number);

/// <summary>Return in 'buf' the path with '%d' replaced by a number.</summary>
/// <param name="buf">destination buffer</param>
/// <param name="buf_size">destination buffer size</param>
/// <param name="path">numbered sequence string</param>
/// <param name="number">frame number</param>
/// <param name="flags">AV_FRAME_FILENAME_FLAGS_*</param>
/// <returns>0 if OK, -1 on format error</returns>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_frame_filename2(byte* @buf, int @buf_size,
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @path, int @number, int @flags);

/// <summary>Return a string describing the media_type enum, NULL if media_type is unknown.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string av_get_media_type_string(AVMediaType @media_type);

/// <summary>Get timing information for the data currently output. The exact meaning of "currently output" depends on the format. It is mostly relevant for devices that have an internal buffer and/or work in real time.</summary>
/// <param name="s">media file handle</param>
/// <param name="stream">stream in the media file</param>
/// <param name="dts">DTS of the last packet output for the stream, in stream time_base units</param>
/// <param name="wall">absolute time when that packet was output, in microsecond</param>
/// <returns>0 if OK, AVERROR(ENOSYS) if the format does not support it Note: some formats or devices may not allow to measure dts and wall atomically.</returns>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_output_timestamp(AVFormatContext* @s, int @stream, long* @dts, long* @wall);

/// <summary>Get the packed alternative form of the given sample format.</summary>
/// <returns>the packed alternative form of the given sample format or AV_SAMPLE_FMT_NONE on error.</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVSampleFormat av_get_packed_sample_fmt(AVSampleFormat @sample_fmt);

/// <summary>Allocate and read the payload of a packet and initialize its fields with default values.</summary>
/// <param name="s">associated IO context</param>
/// <param name="pkt">packet</param>
/// <param name="size">desired payload size</param>
/// <returns>&gt;0 (read size) if OK, AVERROR_xxx otherwise</returns>
[DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_packet(AVIOContext* @s, AVPacket* @pkt, int @size);

/// <summary>Return the number of bits per pixel for the pixel format described by pixdesc, including any padding or unused bits.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern int av_get_padded_bits_per_pixel(AVPixFmtDescriptor* @pixdesc);

/// <summary>Return the PCM codec associated with a sample format.</summary>
/// <param name="be">endianness, 0 for little, 1 for big, -1 (or anything else) for native</param>
/// <returns>AV_CODEC_ID_PCM_* or AV_CODEC_ID_NONE</returns>
[DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)]
public static extern AVCodecID av_get_pcm_codec(AVSampleFormat @fmt, int @be);

/// <summary>Return a single letter to describe the given picture type pict_type.</summary>
/// <param name="pict_type">the picture type</param>
/// <returns>a single character representing the picture type, '?' if pict_type is unknown</returns>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern byte av_get_picture_type_char(AVPictureType @pict_type);

/// <summary>Return the pixel format corresponding to name.</summary>
[DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)]
public static extern AVPixelFormat av_get_pix_fmt(
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @name);

// NOTE(review): the declaration this doc comment belongs to (presumably av_get_pix_fmt_loss,
// per the parameter list) continues past the end of this chunk and is not visible here.
/// <summary>Compute what kind of losses will occur when converting from one specific pixel format to another. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. These losses can involve loss of chroma, but also loss of resolution, loss of color depth, loss due to the color space conversion, loss of the alpha bits or loss due to color quantization. av_get_fix_fmt_loss() informs you about the various types of losses which will occur when converting from one pixel format to another.</summary>
/// <param name="dst_pix_fmt">destination pixel format</param>
/// <param name="src_pix_fmt">source pixel format</param>
/// <param name="has_alpha">Whether the source pixel format alpha channel is used.</param>
+ /// Combination of flags informing you what kind of losses will occur (maximum loss for an invalid dst_pix_fmt). + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_pix_fmt_loss(AVPixelFormat @dst_pix_fmt, AVPixelFormat @src_pix_fmt, int @has_alpha); + + /// Return the short name for a pixel format, NULL in case pix_fmt is unknown. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_get_pix_fmt_name(AVPixelFormat @pix_fmt); + + /// Print in buf the string corresponding to the pixel format with number pix_fmt, or a header if pix_fmt is negative. + /// the buffer where to write the string + /// the size of buf + /// the number of the pixel format to print the corresponding info string, or a negative value to print the corresponding header. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_get_pix_fmt_string(byte* @buf, int @buf_size, AVPixelFormat @pix_fmt); + + /// Get the planar alternative form of the given sample format. + /// the planar alternative form of the given sample format or AV_SAMPLE_FMT_NONE on error. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVSampleFormat av_get_planar_sample_fmt(AVSampleFormat @sample_fmt); + + /// Return a name for the specified profile, if available. + /// the codec that is searched for the given profile + /// the profile value for which a name is requested + /// A name for the profile if found, NULL otherwise. 
+ [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_get_profile_name(AVCodec* @codec, int @profile); + + /// Return a sample format corresponding to name, or AV_SAMPLE_FMT_NONE on error. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVSampleFormat av_get_sample_fmt( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Return the name of sample_fmt, or NULL if sample_fmt is not recognized. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_get_sample_fmt_name(AVSampleFormat @sample_fmt); + + /// Generate a string corresponding to the sample format with sample_fmt, or a header if sample_fmt is negative. + /// the buffer where to write the string + /// the size of buf + /// the number of the sample format to print the corresponding info string, or a negative value to print the corresponding header. + /// the pointer to the filled buffer or NULL if sample_fmt is unknown or in case of other errors + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_get_sample_fmt_string(byte* @buf, int @buf_size, AVSampleFormat @sample_fmt); + + /// Get the value and name of a standard channel layout. 
+ /// index in an internal list, starting at 0 + /// channel layout mask + /// name of the layout + /// 0 if the layout exists, < 0 if index is beyond the limits + [Obsolete("use av_channel_layout_standard()")] + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_standard_channel_layout(uint @index, ulong* @layout, byte** @name); + + /// Return the fractional representation of the internal time base. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_get_time_base_q(); + + /// Get the current time in microseconds. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_gettime(); + + /// Get the current time in microseconds since some unspecified starting point. On platforms that support it, the time comes from a monotonic clock This property makes this time source ideal for measuring relative time. The returned values may not be monotonic on platforms where a monotonic clock is not available. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_gettime_relative(); + + /// Indicates with a boolean result if the av_gettime_relative() time source is monotonic. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_gettime_relative_is_monotonic(); + + /// Increase packet size, correctly zeroing padding + /// packet + /// number of bytes by which to increase the size of the packet + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_grow_packet(AVPacket* @pkt, int @grow_by); + + /// Guess the codec ID based upon muxer and filename. 
+ [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecID av_guess_codec(AVOutputFormat* @fmt, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @short_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @mime_type, AVMediaType @type); + + /// Return the output format in the list of registered output formats which best matches the provided parameters, or return NULL if there is no match. + /// if non-NULL checks if short_name matches with the names of the registered formats + /// if non-NULL checks if filename terminates with the extensions of the registered formats + /// if non-NULL checks if mime_type matches with the MIME type of the registered formats + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVOutputFormat* av_guess_format( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @short_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @mime_type); + + /// Guess the frame rate, based on both the container and codec information. 
+ /// the format context which the stream is part of + /// the stream which the frame is part of + /// the frame for which the frame rate should be determined, may be NULL + /// the guessed (valid) frame rate, 0/1 if no idea + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_guess_frame_rate(AVFormatContext* @ctx, AVStream* @stream, AVFrame* @frame); + + /// Guess the sample aspect ratio of a frame, based on both the stream and the frame aspect ratio. + /// the format context which the stream is part of + /// the stream which the frame is part of + /// the frame with the aspect ratio to be determined + /// the guessed (valid) sample_aspect_ratio, 0/1 if no idea + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_guess_sample_aspect_ratio(AVFormatContext* @format, AVStream* @stream, AVFrame* @frame); + + /// Send a nice hexadecimal dump of a buffer to the specified file stream. + /// The file stream pointer where the dump should be sent to. + /// buffer + /// buffer size + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_hex_dump(_iobuf* @f, byte* @buf, int @size); + + /// Send a nice hexadecimal dump of a buffer to the log. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message, lower values signifying higher importance. + /// buffer + /// buffer size + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_hex_dump_log(void* @avcl, int @level, byte* @buf, int @size); + + /// Allocate an AVHWDeviceContext for a given hardware type. + /// the type of the hardware device to allocate. + /// a reference to the newly created AVHWDeviceContext on success or NULL on failure. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_hwdevice_ctx_alloc(AVHWDeviceType @type); + + /// Open a device of the specified type and create an AVHWDeviceContext for it. + /// On success, a reference to the newly-created device context will be written here. The reference is owned by the caller and must be released with av_buffer_unref() when no longer needed. On failure, NULL will be written to this pointer. + /// The type of the device to create. + /// A type-specific string identifying the device to open. + /// A dictionary of additional (type-specific) options to use in opening the device. The dictionary remains owned by the caller. + /// currently unused + /// 0 on success, a negative AVERROR code on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwdevice_ctx_create(AVBufferRef** @device_ctx, AVHWDeviceType @type, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @device, AVDictionary* @opts, int @flags); + + /// Create a new device of the specified type from an existing device. + /// On success, a reference to the newly-created AVHWDeviceContext. + /// The type of the new device to create. + /// A reference to an existing AVHWDeviceContext which will be used to create the new device. + /// Currently unused; should be set to zero. + /// Zero on success, a negative AVERROR code on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwdevice_ctx_create_derived(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, int @flags); + + /// Create a new device of the specified type from an existing device. + /// On success, a reference to the newly-created AVHWDeviceContext. + /// The type of the new device to create. 
+ /// A reference to an existing AVHWDeviceContext which will be used to create the new device. + /// Options for the new device to create, same format as in av_hwdevice_ctx_create. + /// Currently unused; should be set to zero. + /// Zero on success, a negative AVERROR code on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwdevice_ctx_create_derived_opts(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, AVDictionary* @options, int @flags); + + /// Finalize the device context before use. This function must be called after the context is filled with all the required information and before it is used in any way. + /// a reference to the AVHWDeviceContext + /// 0 on success, a negative AVERROR code on failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwdevice_ctx_init(AVBufferRef* @ref); + + /// Look up an AVHWDeviceType by name. + /// String name of the device type (case-insensitive). + /// The type from enum AVHWDeviceType, or AV_HWDEVICE_TYPE_NONE if not found. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVHWDeviceType av_hwdevice_find_type_by_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Get the constraints on HW frames given a device and the HW-specific configuration to be used with that device. If no HW-specific configuration is provided, returns the maximum possible capabilities of the device. + /// a reference to the associated AVHWDeviceContext. + /// a filled HW-specific configuration structure, or NULL to return the maximum possible capabilities of the device. + /// AVHWFramesConstraints structure describing the constraints on the device, or NULL if not available. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVHWFramesConstraints* av_hwdevice_get_hwframe_constraints(AVBufferRef* @ref, void* @hwconfig); + + /// Get the string name of an AVHWDeviceType. + /// Type from enum AVHWDeviceType. + /// Pointer to a static string containing the name, or NULL if the type is not valid. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_hwdevice_get_type_name(AVHWDeviceType @type); + + /// Allocate a HW-specific configuration structure for a given HW device. After use, the user must free all members as required by the specific hardware structure being used, then free the structure itself with av_free(). + /// a reference to the associated AVHWDeviceContext. + /// The newly created HW-specific configuration structure on success or NULL on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_hwdevice_hwconfig_alloc(AVBufferRef* @device_ctx); + + /// Iterate over supported device types. + /// The next usable device type from enum AVHWDeviceType, or AV_HWDEVICE_TYPE_NONE if there are no more. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVHWDeviceType av_hwdevice_iterate_types(AVHWDeviceType @prev); + + /// Free an AVHWFrameConstraints structure. + /// The (filled or unfilled) AVHWFrameConstraints structure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_hwframe_constraints_free(AVHWFramesConstraints** @constraints); + + /// Allocate an AVHWFramesContext tied to a given device context. + /// a reference to a AVHWDeviceContext. This function will make a new reference for internal use, the one passed to the function remains owned by the caller. 
+ /// a reference to the newly created AVHWFramesContext on success or NULL on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_hwframe_ctx_alloc(AVBufferRef* @device_ctx); + + /// Create and initialise an AVHWFramesContext as a mapping of another existing AVHWFramesContext on a different device. + /// On success, a reference to the newly created AVHWFramesContext. + /// A reference to the device to create the new AVHWFramesContext on. + /// A reference to an existing AVHWFramesContext which will be mapped to the derived context. + /// Some combination of AV_HWFRAME_MAP_* flags, defining the mapping parameters to apply to frames which are allocated in the derived device. + /// Zero on success, negative AVERROR code on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwframe_ctx_create_derived(AVBufferRef** @derived_frame_ctx, AVPixelFormat @format, AVBufferRef* @derived_device_ctx, AVBufferRef* @source_frame_ctx, int @flags); + + /// Finalize the context before use. This function must be called after the context is filled with all the required information and before it is attached to any frames. + /// a reference to the AVHWFramesContext + /// 0 on success, a negative AVERROR code on failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwframe_ctx_init(AVBufferRef* @ref); + + /// Allocate a new frame attached to the given AVHWFramesContext. + /// a reference to an AVHWFramesContext + /// an empty (freshly allocated or unreffed) frame to be filled with newly allocated buffers. + /// currently unused, should be set to zero + /// 0 on success, a negative AVERROR code on failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwframe_get_buffer(AVBufferRef* @hwframe_ctx, AVFrame* @frame, int @flags); + + /// Map a hardware frame. 
+ /// Destination frame, to contain the mapping. + /// Source frame, to be mapped. + /// Some combination of AV_HWFRAME_MAP_* flags. + /// Zero on success, negative AVERROR code on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwframe_map(AVFrame* @dst, AVFrame* @src, int @flags); + + /// Copy data to or from a hw surface. At least one of dst/src must have an AVHWFramesContext attached. + /// the destination frame. dst is not touched on failure. + /// the source frame. + /// currently unused, should be set to zero + /// 0 on success, a negative AVERROR error code on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwframe_transfer_data(AVFrame* @dst, AVFrame* @src, int @flags); + + /// Get a list of possible source or target formats usable in av_hwframe_transfer_data(). + /// the frame context to obtain the information for + /// the direction of the transfer + /// the pointer to the output format list will be written here. The list is terminated with AV_PIX_FMT_NONE and must be freed by the caller when no longer needed using av_free(). If this function returns successfully, the format list will have at least one item (not counting the terminator). On failure, the contents of this pointer are unspecified. + /// currently unused, should be set to zero + /// 0 on success, a negative AVERROR code on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwframe_transfer_get_formats(AVBufferRef* @hwframe_ctx, AVHWFrameTransferDirection @dir, AVPixelFormat** @formats, int @flags); + + /// Allocate an image with size w and h and pixel format pix_fmt, and fill pointers and linesizes accordingly. The allocated image buffer has to be freed by using av_freep(&pointers[0]). 
+ /// the value to use for buffer size alignment + /// the size in bytes required for the image buffer, a negative error code in case of failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_alloc(ref byte_ptr4 @pointers, ref int4 @linesizes, int @w, int @h, AVPixelFormat @pix_fmt, int @align); + + /// Check if the given sample aspect ratio of an image is valid. + /// width of the image + /// height of the image + /// sample aspect ratio of the image + /// 0 if valid, a negative AVERROR code otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_check_sar(uint @w, uint @h, AVRational @sar); + + /// Check if the given dimension of an image is valid, meaning that all bytes of the image can be addressed with a signed int. + /// the width of the picture + /// the height of the picture + /// the offset to sum to the log level for logging with log_ctx + /// the parent logging context, it may be NULL + /// >= 0 if valid, a negative error code otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_check_size(uint @w, uint @h, int @log_offset, void* @log_ctx); + + /// Check if the given dimension of an image is valid, meaning that all bytes of a plane of an image with the specified pix_fmt can be addressed with a signed int. + /// the width of the picture + /// the height of the picture + /// the maximum number of pixels the user wants to accept + /// the pixel format, can be AV_PIX_FMT_NONE if unknown. 
+ /// the offset to sum to the log level for logging with log_ctx + /// the parent logging context, it may be NULL + /// >= 0 if valid, a negative error code otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_check_size2(uint @w, uint @h, long @max_pixels, AVPixelFormat @pix_fmt, int @log_offset, void* @log_ctx); + + /// Copy image in src_data to dst_data. + /// linesizes for the image in dst_data + /// linesizes for the image in src_data + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_image_copy(ref byte_ptr4 @dst_data, ref int4 @dst_linesizes, in byte_ptr4 @src_data, in int4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height); + + /// Copy image plane from src to dst. That is, copy "height" number of lines of "bytewidth" bytes each. The first byte of each successive line is separated by *_linesize bytes. + /// linesize for the image plane in dst + /// linesize for the image plane in src + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_image_copy_plane(byte* @dst, int @dst_linesize, byte* @src, int @src_linesize, int @bytewidth, int @height); + + /// Copy image data located in uncacheable (e.g. GPU mapped) memory. Where available, this function will use special functionality for reading from such memory, which may result in greatly improved performance compared to plain av_image_copy_plane(). + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_image_copy_plane_uc_from(byte* @dst, long @dst_linesize, byte* @src, long @src_linesize, long @bytewidth, int @height); + + /// Copy image data from an image into a buffer. 
+ /// a buffer into which picture data will be copied + /// the size in bytes of dst + /// pointers containing the source image data + /// linesizes for the image in src_data + /// the pixel format of the source image + /// the width of the source image in pixels + /// the height of the source image in pixels + /// the assumed linesize alignment for dst + /// the number of bytes written to dst, or a negative value (error code) on error + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_copy_to_buffer(byte* @dst, int @dst_size, in byte_ptr4 @src_data, in int4 @src_linesize, AVPixelFormat @pix_fmt, int @width, int @height, int @align); + + /// Copy image data located in uncacheable (e.g. GPU mapped) memory. Where available, this function will use special functionality for reading from such memory, which may result in greatly improved performance compared to plain av_image_copy(). + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_image_copy_uc_from(ref byte_ptr4 @dst_data, in long4 @dst_linesizes, in byte_ptr4 @src_data, in long4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height); + + /// Setup the data pointers and linesizes based on the specified image parameters and the provided array. 
+ /// data pointers to be filled in + /// linesizes for the image in dst_data to be filled in + /// buffer which will contain or contains the actual image data, can be NULL + /// the pixel format of the image + /// the width of the image in pixels + /// the height of the image in pixels + /// the value used in src for linesize alignment + /// the size in bytes required for src, a negative error code in case of failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_fill_arrays(ref byte_ptr4 @dst_data, ref int4 @dst_linesize, byte* @src, AVPixelFormat @pix_fmt, int @width, int @height, int @align); + + /// Overwrite the image data with black. This is suitable for filling a sub-rectangle of an image, meaning the padding between the right most pixel and the left most pixel on the next line will not be overwritten. For some formats, the image size might be rounded up due to inherent alignment. + /// data pointers to destination image + /// linesizes for the destination image + /// the pixel format of the image + /// the color range of the image (important for colorspaces such as YUV) + /// the width of the image in pixels + /// the height of the image in pixels + /// 0 if the image data was cleared, a negative AVERROR code otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_fill_black(ref byte_ptr4 @dst_data, in long4 @dst_linesize, AVPixelFormat @pix_fmt, AVColorRange @range, int @width, int @height); + + /// Fill plane linesizes for an image with pixel format pix_fmt and width width. 
+ /// array to be filled with the linesize for each plane + /// >= 0 in case of success, a negative error code otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_fill_linesizes(ref int4 @linesizes, AVPixelFormat @pix_fmt, int @width); + + /// Compute the max pixel step for each plane of an image with a format described by pixdesc. + /// an array which is filled with the max pixel step for each plane. Since a plane may contain different pixel components, the computed max_pixsteps[plane] is relative to the component in the plane with the max pixel step. + /// an array which is filled with the component for each plane which has the max pixel step. May be NULL. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_image_fill_max_pixsteps(ref int4 @max_pixsteps, ref int4 @max_pixstep_comps, AVPixFmtDescriptor* @pixdesc); + + /// Fill plane sizes for an image with pixel format pix_fmt and height height. + /// the array to be filled with the size of each image plane + /// the array containing the linesize for each plane, should be filled by av_image_fill_linesizes() + /// >= 0 in case of success, a negative error code otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_fill_plane_sizes(ref ulong4 @size, AVPixelFormat @pix_fmt, int @height, in long4 @linesizes); + + /// Fill plane data pointers for an image with pixel format pix_fmt and height height. 
+ /// pointers array to be filled with the pointer for each image plane + /// the pointer to a buffer which will contain the image + /// the array containing the linesize for each plane, should be filled by av_image_fill_linesizes() + /// the size in bytes required for the image buffer, a negative error code in case of failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_fill_pointers(ref byte_ptr4 @data, AVPixelFormat @pix_fmt, int @height, byte* @ptr, in int4 @linesizes); + + /// Return the size in bytes of the amount of data required to store an image with the given parameters. + /// the pixel format of the image + /// the width of the image in pixels + /// the height of the image in pixels + /// the assumed linesize alignment + /// the buffer size in bytes, a negative error code in case of failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_get_buffer_size(AVPixelFormat @pix_fmt, int @width, int @height, int @align); + + /// Compute the size of an image line with format pix_fmt and width width for the plane plane. + /// the computed size in bytes + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_get_linesize(AVPixelFormat @pix_fmt, int @width, int @plane); + + /// Get the index for a specific timestamp. 
+ /// stream that the timestamp belongs to + /// timestamp to retrieve the index for + /// if AVSEEK_FLAG_BACKWARD then the returned index will correspond to the timestamp which is < = the requested one, if backward is 0, then it will be >= if AVSEEK_FLAG_ANY seek to any frame, only keyframes otherwise + /// < 0 if no such timestamp could be found + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_index_search_timestamp(AVStream* @st, long @timestamp, int @flags); + + /// Initialize optional fields of a packet with default values. + /// packet + [Obsolete("This function is deprecated. Once it's removed, sizeof(AVPacket) will not be a part of the ABI anymore.")] + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_init_packet(AVPacket* @pkt); + + /// Audio input devices iterator. + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVInputFormat* av_input_audio_device_next(AVInputFormat* @d); + + /// Video input devices iterator. + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVInputFormat* av_input_video_device_next(AVInputFormat* @d); + + /// Compute the length of an integer list. + /// size in bytes of each list element (only 1, 2, 4 or 8) + /// pointer to the list + /// list terminator (usually 0 or -1) + /// length of the list, in elements, not counting the terminator + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_int_list_length_for_size(uint @elsize, void* @list, ulong @term); + + /// Write a packet to an output media file ensuring correct interleaving. + /// media file handle + /// The packet containing the data to be written. If the packet is reference-counted, this function will take ownership of this reference and unreference it later when it sees fit. 
If the packet is not reference-counted, libavformat will make a copy. The returned packet will be blank (as if returned from av_packet_alloc()), even on error. This parameter can be NULL (at any time, not just at the end), to flush the interleaving queues. Packet's "stream_index" field must be set to the index of the corresponding stream in "s->streams". The timestamps ( "pts", "dts") must be set to correct values in the stream's timebase (unless the output format is flagged with the AVFMT_NOTIMESTAMPS flag, then they can be set to AV_NOPTS_VALUE). The dts for subsequent packets in one stream must be strictly increasing (unless the output format is flagged with the AVFMT_TS_NONSTRICT, then they merely have to be nondecreasing). "duration" should also be set if known. + /// 0 on success, a negative AVERROR on error. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_interleaved_write_frame(AVFormatContext* @s, AVPacket* @pkt); + + /// Write an uncoded frame to an output media file. + /// >=0 for success, a negative code on error + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_interleaved_write_uncoded_frame(AVFormatContext* @s, int @stream_index, AVFrame* @frame); + + /// Send the specified message to the log if the level is less than or equal to the current av_log_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct or NULL if general log. + /// The importance level of the message expressed using a "Logging Constant". + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log(void* @avcl, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt); + + /// Default logging callback + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message expressed using a "Logging Constant". + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. + /// The arguments referenced by the format string. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log_default_callback(void* @avcl, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl); + + /// Format a line of log the same way as the default callback. + /// buffer to receive the formatted line + /// size of the buffer + /// used to store whether the prefix must be printed; must point to a persistent integer initially set to 1 + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log_format_line(void* @ptr, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix); + + /// Format a line of log the same way as the default callback. 
+ /// buffer to receive the formatted line; may be NULL if line_size is 0 + /// size of the buffer; at most line_size-1 characters will be written to the buffer, plus one null terminator + /// used to store whether the prefix must be printed; must point to a persistent integer initially set to 1 + /// Returns a negative value if an error occurred, otherwise returns the number of characters that would have been written for a sufficiently large buffer, not including the terminating null character. If the return value is not less than line_size, it means that the log message was truncated to fit the buffer. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_log_format_line2(void* @ptr, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_log_get_flags(); + + /// Get the current log level + /// Current log level + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_log_get_level(); + + /// Send the specified message to the log once with the initial_level and then with the subsequent_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct or NULL if general log. + /// importance level of the message expressed using a "Logging Constant" for the first occurrence. + /// importance level of the message expressed using a "Logging Constant" after the first occurrence. + /// a variable to keep track of whether a message has already been printed; this must be initialized to 0 before the first use.
The same state must not be accessed by 2 Threads simultaneously. + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log_once(void* @avcl, int @initial_level, int @subsequent_level, int* @state, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt); + + /// Set the logging callback + /// A logging function with a compatible signature. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log_set_callback(av_log_set_callback_callback_func @callback); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log_set_flags(int @arg); + + /// Set the log level + /// Logging level + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log_set_level(int @level); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_log2(uint @v); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_log2_16bit(uint @v); + + /// Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU). + /// Size in bytes for the memory block to be allocated + /// Pointer to the allocated block, or `NULL` if the block cannot be allocated + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_malloc(ulong @size); + + /// Allocate a memory block for an array with av_malloc(). 
+ /// Number of element + /// Size of a single element + /// Pointer to the allocated block, or `NULL` if the block cannot be allocated + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_malloc_array(ulong @nmemb, ulong @size); + + /// Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU) and zero all the bytes of the block. + /// Size in bytes for the memory block to be allocated + /// Pointer to the allocated block, or `NULL` if it cannot be allocated + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_mallocz(ulong @size); + + [Obsolete("use av_calloc()")] + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_mallocz_array(ulong @nmemb, ulong @size); + + /// Allocate an AVMasteringDisplayMetadata structure and set its fields to default values. The resulting struct can be freed using av_freep(). + /// An AVMasteringDisplayMetadata filled with default values or NULL on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVMasteringDisplayMetadata* av_mastering_display_metadata_alloc(); + + /// Allocate a complete AVMasteringDisplayMetadata and add it to the frame. + /// The frame which side data is added to. + /// The AVMasteringDisplayMetadata structure to be filled by caller. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVMasteringDisplayMetadata* av_mastering_display_metadata_create_side_data(AVFrame* @frame); + + /// Return a positive value if the given filename has one of the given extensions, 0 otherwise. 
+ /// file name to check against the given extensions + /// a comma-separated list of filename extensions + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_match_ext( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @extensions); + + /// Set the maximum size that may be allocated in one block. + /// Value to be set as the new maximum size + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_max_alloc(ulong @max); + + /// Overlapping memcpy() implementation. + /// Destination buffer + /// Number of bytes back to start copying (i.e. the initial size of the overlapping window); must be > 0 + /// Number of bytes to copy; must be >= 0 + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_memcpy_backptr(byte* @dst, int @back, int @cnt); + + /// Duplicate a buffer with av_malloc(). + /// Buffer to be duplicated + /// Size in bytes of the buffer copied + /// Pointer to a newly allocated buffer containing a copy of `p` or `NULL` if the buffer cannot be allocated + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_memdup(void* @p, ulong @size); + + /// Multiply two rationals. + /// First rational + /// Second rational + /// b*c + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_mul_q(AVRational @b, AVRational @c); + + /// Iterate over all registered muxers. + /// a pointer where libavformat will store the iteration state. Must point to NULL to start the iteration. 
+ /// the next registered muxer or NULL when the iteration is finished + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVOutputFormat* av_muxer_iterate(void** @opaque); + + /// Find which of the two rationals is closer to another rational. + /// Rational to be compared against + /// One of the following values: - 1 if `q1` is nearer to `q` than `q2` - -1 if `q2` is nearer to `q` than `q1` - 0 if they have the same distance + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_nearer_q(AVRational @q, AVRational @q1, AVRational @q2); + + /// Allocate the payload of a packet and initialize its fields with default values. + /// packet + /// wanted payload size + /// 0 if OK, AVERROR_xxx otherwise + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_new_packet(AVPacket* @pkt, int @size); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVProgram* av_new_program(AVFormatContext* @s, int @id); + + /// Iterate over potential AVOptions-enabled children of parent. + /// a pointer where iteration state is stored. + /// AVClass corresponding to next potential child or NULL + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* av_opt_child_class_iterate(AVClass* @parent, void** @iter); + + /// Iterate over AVOptions-enabled children of obj. + /// result of a previous call to this function or NULL + /// next AVOptions-enabled child or NULL + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_opt_child_next(void* @obj, void* @prev); + + /// Copy options from src object into dest object. 
+ /// Object to copy from + /// Object to copy into + /// 0 on success, negative on error + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_copy(void* @dest, void* @src); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_eval_double(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, double* @double_out); + + /// @{ This group of functions can be used to evaluate option strings and get numbers out of them. They do the same thing as av_opt_set(), except the result is written into the caller-supplied pointer. + /// a struct whose first element is a pointer to AVClass. + /// an option for which the string is to be evaluated. + /// string to be evaluated. + /// 0 on success, a negative number on failure. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_eval_flags(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, int* @flags_out); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_eval_float(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, float* @float_out); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_eval_int(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + 
#endif + string @val, int* @int_out); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_eval_int64(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, long* @int64_out); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_eval_q(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, AVRational* @q_out); + + /// Look for an option in an object. Consider only options which have all the specified flags set. + /// A pointer to a struct whose first element is a pointer to an AVClass. Alternatively a double pointer to an AVClass, if AV_OPT_SEARCH_FAKE_OBJ search flag is set. + /// The name of the option to look for. + /// When searching for named constants, name of the unit it belongs to. + /// Find only options with all the specified flags set (AV_OPT_FLAG). + /// A combination of AV_OPT_SEARCH_*. + /// A pointer to the option found, or NULL if no option was found. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVOption* av_opt_find(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @unit, int @opt_flags, int @search_flags); + + /// Look for an option in an object. Consider only options which have all the specified flags set. 
+ /// A pointer to a struct whose first element is a pointer to an AVClass. Alternatively a double pointer to an AVClass, if AV_OPT_SEARCH_FAKE_OBJ search flag is set. + /// The name of the option to look for. + /// When searching for named constants, name of the unit it belongs to. + /// Find only options with all the specified flags set (AV_OPT_FLAG). + /// A combination of AV_OPT_SEARCH_*. + /// if non-NULL, an object to which the option belongs will be written here. It may be different from obj if AV_OPT_SEARCH_CHILDREN is present in search_flags. This parameter is ignored if search_flags contain AV_OPT_SEARCH_FAKE_OBJ. + /// A pointer to the option found, or NULL if no option was found. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVOption* av_opt_find2(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @unit, int @opt_flags, int @search_flags, void** @target_obj); + + /// Check whether a particular flag is set in a flags field. + /// the name of the flag field option + /// the name of the flag to check + /// non-zero if the flag is set, zero if the flag isn't set, isn't of the right type, or the flags field doesn't exist. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_flag_is_set(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @field_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @flag_name); + + /// Free all allocated objects in obj. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_opt_free(void* @obj); + + /// Free an AVOptionRanges struct and set it to NULL. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_opt_freep_ranges(AVOptionRanges** @ranges); + + /// @{ Those functions get a value of the option with the given name from an object. + /// a struct whose first element is a pointer to an AVClass. + /// name of the option to get. + /// flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN is passed here, then the option may be found in a child of obj. 
+ /// value of the option will be written here + /// >=0 on success, a negative error code otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_get(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, byte** @out_val); + + [Obsolete()] + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_get_channel_layout(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, long* @ch_layout); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_get_chlayout(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVChannelLayout* @layout); + + /// The returned dictionary is a copy of the actual value and must be freed with av_dict_free() by the caller + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_get_dict_val(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVDictionary** @out_val); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_get_double(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, 
int @search_flags, double* @out_val); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_get_image_size(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, int* @w_out, int* @h_out); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_get_int(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, long* @out_val); + + /// Extract a key-value pair from the beginning of a string. + /// pointer to the options string, will be updated to point to the rest of the string (one of the pairs_sep or the final NUL) + /// a 0-terminated list of characters used to separate key from value, for example '=' + /// a 0-terminated list of characters used to separate two pairs from each other, for example ':' or ',' + /// flags; see the AV_OPT_FLAG_* values below + /// parsed key; must be freed using av_free() + /// parsed value; must be freed using av_free() + /// >=0 for success, or a negative value corresponding to an AVERROR code in case of error; in particular: AVERROR(EINVAL) if no key is present + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_get_key_value(byte** @ropts, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @pairs_sep, uint @flags, byte** @rkey, 
byte** @rval); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_get_pixel_fmt(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVPixelFormat* @out_fmt); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_get_q(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVRational* @out_val); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_get_sample_fmt(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVSampleFormat* @out_fmt); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_get_video_rate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVRational* @out_val); + + /// Check if given option is set to its default value. + /// AVClass object to check option on + /// option to be checked + /// >0 when option is set to its default, 0 when option is not set its default, < 0 on error + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_is_set_to_default(void* @obj, AVOption* @o); + + /// Check if given option is set to its default value. 
+ /// AVClass object to check option on + /// option name + /// combination of AV_OPT_SEARCH_* + /// >0 when option is set to its default, 0 when option is not set its default, < 0 on error + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_is_set_to_default_by_name(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags); + + /// Iterate over all AVOptions belonging to obj. + /// an AVOptions-enabled struct or a double pointer to an AVClass describing it. + /// result of the previous call to av_opt_next() on this object or NULL + /// next AVOption or NULL + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVOption* av_opt_next(void* @obj, AVOption* @prev); + + /// @} + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_opt_ptr(AVClass* @avclass, void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Get a list of allowed ranges for the given option. 
+ /// is a bitmask of flags, undefined flags should not be set and should be ignored AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to a AVClass instead of a full instance AV_OPT_MULTI_COMPONENT_RANGE indicates that function may return more than one component, + /// number of components returned on success, a negative error code otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_query_ranges(AVOptionRanges** @p0, void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, int @flags); + + /// Get a default list of allowed ranges for the given option. + /// is a bitmask of flags, undefined flags should not be set and should be ignored AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to a AVClass instead of a full instance AV_OPT_MULTI_COMPONENT_RANGE indicates that function may return more than one component, + /// number of components returned on success, a negative error code otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_query_ranges_default(AVOptionRanges** @p0, void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, int @flags); + + /// Serialize object's options. + /// AVClass object to serialize + /// serialize options with all the specified flags set (AV_OPT_FLAG) + /// combination of AV_OPT_SERIALIZE_* flags + /// Pointer to buffer that will be allocated with string containing serialized options. Buffer must be freed by the caller when it is no longer needed.
+ /// character used to separate key from value + /// character used to separate two pairs from each other + /// >= 0 on success, negative on error + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_serialize(void* @obj, int @opt_flags, int @flags, byte** @buffer, byte @key_val_sep, byte @pairs_sep); + + /// @{ Those functions set the field of obj with the given name to value. + /// A struct whose first element is a pointer to an AVClass. + /// the name of the field to set + /// The value to set. In case of av_opt_set() if the field is not of a string type, then the given string is parsed. SI postfixes and some named scalars are supported. If the field is of a numeric type, it has to be a numeric or named scalar. Behavior with more than one scalar and +- infix operators is undefined. If the field is of a flags type, it has to be a sequence of numeric scalars or named flags separated by '+' or '-'. Prefixing a flag with '+' causes it to be set without affecting the other flags; similarly, '-' unsets a flag. If the field is of a dictionary type, it has to be a ':' separated list of key=value parameters. Values containing ':' special characters must be escaped. + /// flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN is passed here, then the option may be set on a child of obj. 
+ /// 0 if the value has been set, or an AVERROR code in case of error: AVERROR_OPTION_NOT_FOUND if no matching option exists AVERROR(ERANGE) if the value is out of range AVERROR(EINVAL) if the value is not valid + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, int @search_flags); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_bin(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, byte* @val, int @size, int @search_flags); + + [Obsolete()] + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_channel_layout(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, long @ch_layout, int @search_flags); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_chlayout(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVChannelLayout* @layout, int @search_flags); + + /// Set the values of all AVOption fields to their default values. 
+ /// an AVOption-enabled struct (its first member must be a pointer to AVClass) + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_opt_set_defaults(void* @s); + + /// Set the values of all AVOption fields to their default values. Only these AVOption fields for which (opt->flags & mask) == flags will have their default applied to s. + /// an AVOption-enabled struct (its first member must be a pointer to AVClass) + /// combination of AV_OPT_FLAG_* + /// combination of AV_OPT_FLAG_* + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_opt_set_defaults2(void* @s, int @mask, int @flags); + + /// Set all the options from a given dictionary on an object. + /// a struct whose first element is a pointer to AVClass + /// options to process. This dictionary will be freed and replaced by a new one containing all options not found in obj. Of course this new dictionary needs to be freed by caller with av_dict_free(). + /// 0 on success, a negative AVERROR if some option was found in obj, but could not be set. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_dict(void* @obj, AVDictionary** @options); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_dict_val(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVDictionary* @val, int @search_flags); + + /// Set all the options from a given dictionary on an object. + /// a struct whose first element is a pointer to AVClass + /// options to process. This dictionary will be freed and replaced by a new one containing all options not found in obj. Of course this new dictionary needs to be freed by caller with av_dict_free(). + /// A combination of AV_OPT_SEARCH_*. 
+ /// 0 on success, a negative AVERROR if some option was found in obj, but could not be set. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_dict2(void* @obj, AVDictionary** @options, int @search_flags); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_double(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, double @val, int @search_flags); + + /// Parse the key-value pairs list in opts. For each key=value pair found, set the value of the corresponding option in ctx. + /// the AVClass object to set options on + /// the options string, key-value pairs separated by a delimiter + /// a NULL-terminated array of options names for shorthand notation: if the first field in opts has no key part, the key is taken from the first element of shorthand; then again for the second, etc., until either opts is finished, shorthand is finished or a named option is found; after that, all options must be named + /// a 0-terminated list of characters used to separate key from value, for example '=' + /// a 0-terminated list of characters used to separate two pairs from each other, for example ':' or ',' + /// the number of successfully set key=value pairs, or a negative value corresponding to an AVERROR code in case of error: AVERROR(EINVAL) if opts cannot be parsed, the error code issued by av_set_string3() if a key/value pair cannot be set + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_from_string(void* @ctx, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @opts, byte** @shorthand, + #if NETSTANDARD2_1_OR_GREATER + 
[MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @pairs_sep); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_image_size(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @w, int @h, int @search_flags); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_int(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, long @val, int @search_flags); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_pixel_fmt(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVPixelFormat @fmt, int @search_flags); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_q(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVRational @val, int @search_flags); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_sample_fmt(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + 
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVSampleFormat @fmt, int @search_flags); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_set_video_rate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVRational @val, int @search_flags); + + /// Show the obj options. + /// log context to use for showing the options + /// requested flags for the options to show. Show only the options for which it is opt->flags & req_flags. + /// rejected flags for the options to show. Show only the options for which it is !(opt->flags & rej_flags). + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_show2(void* @obj, void* @av_log_obj, int @req_flags, int @rej_flags); + + /// Audio output devices iterator. + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVOutputFormat* av_output_audio_device_next(AVOutputFormat* @d); + + /// Video output devices iterator. + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVOutputFormat* av_output_video_device_next(AVOutputFormat* @d); + + /// Wrap an existing array as a packet side data. + /// packet + /// side information type + /// the side data array. It must be allocated with the av_malloc() family of functions. The ownership of the data is transferred to pkt. + /// side information size + /// a non-negative number on success, a negative AVERROR code on failure. On failure, the packet is unchanged and the data remains owned by the caller. 
+ [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_packet_add_side_data(AVPacket* @pkt, AVPacketSideDataType @type, byte* @data, ulong @size); + + /// Allocate an AVPacket and set its fields to default values. The resulting struct must be freed using av_packet_free(). + /// An AVPacket filled with default values or NULL on failure. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVPacket* av_packet_alloc(); + + /// Create a new packet that references the same data as src. + /// newly created AVPacket on success, NULL on error. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVPacket* av_packet_clone(AVPacket* @src); + + /// Copy only "properties" fields from src to dst. + /// Destination packet + /// Source packet + /// 0 on success AVERROR on failure. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_packet_copy_props(AVPacket* @dst, AVPacket* @src); + + /// Free the packet, if the packet is reference counted, it will be unreferenced first. + /// packet to be freed. The pointer will be set to NULL. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_packet_free(AVPacket** @pkt); + + /// Convenience function to free all the side data stored. All the other fields stay untouched. + /// packet + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_packet_free_side_data(AVPacket* @pkt); + + /// Initialize a reference-counted packet from av_malloc()ed data. + /// packet to be initialized. This function will set the data, size, and buf fields, all others are left untouched. + /// Data allocated by av_malloc() to be used as packet data. If this function returns successfully, the data is owned by the underlying AVBuffer. The caller may not access the data through other means. 
+ /// size of data in bytes, without the padding. I.e. the full buffer size is assumed to be size + AV_INPUT_BUFFER_PADDING_SIZE. + /// 0 on success, a negative AVERROR on error + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_packet_from_data(AVPacket* @pkt, byte* @data, int @size); + + /// Get side information from packet. + /// packet + /// desired side information type + /// If supplied, *size will be set to the size of the side data or to zero if the desired side data is not present. + /// pointer to data if present or NULL otherwise + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_packet_get_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong* @size); + + /// Ensure the data described by a given packet is reference counted. + /// packet whose data should be made reference counted. + /// 0 on success, a negative AVERROR on error. On failure, the packet is unchanged. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_packet_make_refcounted(AVPacket* @pkt); + + /// Create a writable reference for the data described by a given packet, avoiding data copy if possible. + /// Packet whose data should be made writable. + /// 0 on success, a negative AVERROR on failure. On failure, the packet is unchanged. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_packet_make_writable(AVPacket* @pkt); + + /// Move every field in src to dst and reset src. + /// Destination packet + /// Source packet, will be reset + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_packet_move_ref(AVPacket* @dst, AVPacket* @src); + + /// Allocate new information of a packet. 
+ /// packet + /// side information type + /// side information size + /// pointer to fresh allocated data or NULL otherwise + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_packet_new_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size); + + /// Pack a dictionary for use in side_data. + /// The dictionary to pack. + /// pointer to store the size of the returned data + /// pointer to data if successful, NULL otherwise + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_packet_pack_dictionary(AVDictionary* @dict, ulong* @size); + + /// Setup a new reference to the data described by a given packet + /// Destination packet. Will be completely overwritten. + /// Source packet + /// 0 on success, a negative AVERROR on error. On error, dst will be blank (as if returned by av_packet_alloc()). + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_packet_ref(AVPacket* @dst, AVPacket* @src); + + /// Convert valid timing fields (timestamps / durations) in a packet from one timebase to another. Timestamps with unknown values (AV_NOPTS_VALUE) will be ignored. 
+ /// packet on which the conversion will be performed + /// source timebase, in which the timing fields in pkt are expressed + /// destination timebase, to which the timing fields will be converted + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_packet_rescale_ts(AVPacket* @pkt, AVRational @tb_src, AVRational @tb_dst); + + /// Shrink the already allocated side data buffer + /// packet + /// side information type + /// new side information size + /// 0 on success, < 0 on failure + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_packet_shrink_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size); + + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_packet_side_data_name(AVPacketSideDataType @type); + + /// Unpack a dictionary from side_data. + /// data from side_data + /// size of the data + /// the metadata storage dictionary + /// 0 on success, < 0 on failure + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_packet_unpack_dictionary(byte* @data, ulong @size, AVDictionary** @dict); + + /// Wipe the packet. + /// The packet to be unreferenced. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_packet_unref(AVPacket* @pkt); + + /// Parse CPU caps from a string and update the given AV_CPU_* flags based on that. + /// negative on error. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_parse_cpu_caps(uint* @flags, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @s); + + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_parser_close(AVCodecParserContext* @s); + + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecParserContext* av_parser_init(int @codec_id); + + /// Iterate over all registered codec parsers. + /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. + /// the next registered codec parser or NULL when the iteration is finished + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecParser* av_parser_iterate(void** @opaque); + + /// Parse a packet. + /// parser context. + /// codec context. + /// set to pointer to parsed buffer or NULL if not yet finished. + /// set to size of parsed buffer or zero if not yet finished. + /// input buffer. + /// buffer size in bytes without the padding. I.e. the full buffer size is assumed to be buf_size + AV_INPUT_BUFFER_PADDING_SIZE. To signal EOF, this should be 0 (so that the last frame can be output). + /// input presentation timestamp. + /// input decoding timestamp. + /// input byte position in stream. + /// the number of bytes of the input bitstream used. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_parser_parse2(AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size, long @pts, long @dts, long @pos); + + /// Returns number of planes in pix_fmt, a negative AVERROR if pix_fmt is not a valid pixel format. 
+ /// number of planes in pix_fmt, a negative AVERROR if pix_fmt is not a valid pixel format. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_pix_fmt_count_planes(AVPixelFormat @pix_fmt); + + /// Returns a pixel format descriptor for provided pixel format or NULL if this pixel format is unknown. + /// a pixel format descriptor for provided pixel format or NULL if this pixel format is unknown. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVPixFmtDescriptor* av_pix_fmt_desc_get(AVPixelFormat @pix_fmt); + + /// Returns an AVPixelFormat id described by desc, or AV_PIX_FMT_NONE if desc is not a valid pointer to a pixel format descriptor. + /// an AVPixelFormat id described by desc, or AV_PIX_FMT_NONE if desc is not a valid pointer to a pixel format descriptor. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVPixelFormat av_pix_fmt_desc_get_id(AVPixFmtDescriptor* @desc); + + /// Iterate over all pixel format descriptors known to libavutil. + /// previous descriptor. NULL to get the first descriptor. + /// next descriptor or NULL after the last descriptor + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVPixFmtDescriptor* av_pix_fmt_desc_next(AVPixFmtDescriptor* @prev); + + /// Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor. + /// the pixel format + /// store log2_chroma_w (horizontal/width shift) + /// store log2_chroma_h (vertical/height shift) + /// 0 on success, AVERROR(ENOSYS) on invalid or unknown pixel format + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_pix_fmt_get_chroma_sub_sample(AVPixelFormat @pix_fmt, int* @h_shift, int* @v_shift); + + /// Utility function to swap the endianness of a pixel format. 
+ /// the pixel format + /// pixel format with swapped endianness if it exists, otherwise AV_PIX_FMT_NONE + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVPixelFormat av_pix_fmt_swap_endianness(AVPixelFormat @pix_fmt); + + /// Send a nice dump of a packet to the log. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message, lower values signifying higher importance. + /// packet to dump + /// True if the payload must be displayed, too. + /// AVStream that the packet belongs to + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_pkt_dump_log2(void* @avcl, int @level, AVPacket* @pkt, int @dump_payload, AVStream* @st); + + /// Send a nice dump of a packet to the specified file stream. + /// The file stream pointer where the dump should be sent to. + /// packet to dump + /// True if the payload must be displayed, too. + /// AVStream that the packet belongs to + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_pkt_dump2(_iobuf* @f, AVPacket* @pkt, int @dump_payload, AVStream* @st); + + /// Like av_probe_input_buffer2() but returns 0 on success + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_probe_input_buffer(AVIOContext* @pb, AVInputFormat** @fmt, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, void* @logctx, uint @offset, uint @max_probe_size); + + /// Probe a bytestream to determine the input format. Each time a probe returns with a score that is too low, the probe buffer size is increased and another attempt is made. When the maximum probe size is reached, the input format with the highest score is returned. 
+ /// the bytestream to probe + /// the input format is put here + /// the url of the stream + /// the log context + /// the offset within the bytestream to probe from + /// the maximum probe buffer size (zero for default) + /// the score in case of success (the maximal score is AVPROBE_SCORE_MAX), a negative value corresponding to an AVERROR code otherwise + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_probe_input_buffer2(AVIOContext* @pb, AVInputFormat** @fmt, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, void* @logctx, uint @offset, uint @max_probe_size); + + /// Guess the file format. + /// data to be probed + /// Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVInputFormat* av_probe_input_format(AVProbeData* @pd, int @is_opened); + + /// Guess the file format. + /// data to be probed + /// Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed. + /// A probe score larger than this is required to accept a detection, the variable is set to the actual detection score afterwards. If the score is <= AVPROBE_SCORE_MAX / 4 it is recommended to retry with a larger probe buffer. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVInputFormat* av_probe_input_format2(AVProbeData* @pd, int @is_opened, int* @score_max); + + /// Guess the file format. + /// Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed. + /// The score of the best detection. 
+ [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVInputFormat* av_probe_input_format3(AVProbeData* @pd, int @is_opened, int* @score_ret); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_program_add_stream_index(AVFormatContext* @ac, int @progid, uint @idx); + + /// Convert an AVRational to a IEEE 32-bit `float` expressed in fixed-point format. + /// Rational to be converted + /// Equivalent floating-point value, expressed as an unsigned 32-bit integer. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_q2intfloat(AVRational @q); + + /// Return the next frame of a stream. This function returns what is stored in the file, and does not validate that what is there are valid frames for the decoder. It will split what is stored in the file into frames and return one for each call. It will not omit invalid data between valid frames so as to give the decoder the maximum information possible for decoding. + /// 0 if OK, < 0 on error or end of file. On error, pkt will be blank (as if it came from av_packet_alloc()). + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_read_frame(AVFormatContext* @s, AVPacket* @pkt); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_read_image_line(ushort* @dst, in byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component); + + /// Read a line from an image, and write the values of the pixel format component c to dst. 
+ /// the array containing the pointers to the planes of the image + /// the array containing the linesizes of the image + /// the pixel format descriptor for the image + /// the horizontal coordinate of the first pixel to read + /// the vertical coordinate of the first pixel to read + /// the width of the line to read, that is the number of values to write to dst + /// if not zero and the format is a paletted format writes the values corresponding to the palette component c in data[1] to dst, rather than the palette indexes in data[0]. The behavior is undefined if the format is not paletted. + /// size of elements in dst array (2 or 4 byte) + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_read_image_line2(void* @dst, in byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component, int @dst_element_size); + + /// Pause a network-based stream (e.g. RTSP stream). + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_read_pause(AVFormatContext* @s); + + /// Start playing a network-based stream (e.g. RTSP stream) at the current position. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_read_play(AVFormatContext* @s); + + /// Allocate, reallocate, or free a block of memory. + /// Pointer to a memory block already allocated with av_realloc() or `NULL` + /// Size in bytes of the memory block to be allocated or reallocated + /// Pointer to a newly-reallocated block or `NULL` if the block cannot be reallocated + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_realloc(void* @ptr, ulong @size); + + /// Allocate, reallocate, or free an array. 
+ /// Pointer to a memory block already allocated with av_realloc() or `NULL` + /// Number of elements in the array + /// Size of the single element of the array + /// Pointer to a newly-reallocated block or NULL if the block cannot be reallocated + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_realloc_array(void* @ptr, ulong @nmemb, ulong @size); + + /// Allocate, reallocate, or free a block of memory. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_realloc_f(void* @ptr, ulong @nelem, ulong @elsize); + + /// Allocate, reallocate, or free a block of memory through a pointer to a pointer. + /// Pointer to a pointer to a memory block already allocated with av_realloc(), or a pointer to `NULL`. The pointer is updated on success, or freed on failure. + /// Size in bytes for the memory block to be allocated or reallocated + /// Zero on success, an AVERROR error code on failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_reallocp(void* @ptr, ulong @size); + + /// Allocate, reallocate an array through a pointer to a pointer. + /// Pointer to a pointer to a memory block already allocated with av_realloc(), or a pointer to `NULL`. The pointer is updated on success, or freed on failure. + /// Number of elements + /// Size of the single element + /// Zero on success, an AVERROR error code on failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_reallocp_array(void* @ptr, ulong @nmemb, ulong @size); + + /// Reduce a fraction. 
+ /// Destination numerator + /// Destination denominator + /// Source numerator + /// Source denominator + /// Maximum allowed values for `dst_num` & `dst_den` + /// 1 if the operation is exact, 0 otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_reduce(int* @dst_num, int* @dst_den, long @num, long @den, long @max); + + /// Rescale a 64-bit integer with rounding to nearest. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_rescale(long @a, long @b, long @c); + + /// Rescale a timestamp while preserving known durations. + /// Input time base + /// Input timestamp + /// Duration time base; typically this is finer-grained (greater) than `in_tb` and `out_tb` + /// Duration till the next call to this function (i.e. duration of the current packet/frame) + /// Pointer to a timestamp expressed in terms of `fs_tb`, acting as a state variable + /// Output timebase + /// Timestamp expressed in terms of `out_tb` + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_rescale_delta(AVRational @in_tb, long @in_ts, AVRational @fs_tb, int @duration, long* @last, AVRational @out_tb); + + /// Rescale a 64-bit integer by 2 rational numbers. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_rescale_q(long @a, AVRational @bq, AVRational @cq); + + /// Rescale a 64-bit integer by 2 rational numbers with specified rounding. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_rescale_q_rnd(long @a, AVRational @bq, AVRational @cq, AVRounding @rnd); + + /// Rescale a 64-bit integer with specified rounding. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_rescale_rnd(long @a, long @b, long @c, AVRounding @rnd); + + /// Check if the sample format is planar. 
+ /// the sample format to inspect + /// 1 if the sample format is planar, 0 if it is interleaved + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_sample_fmt_is_planar(AVSampleFormat @sample_fmt); + + /// Allocate a samples buffer for nb_samples samples, and fill data pointers and linesize accordingly. The allocated samples buffer can be freed by using av_freep(&audio_data[0]) Allocated data will be initialized to silence. + /// array to be filled with the pointer for each channel + /// aligned size for audio buffer(s), may be NULL + /// number of audio channels + /// number of samples per channel + /// buffer size alignment (0 = default, 1 = no alignment) + /// >=0 on success or a negative error code on failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_samples_alloc(byte** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + + /// Allocate a data pointers array, samples buffer for nb_samples samples, and fill data pointers and linesize accordingly. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_samples_alloc_array_and_samples(byte*** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + + /// Copy samples from src to dst. 
+ /// destination array of pointers to data planes + /// source array of pointers to data planes + /// offset in samples at which the data will be written to dst + /// offset in samples at which the data will be read from src + /// number of samples to be copied + /// number of audio channels + /// audio sample format + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_samples_copy(byte** @dst, byte** @src, int @dst_offset, int @src_offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt); + + /// Fill plane data pointers and linesize for samples with sample format sample_fmt. + /// array to be filled with the pointer for each channel + /// calculated linesize, may be NULL + /// the pointer to a buffer containing the samples + /// the number of channels + /// the number of samples in a single channel + /// the sample format + /// buffer size alignment (0 = default, 1 = no alignment) + /// minimum size in bytes required for the buffer on success, or a negative error code on failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_samples_fill_arrays(byte** @audio_data, int* @linesize, byte* @buf, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + + /// Get the required buffer size for the given audio parameters. + /// calculated linesize, may be NULL + /// the number of channels + /// the number of samples in a single channel + /// the sample format + /// buffer size alignment (0 = default, 1 = no alignment) + /// required buffer size, or negative error code on failure + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_samples_get_buffer_size(int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + + /// Fill an audio buffer with silence. 
+ /// array of pointers to data planes + /// offset in samples at which to start filling + /// number of samples to fill + /// number of audio channels + /// audio sample format + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_samples_set_silence(byte** @audio_data, int @offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt); + + /// Generate an SDP for an RTP session. + /// array of AVFormatContexts describing the RTP streams. If the array is composed by only one context, such context can contain multiple AVStreams (one AVStream per RTP stream). Otherwise, all the contexts in the array (an AVCodecContext per RTP stream) must contain only one AVStream. + /// number of AVCodecContexts contained in ac + /// buffer where the SDP will be stored (must be allocated by the caller) + /// the size of the buffer + /// 0 if OK, AVERROR_xxx on error + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_sdp_create(AVFormatContext** @ac, int @n_files, byte* @buf, int @size); + + /// Seek to the keyframe at timestamp. 'timestamp' in 'stream_index'. + /// media file handle + /// If stream_index is (-1), a default stream is selected, and timestamp is automatically converted from AV_TIME_BASE units to the stream specific time_base. + /// Timestamp in AVStream.time_base units or, if no stream is specified, in AV_TIME_BASE units. + /// flags which select direction and seeking mode + /// >= 0 on success + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_seek_frame(AVFormatContext* @s, int @stream_index, long @timestamp, int @flags); + + /// Parse the key/value pairs list in opts. For each key/value pair found, stores the value in the field in ctx that is named like the key. ctx must be an AVClass context, storing is done using AVOptions. 
+ /// options string to parse, may be NULL + /// a 0-terminated list of characters used to separate key from value + /// a 0-terminated list of characters used to separate two pairs from each other + /// the number of successfully set key/value pairs, or a negative value corresponding to an AVERROR code in case of error: AVERROR(EINVAL) if opts cannot be parsed, the error code issued by av_opt_set() if a key/value pair cannot be set + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_set_options_string(void* @ctx, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @opts, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @pairs_sep); + + /// Reduce packet size, correctly zeroing padding + /// packet + /// new size + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_shrink_packet(AVPacket* @pkt, int @size); + + /// Multiply two `size_t` values checking for overflow. + /// Pointer to the result of the operation + /// 0 on success, AVERROR(EINVAL) on overflow + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_size_mult(ulong @a, ulong @b, ulong* @r); + + /// Duplicate a string. 
+ /// String to be duplicated + /// Pointer to a newly-allocated string containing a copy of `s` or `NULL` if the string cannot be allocated + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_strdup( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @s); + + /// Wrap an existing array as stream side data. + /// stream + /// side information type + /// the side data array. It must be allocated with the av_malloc() family of functions. The ownership of the data is transferred to st. + /// side information size + /// zero on success, a negative AVERROR code on failure. On failure, the stream is unchanged and the data remains owned by the caller. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_stream_add_side_data(AVStream* @st, AVPacketSideDataType @type, byte* @data, ulong @size); + + /// Get the AVClass for AVStream. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* av_stream_get_class(); + + /// Get the internal codec timebase from a stream. + /// input stream to extract the timebase from + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_stream_get_codec_timebase(AVStream* @st); + + /// Returns the pts of the last muxed packet + its duration + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_stream_get_end_pts(AVStream* @st); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecParserContext* av_stream_get_parser(AVStream* @s); + + /// Get side information from stream. 
+ /// stream + /// desired side information type + /// If supplied, *size will be set to the size of the side data or to zero if the desired side data is not present. + /// pointer to data if present or NULL otherwise + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_stream_get_side_data(AVStream* @stream, AVPacketSideDataType @type, ulong* @size); + + /// Allocate new information from stream. + /// stream + /// desired side information type + /// side information size + /// pointer to fresh allocated data or NULL otherwise + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_stream_new_side_data(AVStream* @stream, AVPacketSideDataType @type, ulong @size); + + /// Put a description of the AVERROR code errnum in errbuf. In case of failure the global variable errno is set to indicate the error. Even in case of failure av_strerror() will print a generic error message indicating the errnum provided to errbuf. + /// error code to describe + /// buffer to which description is written + /// the size in bytes of errbuf + /// 0 on success, a negative value if a description for errnum cannot be found + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_strerror(int @errnum, byte* @errbuf, ulong @errbuf_size); + + /// Duplicate a substring of a string. 
+ /// String to be duplicated + /// Maximum length of the resulting string (not counting the terminating byte) + /// Pointer to a newly-allocated string containing a substring of `s` or `NULL` if the string cannot be allocated + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_strndup( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @s, ulong @len); + + /// Subtract one rational from another. + /// First rational + /// Second rational + /// b-c + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_sub_q(AVRational @b, AVRational @c); + + /// Wrapper to work around the lack of mkstemp() on mingw. Also, tries to create file in /tmp first, if possible. *prefix can be a character constant; *filename will be allocated internally. + /// file descriptor of opened file (or negative value corresponding to an AVERROR code on error) and opened file name in **filename. + [Obsolete("as fd numbers cannot be passed saftely between libs on some platforms")] + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_tempfile( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @prefix, byte** @filename, int @log_offset, void* @log_ctx); + + /// Adjust frame number for NTSC drop frame time code. 
+ /// frame number to adjust + /// frame per second, multiples of 30 + /// adjusted frame number + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_timecode_adjust_ntsc_framenum2(int @framenum, int @fps); + + /// Check if the timecode feature is available for the given frame rate + /// 0 if supported, < 0 otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_timecode_check_frame_rate(AVRational @rate); + + /// Convert sei info to SMPTE 12M binary representation. + /// frame rate in rational form + /// drop flag + /// hour + /// minute + /// second + /// frame number + /// the SMPTE binary representation + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_timecode_get_smpte(AVRational @rate, int @drop, int @hh, int @mm, int @ss, int @ff); + + /// Convert frame number to SMPTE 12M binary representation. + /// timecode data correctly initialized + /// frame number + /// the SMPTE binary representation + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_timecode_get_smpte_from_framenum(AVTimecode* @tc, int @framenum); + + /// Init a timecode struct with the passed parameters. + /// pointer to an allocated AVTimecode + /// frame rate in rational form + /// miscellaneous flags such as drop frame, +24 hours, ... (see AVTimecodeFlag) + /// the first frame number + /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log) + /// 0 on success, AVERROR otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_timecode_init(AVTimecode* @tc, AVRational @rate, int @flags, int @frame_start, void* @log_ctx); + + /// Init a timecode struct from the passed timecode components. 
+ /// pointer to an allocated AVTimecode + /// frame rate in rational form + /// miscellaneous flags such as drop frame, +24 hours, ... (see AVTimecodeFlag) + /// hours + /// minutes + /// seconds + /// frames + /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log) + /// 0 on success, AVERROR otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_timecode_init_from_components(AVTimecode* @tc, AVRational @rate, int @flags, int @hh, int @mm, int @ss, int @ff, void* @log_ctx); + + /// Parse timecode representation (hh:mm:ss[:;.]ff). + /// pointer to an allocated AVTimecode + /// frame rate in rational form + /// timecode string which will determine the frame start + /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log). + /// 0 on success, AVERROR otherwise + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_timecode_init_from_string(AVTimecode* @tc, AVRational @rate, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str, void* @log_ctx); + + /// Get the timecode string from the 25-bit timecode format (MPEG GOP format). + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// the 25-bits timecode + /// the buf parameter + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_timecode_make_mpeg_tc_string(byte* @buf, uint @tc25bit); + + /// Get the timecode string from the SMPTE timecode format. 
+ /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// the 32-bit SMPTE timecode + /// prevent the use of a drop flag when it is known the DF bit is arbitrary + /// the buf parameter + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_timecode_make_smpte_tc_string(byte* @buf, uint @tcsmpte, int @prevent_df); + + /// Get the timecode string from the SMPTE timecode format. + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// frame rate of the timecode + /// the 32-bit SMPTE timecode + /// prevent the use of a drop flag when it is known the DF bit is arbitrary + /// prevent the use of a field flag when it is known the field bit is arbitrary (e.g. because it is used as PC flag) + /// the buf parameter + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_timecode_make_smpte_tc_string2(byte* @buf, AVRational @rate, uint @tcsmpte, int @prevent_df, int @skip_field); + + /// Load timecode string in buf. + /// timecode data correctly initialized + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// frame number + /// the buf parameter + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_timecode_make_string(AVTimecode* @tc, byte* @buf, int @framenum); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_tree_destroy(AVTreeNode* @t); + + /// Apply enu(opaque, &elem) to all the elements in the tree in a given range. + /// a comparison function that returns < 0 for an element below the range, > 0 for an element above the range and == 0 for an element inside the range + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_tree_enumerate(AVTreeNode* @t, void* @opaque, av_tree_enumerate_cmp_func @cmp, av_tree_enumerate_enu_func @enu); + + /// Find an element. 
+ /// a pointer to the root node of the tree + /// compare function used to compare elements in the tree, API identical to that of Standard C's qsort It is guaranteed that the first and only the first argument to cmp() will be the key parameter to av_tree_find(), thus it could if the user wants, be a different type (like an opaque context). + /// If next is not NULL, then next[0] will contain the previous element and next[1] the next element. If either does not exist, then the corresponding entry in next is unchanged. + /// An element with cmp(key, elem) == 0 or NULL if no such element exists in the tree. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_tree_find(AVTreeNode* @root, void* @key, av_tree_find_cmp_func @cmp, ref void_ptr2 @next); + + /// Insert or remove an element. + /// A pointer to a pointer to the root node of the tree; note that the root node can change during insertions, this is required to keep the tree balanced. + /// pointer to the element key to insert in the tree + /// compare function used to compare elements in the tree, API identical to that of Standard C's qsort + /// Used to allocate and free AVTreeNodes. For insertion the user must set it to an allocated and zeroed object of at least av_tree_node_size bytes size. av_tree_insert() will set it to NULL if it has been consumed. For deleting elements *next is set to NULL by the user and av_tree_insert() will set it to the AVTreeNode which was used for the removed element. This allows the use of flat arrays, which have lower overhead compared to many malloced elements. You might want to define a function like: + /// If no insertion happened, the found element; if an insertion or removal happened, then either key or NULL will be returned. Which one it is depends on the tree state and the implementation. You should make no assumptions that it's one or the other in the code. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_tree_insert(AVTreeNode** @rootp, void* @key, av_tree_insert_cmp_func @cmp, AVTreeNode** @next); + + /// Allocate an AVTreeNode. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern AVTreeNode* av_tree_node_alloc(); + + /// Split a URL string into components. + /// the buffer for the protocol + /// the size of the proto buffer + /// the buffer for the authorization + /// the size of the authorization buffer + /// the buffer for the host name + /// the size of the hostname buffer + /// a pointer to store the port number in + /// the buffer for the path + /// the size of the path buffer + /// the URL to split + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_url_split(byte* @proto, int @proto_size, byte* @authorization, int @authorization_size, byte* @hostname, int @hostname_size, int* @port_ptr, byte* @path, int @path_size, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url); + + /// Sleep for a period of time. Although the duration is expressed in microseconds, the actual delay may be rounded to the precision of the system timer. + /// Number of microseconds to sleep. + /// zero on success or (negative) error code. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_usleep(uint @usec); + + /// Return an informative version string. This usually is the actual release version number or a git commit description. This string has no fixed format and can change any time. It should never be parsed by code. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_version_info(); + + /// Send the specified message to the log if the level is less than or equal to the current av_log_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message expressed using a "Logging Constant". + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. + /// The arguments referenced by the format string. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_vlog(void* @avcl, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl); + + /// Write a packet to an output media file. + /// media file handle + /// The packet containing the data to be written. Note that unlike av_interleaved_write_frame(), this function does not take ownership of the packet passed to it (though some muxers may make an internal reference to the input packet). This parameter can be NULL (at any time, not just at the end), in order to immediately flush data buffered within the muxer, for muxers that buffer up data internally before writing it to the output. Packet's "stream_index" field must be set to the index of the corresponding stream in "s->streams". The timestamps ( "pts", "dts") must be set to correct values in the stream's timebase (unless the output format is flagged with the AVFMT_NOTIMESTAMPS flag, then they can be set to AV_NOPTS_VALUE). 
The dts for subsequent packets passed to this function must be strictly increasing when compared in their respective timebases (unless the output format is flagged with the AVFMT_TS_NONSTRICT, then they merely have to be nondecreasing). "duration") should also be set if known. + /// < 0 on error, = 0 if OK, 1 if flushed and there is no more data to flush + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_write_frame(AVFormatContext* @s, AVPacket* @pkt); + + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_write_image_line(ushort* @src, ref byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w); + + /// Write the values from src to the pixel format component c of an image line. + /// array containing the values to write + /// the array containing the pointers to the planes of the image to write into. It is supposed to be zeroed. + /// the array containing the linesizes of the image + /// the pixel format descriptor for the image + /// the horizontal coordinate of the first pixel to write + /// the vertical coordinate of the first pixel to write + /// the width of the line to write, that is the number of values to write to the image line + /// size of elements in src array (2 or 4 byte) + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_write_image_line2(void* @src, ref byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @src_element_size); + + /// Write the stream trailer to an output media file and free the file private data. + /// media file handle + /// 0 if OK, AVERROR_xxx on error + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_write_trailer(AVFormatContext* @s); + + /// Write an uncoded frame to an output media file. 
+ [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_write_uncoded_frame(AVFormatContext* @s, int @stream_index, AVFrame* @frame); + + /// Test whether a muxer supports uncoded frame. + /// >=0 if an uncoded frame can be written to that muxer and stream, < 0 if not + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_write_uncoded_frame_query(AVFormatContext* @s, int @stream_index); + + /// Encode extradata length to a buffer. Used by xiph codecs. + /// buffer to write to; must be at least (v/255+1) bytes long + /// size of extradata in bytes + /// number of bytes written to the buffer. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_xiphlacing(byte* @s, uint @v); + + /// Modify width and height values so that they will result in a memory buffer that is acceptable for the codec if you do not use any horizontal padding. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avcodec_align_dimensions(AVCodecContext* @s, int* @width, int* @height); + + /// Modify width and height values so that they will result in a memory buffer that is acceptable for the codec if you also ensure that all line sizes are a multiple of the respective linesize_align[i]. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avcodec_align_dimensions2(AVCodecContext* @s, int* @width, int* @height, ref int8 @linesize_align); + + /// Allocate an AVCodecContext and set its fields to default values. The resulting struct should be freed with avcodec_free_context(). + /// if non-NULL, allocate private data and initialize defaults for the given codec. It is illegal to then call avcodec_open2() with a different codec. 
If NULL, then the codec-specific defaults won't be initialized, which may result in suboptimal default settings (this is important mainly for encoders, e.g. libx264). + /// An AVCodecContext filled with default values or NULL on failure. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecContext* avcodec_alloc_context3(AVCodec* @codec); + + /// Converts swscale x/y chroma position to AVChromaLocation. + /// horizontal chroma sample position + /// vertical chroma sample position + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVChromaLocation avcodec_chroma_pos_to_enum(int @xpos, int @ypos); + + /// Close a given AVCodecContext and free all the data associated with it (but not the AVCodecContext itself). + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_close(AVCodecContext* @avctx); + + /// Return the libavcodec build-time configuration. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avcodec_configuration(); + + /// Decode a subtitle message. Return a negative value on error, otherwise return the number of bytes used. If no subtitle could be decompressed, got_sub_ptr is zero. Otherwise, the subtitle is stored in *sub. Note that AV_CODEC_CAP_DR1 is not available for subtitle codecs. This is for simplicity, because the performance difference is expected to be negligible and reusing a get_buffer written for video codecs would probably perform badly due to a potentially very different allocation pattern. + /// the codec context + /// The preallocated AVSubtitle in which the decoded subtitle will be stored, must be freed with avsubtitle_free if *got_sub_ptr is set. + /// Zero if no subtitle could be decompressed, otherwise, it is nonzero. 
+ /// The input AVPacket containing the input buffer. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_decode_subtitle2(AVCodecContext* @avctx, AVSubtitle* @sub, int* @got_sub_ptr, AVPacket* @avpkt); + + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_default_execute(AVCodecContext* @c, avcodec_default_execute_func_func @func, void* @arg, int* @ret, int @count, int @size); + + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_default_execute2(AVCodecContext* @c, avcodec_default_execute2_func_func @func, void* @arg, int* @ret, int @count); + + /// The default callback for AVCodecContext.get_buffer2(). It is made public so it can be called by custom get_buffer2() implementations for decoders without AV_CODEC_CAP_DR1 set. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_default_get_buffer2(AVCodecContext* @s, AVFrame* @frame, int @flags); + + /// The default callback for AVCodecContext.get_encode_buffer(). It is made public so it can be called by custom get_encode_buffer() implementations for encoders without AV_CODEC_CAP_DR1 set. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_default_get_encode_buffer(AVCodecContext* @s, AVPacket* @pkt, int @flags); + + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVPixelFormat avcodec_default_get_format(AVCodecContext* @s, AVPixelFormat* @fmt); + + /// Returns descriptor for given codec ID or NULL if no descriptor exists. + /// descriptor for given codec ID or NULL if no descriptor exists. 
+ [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecDescriptor* avcodec_descriptor_get(AVCodecID @id); + + /// Returns codec descriptor with the given name or NULL if no such descriptor exists. + /// codec descriptor with the given name or NULL if no such descriptor exists. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecDescriptor* avcodec_descriptor_get_by_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Iterate over all codec descriptors known to libavcodec. + /// previous descriptor. NULL to get the first descriptor. + /// next descriptor or NULL after the last descriptor + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecDescriptor* avcodec_descriptor_next(AVCodecDescriptor* @prev); + + /// @{ + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_encode_subtitle(AVCodecContext* @avctx, byte* @buf, int @buf_size, AVSubtitle* @sub); + + /// Converts AVChromaLocation to swscale x/y chroma position. + /// horizontal chroma sample position + /// vertical chroma sample position + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_enum_to_chroma_pos(int* @xpos, int* @ypos, AVChromaLocation @pos); + + /// Fill AVFrame audio data and linesize pointers. + /// the AVFrame frame->nb_samples must be set prior to calling the function. This function fills in frame->data, frame->extended_data, frame->linesize[0]. 
+ /// channel count + /// sample format + /// buffer to use for frame data + /// size of buffer + /// plane size sample alignment (0 = default) + /// >=0 on success, negative error code on failure + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_fill_audio_frame(AVFrame* @frame, int @nb_channels, AVSampleFormat @sample_fmt, byte* @buf, int @buf_size, int @align); + + /// Find the best pixel format to convert to given a certain source pixel format. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. avcodec_find_best_pix_fmt_of_2() searches which of the given pixel formats should be used to suffer the least amount of loss. The pixel formats from which it chooses one, are determined by the pix_fmt_list parameter. + /// AV_PIX_FMT_NONE terminated array of pixel formats to choose from + /// source pixel format + /// Whether the source pixel format alpha channel is used. + /// Combination of flags informing you what kind of losses will occur. + /// The best pixel format to convert to or -1 if none was found. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVPixelFormat avcodec_find_best_pix_fmt_of_list(AVPixelFormat* @pix_fmt_list, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr); + + /// Find a registered decoder with a matching codec ID. + /// AVCodecID of the requested decoder + /// A decoder if one was found, NULL otherwise. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodec* avcodec_find_decoder(AVCodecID @id); + + /// Find a registered decoder with the specified name. + /// name of the requested decoder + /// A decoder if one was found, NULL otherwise. 
+ [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodec* avcodec_find_decoder_by_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Find a registered encoder with a matching codec ID. + /// AVCodecID of the requested encoder + /// An encoder if one was found, NULL otherwise. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodec* avcodec_find_encoder(AVCodecID @id); + + /// Find a registered encoder with the specified name. + /// name of the requested encoder + /// An encoder if one was found, NULL otherwise. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodec* avcodec_find_encoder_by_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Reset the internal codec state / flush internal buffers. Should be called e.g. when seeking or when switching to a different stream. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avcodec_flush_buffers(AVCodecContext* @avctx); + + /// Free the codec context and everything associated with it and write NULL to the provided pointer. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avcodec_free_context(AVCodecContext** @avctx); + + /// Get the AVClass for AVCodecContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. 
+ [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* avcodec_get_class(); + + [Obsolete("This function should not be used.")] + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* avcodec_get_frame_class(); + + /// Retrieve supported hardware configurations for a codec. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecHWConfig* avcodec_get_hw_config(AVCodec* @codec, int @index); + + /// Create and return a AVHWFramesContext with values adequate for hardware decoding. This is meant to get called from the get_format callback, and is a helper for preparing a AVHWFramesContext for AVCodecContext.hw_frames_ctx. This API is for decoding with certain hardware acceleration modes/APIs only. + /// The context which is currently calling get_format, and which implicitly contains all state needed for filling the returned AVHWFramesContext properly. + /// A reference to the AVHWDeviceContext describing the device which will be used by the hardware decoder. + /// The hwaccel format you are going to return from get_format. + /// On success, set to a reference to an _uninitialized_ AVHWFramesContext, created from the given device_ref. Fields will be set to values required for decoding. Not changed if an error is returned. + /// zero on success, a negative value on error. The following error codes have special semantics: AVERROR(ENOENT): the decoder does not support this functionality. Setup is always manual, or it is a decoder which does not support setting AVCodecContext.hw_frames_ctx at all, or it is a software format. AVERROR(EINVAL): it is known that hardware decoding is not supported for this configuration, or the device_ref is not supported for the hwaccel referenced by hw_pix_fmt. 
+ [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_get_hw_frames_parameters(AVCodecContext* @avctx, AVBufferRef* @device_ref, AVPixelFormat @hw_pix_fmt, AVBufferRef** @out_frames_ref); + + /// Get the name of a codec. + /// a static string identifying the codec; never NULL + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avcodec_get_name(AVCodecID @id); + + /// Get the AVClass for AVSubtitleRect. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* avcodec_get_subtitle_rect_class(); + + /// Get the type of the given codec. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVMediaType avcodec_get_type(AVCodecID @codec_id); + + /// Returns a positive value if s is open (i.e. avcodec_open2() was called on it with no corresponding avcodec_close()), 0 otherwise. + /// a positive value if s is open (i.e. avcodec_open2() was called on it with no corresponding avcodec_close()), 0 otherwise. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_is_open(AVCodecContext* @s); + + /// Return the libavcodec license. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avcodec_license(); + + /// Initialize the AVCodecContext to use the given AVCodec. Prior to using this function the context has to be allocated with avcodec_alloc_context3(). + /// The context to initialize. + /// The codec to open this context for. 
If a non-NULL codec has been previously passed to avcodec_alloc_context3() or for this context, then this parameter MUST be either NULL or equal to the previously passed codec. + /// A dictionary filled with AVCodecContext and codec-private options. On return this object will be filled with options that were not found. + /// zero on success, a negative value on error + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_open2(AVCodecContext* @avctx, AVCodec* @codec, AVDictionary** @options); + + /// Allocate a new AVCodecParameters and set its fields to default values (unknown/invalid/0). The returned struct must be freed with avcodec_parameters_free(). + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecParameters* avcodec_parameters_alloc(); + + /// Copy the contents of src to dst. Any allocated fields in dst are freed and replaced with newly allocated duplicates of the corresponding fields in src. + /// >= 0 on success, a negative AVERROR code on failure. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_parameters_copy(AVCodecParameters* @dst, AVCodecParameters* @src); + + /// Free an AVCodecParameters instance and everything associated with it and write NULL to the supplied pointer. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avcodec_parameters_free(AVCodecParameters** @par); + + /// Fill the parameters struct based on the values from the supplied codec context. Any allocated fields in par are freed and replaced with duplicates of the corresponding fields in codec. 
+ /// >= 0 on success, a negative AVERROR code on failure + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_parameters_from_context(AVCodecParameters* @par, AVCodecContext* @codec); + + /// Fill the codec context based on the values from the supplied codec parameters. Any allocated fields in codec that have a corresponding field in par are freed and replaced with duplicates of the corresponding field in par. Fields in codec that do not have a counterpart in par are not touched. + /// >= 0 on success, a negative AVERROR code on failure. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_parameters_to_context(AVCodecContext* @codec, AVCodecParameters* @par); + + /// Return a value representing the fourCC code associated to the pixel format pix_fmt, or 0 if no associated fourCC code can be found. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avcodec_pix_fmt_to_codec_tag(AVPixelFormat @pix_fmt); + + /// Return a name for the specified profile, if available. + /// the ID of the codec to which the requested profile belongs + /// the profile value for which a name is requested + /// A name for the profile if found, NULL otherwise. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avcodec_profile_name(AVCodecID @codec_id, int @profile); + + /// Return decoded output data from a decoder. + /// codec context + /// This will be set to a reference-counted video or audio frame (depending on the decoder type) allocated by the decoder. Note that the function will always call av_frame_unref(frame) before doing anything else. 
+ /// 0: success, a frame was returned AVERROR(EAGAIN): output is not available in this state - user must try to send new input AVERROR_EOF: the decoder has been fully flushed, and there will be no more output frames AVERROR(EINVAL): codec not opened, or it is an encoder AVERROR_INPUT_CHANGED: current decoded frame has changed parameters with respect to first decoded frame. Applicable when flag AV_CODEC_FLAG_DROPCHANGED is set. other negative values: legitimate decoding errors + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_receive_frame(AVCodecContext* @avctx, AVFrame* @frame); + + /// Read encoded data from the encoder. + /// codec context + /// This will be set to a reference-counted packet allocated by the encoder. Note that the function will always call av_packet_unref(avpkt) before doing anything else. + /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): output is not available in the current state - user must try to send input AVERROR_EOF: the encoder has been fully flushed, and there will be no more output packets AVERROR(EINVAL): codec not opened, or it is a decoder other errors: legitimate encoding errors + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_receive_packet(AVCodecContext* @avctx, AVPacket* @avpkt); + + /// Supply a raw video or audio frame to the encoder. Use avcodec_receive_packet() to retrieve buffered output packets. + /// codec context + /// AVFrame containing the raw audio or video frame to be encoded. Ownership of the frame remains with the caller, and the encoder will not write to the frame. The encoder may create a reference to the frame data (or copy it if the frame is not reference-counted). It can be NULL, in which case it is considered a flush packet. This signals the end of the stream. If the encoder still has packets buffered, it will return them after this call. 
Once flushing mode has been entered, additional flush packets are ignored, and sending frames will return AVERROR_EOF. + /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): input is not accepted in the current state - user must read output with avcodec_receive_packet() (once all output is read, the packet should be resent, and the call will not fail with EAGAIN). AVERROR_EOF: the encoder has been flushed, and no new frames can be sent to it AVERROR(EINVAL): codec not opened, it is a decoder, or requires flush AVERROR(ENOMEM): failed to add packet to internal queue, or similar other errors: legitimate encoding errors + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_send_frame(AVCodecContext* @avctx, AVFrame* @frame); + + /// Supply raw packet data as input to a decoder. + /// codec context + /// The input AVPacket. Usually, this will be a single video frame, or several complete audio frames. Ownership of the packet remains with the caller, and the decoder will not write to the packet. The decoder may create a reference to the packet data (or copy it if the packet is not reference-counted). Unlike with older APIs, the packet is always fully consumed, and if it contains multiple frames (e.g. some audio codecs), will require you to call avcodec_receive_frame() multiple times afterwards before you can send a new packet. It can be NULL (or an AVPacket with data set to NULL and size set to 0); in this case, it is considered a flush packet, which signals the end of the stream. Sending the first flush packet will return success. Subsequent ones are unnecessary and will return AVERROR_EOF. If the decoder still has frames buffered, it will return them after sending a flush packet. 
+ /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): input is not accepted in the current state - user must read output with avcodec_receive_frame() (once all output is read, the packet should be resent, and the call will not fail with EAGAIN). AVERROR_EOF: the decoder has been flushed, and no new packets can be sent to it (also returned if more than 1 flush packet is sent) AVERROR(EINVAL): codec not opened, it is an encoder, or requires flush AVERROR(ENOMEM): failed to add packet to internal queue, or similar other errors: legitimate decoding errors + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_send_packet(AVCodecContext* @avctx, AVPacket* @avpkt); + + /// @} + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avcodec_string(byte* @buf, int @buf_size, AVCodecContext* @enc, int @encode); + + /// Return the LIBAVCODEC_VERSION_INT constant. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avcodec_version(); + + /// Send control message from application to device. + /// device context. + /// message type. + /// message data. Exact type depends on message type. + /// size of message data. + /// >= 0 on success, negative on error. AVERROR(ENOSYS) when device doesn't implement handler of the message. + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avdevice_app_to_dev_control_message(AVFormatContext* @s, AVAppToDevMessageType @type, void* @data, ulong @data_size); + + /// Initialize capabilities probing API based on AVOption API. + /// Device capabilities data. Pointer to a NULL pointer must be passed. + /// Context of the device. + /// An AVDictionary filled with device-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. 
The same options must be passed later to avformat_write_header() for output devices or avformat_open_input() for input devices, or at any other place that affects device-private options. + /// >= 0 on success, negative otherwise. + [Obsolete()] + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avdevice_capabilities_create(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s, AVDictionary** @device_options); + + /// Free resources created by avdevice_capabilities_create() + /// Device capabilities data to be freed. + /// Context of the device. + [Obsolete()] + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avdevice_capabilities_free(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s); + + /// Return the libavdevice build-time configuration. + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avdevice_configuration(); + + /// Send control message from device to application. + /// device context. + /// message type. + /// message data. Can be NULL. + /// size of message data. + /// >= 0 on success, negative on error. AVERROR(ENOSYS) when application doesn't implement handler of the message. + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avdevice_dev_to_app_control_message(AVFormatContext* @s, AVDevToAppMessageType @type, void* @data, ulong @data_size); + + /// Convenient function to free result of avdevice_list_devices(). + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avdevice_free_list_devices(AVDeviceInfoList** @device_list); + + /// Return the libavdevice license. 
+ [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avdevice_license(); + + /// List devices. + /// device context. + /// list of autodetected devices. + /// count of autodetected devices, negative on error. + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avdevice_list_devices(AVFormatContext* @s, AVDeviceInfoList** @device_list); + + /// List devices. + /// device format. May be NULL if device name is set. + /// device name. May be NULL if device format is set. + /// An AVDictionary filled with device-private options. May be NULL. The same options must be passed later to avformat_write_header() for output devices or avformat_open_input() for input devices, or at any other place that affects device-private options. + /// list of autodetected devices + /// count of autodetected devices, negative on error. + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avdevice_list_input_sources(AVInputFormat* @device, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list); + + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avdevice_list_output_sinks(AVOutputFormat* @device, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list); + + /// Initialize libavdevice and register all the input and output devices. 
+ [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avdevice_register_all(); + + /// Return the LIBAVDEVICE_VERSION_INT constant. + [DllImport("avdevice-59", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avdevice_version(); + + /// Negotiate the media format, dimensions, etc of all inputs to a filter. + /// the filter to negotiate the properties for its inputs + /// zero on successful negotiation + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_config_links(AVFilterContext* @filter); + + /// Return the libavfilter build-time configuration. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avfilter_configuration(); + + /// Get the number of elements in an AVFilter's inputs or outputs array. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avfilter_filter_pad_count(AVFilter* @filter, int @is_output); + + /// Free a filter context. This will also remove the filter from its filtergraph's list of filters. + /// the filter to free + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern void avfilter_free(AVFilterContext* @filter); + + /// Get a filter definition matching the given name. + /// the filter name to find + /// the filter definition, if any matching one is registered. NULL if none found. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFilter* avfilter_get_by_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Returns AVClass for AVFilterContext. + /// AVClass for AVFilterContext. 
+ [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* avfilter_get_class(); + + /// Allocate a filter graph. + /// the allocated filter graph on success or NULL. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFilterGraph* avfilter_graph_alloc(); + + /// Create a new filter instance in a filter graph. + /// graph in which the new filter will be used + /// the filter to create an instance of + /// Name to give to the new instance (will be copied to AVFilterContext.name). This may be used by the caller to identify different filters, libavfilter itself assigns no semantics to this parameter. May be NULL. + /// the context of the newly created filter instance (note that it is also retrievable directly through AVFilterGraph.filters or with avfilter_graph_get_filter()) on success or NULL on failure. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFilterContext* avfilter_graph_alloc_filter(AVFilterGraph* @graph, AVFilter* @filter, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Check validity and configure all the links and formats in the graph. + /// the filter graph + /// context used for logging + /// >= 0 in case of success, a negative AVERROR code otherwise + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_config(AVFilterGraph* @graphctx, void* @log_ctx); + + /// Create and add a filter instance into an existing graph. The filter instance is created from the filter filt and inited with the parameter args. opaque is currently ignored. 
+ /// the instance name to give to the created filter instance + /// the filter graph + /// a negative AVERROR error code in case of failure, a non negative value otherwise + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_create_filter(AVFilterContext** @filt_ctx, AVFilter* @filt, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @args, void* @opaque, AVFilterGraph* @graph_ctx); + + /// Dump a graph into a human-readable string representation. + /// the graph to dump + /// formatting options; currently ignored + /// a string, or NULL in case of memory allocation failure; the string must be freed using av_free + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* avfilter_graph_dump(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @options); + + /// Free a graph, destroy its links, and set *graph to NULL. If *graph is NULL, do nothing. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern void avfilter_graph_free(AVFilterGraph** @graph); + + /// Get a filter instance identified by instance name from graph. + /// filter graph to search through. + /// filter instance name (should be unique in the graph). + /// the pointer to the found filter instance or NULL if it cannot be found. 
+ [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFilterContext* avfilter_graph_get_filter(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Add a graph described by a string to a graph. + /// the filter graph where to link the parsed graph context + /// string to be parsed + /// linked list to the inputs of the graph + /// linked list to the outputs of the graph + /// zero on success, a negative AVERROR code on error + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_parse(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filters, AVFilterInOut* @inputs, AVFilterInOut* @outputs, void* @log_ctx); + + /// Add a graph described by a string to a graph. + /// the filter graph where to link the parsed graph context + /// string to be parsed + /// pointer to a linked list to the inputs of the graph, may be NULL. If non-NULL, *inputs is updated to contain the list of open inputs after the parsing, should be freed with avfilter_inout_free(). + /// pointer to a linked list to the outputs of the graph, may be NULL. If non-NULL, *outputs is updated to contain the list of open outputs after the parsing, should be freed with avfilter_inout_free(). 
+ /// non negative on success, a negative AVERROR code on error + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_parse_ptr(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs, void* @log_ctx); + + /// Add a graph described by a string to a graph. + /// the filter graph where to link the parsed graph context + /// string to be parsed + /// a linked list of all free (unlinked) inputs of the parsed graph will be returned here. It is to be freed by the caller using avfilter_inout_free(). + /// a linked list of all free (unlinked) outputs of the parsed graph will be returned here. It is to be freed by the caller using avfilter_inout_free(). + /// zero on success, a negative AVERROR code on error + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_parse2(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs); + + /// Queue a command for one or more filter instances. + /// the filter graph + /// the filter(s) to which the command should be sent "all" sends to all filters otherwise it can be a filter or filter instance name which will send the command to all matching filters. 
+ /// the command to sent, for handling simplicity all commands must be alphanumeric only + /// the argument for the command + /// time at which the command should be sent to the filter + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_queue_command(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @target, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, int @flags, double @ts); + + /// Request a frame on the oldest sink link. + /// the return value of ff_request_frame(), or AVERROR_EOF if all links returned AVERROR_EOF + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_request_oldest(AVFilterGraph* @graph); + + /// Send a command to one or more filter instances. + /// the filter graph + /// the filter(s) to which the command should be sent "all" sends to all filters otherwise it can be a filter or filter instance name which will send the command to all matching filters. + /// the command to send, for handling simplicity all commands must be alphanumeric only + /// the argument for the command + /// a buffer with size res_size where the filter(s) can return a response. 
+ [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_send_command(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @target, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, byte* @res, int @res_len, int @flags); + + /// Enable or disable automatic format conversion inside the graph. + /// any of the AVFILTER_AUTO_CONVERT_* constants + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern void avfilter_graph_set_auto_convert(AVFilterGraph* @graph, uint @flags); + + /// Initialize a filter with the supplied dictionary of options. + /// uninitialized filter context to initialize + /// An AVDictionary filled with options for this filter. On return this parameter will be destroyed and replaced with a dict containing options that were not found. This dictionary must be freed by the caller. May be NULL, then this function is equivalent to avfilter_init_str() with the second parameter set to NULL. + /// 0 on success, a negative AVERROR on failure + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_init_dict(AVFilterContext* @ctx, AVDictionary** @options); + + /// Initialize a filter with the supplied parameters. + /// uninitialized filter context to initialize + /// Options to initialize the filter with. This must be a ':'-separated list of options in the 'key=value' form. 
May be NULL if the options have been set directly using the AVOptions API or there are no options that need to be set. + /// 0 on success, a negative AVERROR on failure + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_init_str(AVFilterContext* @ctx, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @args); + + /// Allocate a single AVFilterInOut entry. Must be freed with avfilter_inout_free(). + /// allocated AVFilterInOut on success, NULL on failure. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFilterInOut* avfilter_inout_alloc(); + + /// Free the supplied list of AVFilterInOut and set *inout to NULL. If *inout is NULL, do nothing. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern void avfilter_inout_free(AVFilterInOut** @inout); + + /// Insert a filter in the middle of an existing link. + /// the link into which the filter should be inserted + /// the filter to be inserted + /// the input pad on the filter to connect + /// the output pad on the filter to connect + /// zero on success + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_insert_filter(AVFilterLink* @link, AVFilterContext* @filt, uint @filt_srcpad_idx, uint @filt_dstpad_idx); + + /// Return the libavfilter license. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avfilter_license(); + + /// Link two filters together. 
+ /// the source filter + /// index of the output pad on the source filter + /// the destination filter + /// index of the input pad on the destination filter + /// zero on success + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_link(AVFilterContext* @src, uint @srcpad, AVFilterContext* @dst, uint @dstpad); + + /// Free the link in *link, and set its pointer to NULL. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern void avfilter_link_free(AVFilterLink** @link); + + /// Get the number of elements in an AVFilter's inputs or outputs array. + [Obsolete("Use avfilter_filter_pad_count() instead.")] + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_pad_count(AVFilterPad* @pads); + + /// Get the name of an AVFilterPad. + /// an array of AVFilterPads + /// index of the pad in the array; it is the caller's responsibility to ensure the index is valid + /// name of the pad_idx'th pad in pads + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avfilter_pad_get_name(AVFilterPad* @pads, int @pad_idx); + + /// Get the type of an AVFilterPad. + /// an array of AVFilterPads + /// index of the pad in the array; it is the caller's responsibility to ensure the index is valid + /// type of the pad_idx'th pad in pads + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern AVMediaType avfilter_pad_get_type(AVFilterPad* @pads, int @pad_idx); + + /// Make the filter instance process a command. It is recommended to use avfilter_graph_send_command(). 
+ [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_process_command(AVFilterContext* @filter, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, byte* @res, int @res_len, int @flags); + + /// Return the LIBAVFILTER_VERSION_INT constant. + [DllImport("avfilter-8", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avfilter_version(); + + /// Allocate an AVFormatContext. avformat_free_context() can be used to free the context and everything allocated by the framework within it. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFormatContext* avformat_alloc_context(); + + /// Allocate an AVFormatContext for an output format. avformat_free_context() can be used to free the context and everything allocated by the framework within it. 
+ /// format to use for allocating the context, if NULL format_name and filename are used instead + /// the name of output format to use for allocating the context, if NULL filename is used instead + /// the name of the filename to use for allocating the context, may be NULL + /// >= 0 in case of success, a negative AVERROR code in case of failure + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_alloc_output_context2(AVFormatContext** @ctx, AVOutputFormat* @oformat, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @format_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename); + + /// Close an opened input AVFormatContext. Free it and all its contents and set *s to NULL. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avformat_close_input(AVFormatContext** @s); + + /// Return the libavformat build-time configuration. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avformat_configuration(); + + /// Read packets of a media file to get stream information. This is useful for file formats with no headers such as MPEG. This function also computes the real framerate in case of MPEG-2 repeat frame mode. The logical file position is not changed by this function; examined packets may be buffered for later processing. + /// media file handle + /// If non-NULL, an ic.nb_streams long array of pointers to dictionaries, where i-th member contains options for codec corresponding to i-th stream. 
On return each dictionary will be filled with options that were not found. + /// >=0 if OK, AVERROR_xxx on error + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_find_stream_info(AVFormatContext* @ic, AVDictionary** @options); + + /// Discard all internally buffered data. This can be useful when dealing with discontinuities in the byte stream. Generally works only with formats that can resync. This includes headerless formats like MPEG-TS/TS but should also work with NUT, Ogg and in a limited way AVI for example. + /// media file handle + /// >=0 on success, error code otherwise + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_flush(AVFormatContext* @s); + + /// Free an AVFormatContext and all its streams. + /// context to free + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avformat_free_context(AVFormatContext* @s); + + /// Get the AVClass for AVFormatContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* avformat_get_class(); + + /// Returns the table mapping MOV FourCCs for audio to AVCodecID. + /// the table mapping MOV FourCCs for audio to AVCodecID. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecTag* avformat_get_mov_audio_tags(); + + /// Returns the table mapping MOV FourCCs for video to libavcodec AVCodecID. + /// the table mapping MOV FourCCs for video to libavcodec AVCodecID. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecTag* avformat_get_mov_video_tags(); + + /// Returns the table mapping RIFF FourCCs for audio to AVCodecID. + /// the table mapping RIFF FourCCs for audio to AVCodecID. 
+ [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecTag* avformat_get_riff_audio_tags(); + + /// @{ Get the tables mapping RIFF FourCCs to libavcodec AVCodecIDs. The tables are meant to be passed to av_codec_get_id()/av_codec_get_tag() as in the following code: + /// the table mapping RIFF FourCCs for video to libavcodec AVCodecID. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecTag* avformat_get_riff_video_tags(); + + /// Get the index entry count for the given AVStream. + /// stream + /// the number of index entries in the stream + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_index_get_entries_count(AVStream* @st); + + /// Get the AVIndexEntry corresponding to the given index. + /// Stream containing the requested AVIndexEntry. + /// The desired index. + /// A pointer to the requested AVIndexEntry if it exists, NULL otherwise. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVIndexEntry* avformat_index_get_entry(AVStream* @st, int @idx); + + /// Get the AVIndexEntry corresponding to the given timestamp. + /// Stream containing the requested AVIndexEntry. + /// If AVSEEK_FLAG_BACKWARD then the returned entry will correspond to the timestamp which is < = the requested one, if backward is 0, then it will be >= if AVSEEK_FLAG_ANY seek to any frame, only keyframes otherwise. + /// A pointer to the requested AVIndexEntry if it exists, NULL otherwise. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVIndexEntry* avformat_index_get_entry_from_timestamp(AVStream* @st, long @wanted_timestamp, int @flags); + + /// Allocate the stream private data and initialize the codec, but do not write the header. 
May optionally be used before avformat_write_header to initialize stream parameters before actually writing the header. If using this function, do not pass the same options to avformat_write_header. + /// Media file handle, must be allocated with avformat_alloc_context(). Its oformat field must be set to the desired output format; Its pb field must be set to an already opened AVIOContext. + /// An AVDictionary filled with AVFormatContext and muxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. + /// AVSTREAM_INIT_IN_WRITE_HEADER on success if the codec requires avformat_write_header to fully initialize, AVSTREAM_INIT_IN_INIT_OUTPUT on success if the codec has been fully initialized, negative AVERROR on failure. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_init_output(AVFormatContext* @s, AVDictionary** @options); + + /// Return the libavformat license. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avformat_license(); + + /// Check if the stream st contained in s is matched by the stream specifier spec. + /// >0 if st is matched by spec; 0 if st is not matched by spec; AVERROR code if spec is invalid + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_match_stream_specifier(AVFormatContext* @s, AVStream* @st, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @spec); + + /// Undo the initialization done by avformat_network_init. Call it only once for each time you called avformat_network_init. 
+ [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_network_deinit(); + + /// Do global initialization of network libraries. This is optional, and not recommended anymore. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_network_init(); + + /// Add a new stream to a media file. + /// media file handle + /// unused, does nothing + /// newly created stream or NULL on error. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVStream* avformat_new_stream(AVFormatContext* @s, AVCodec* @c); + + /// Open an input stream and read the header. The codecs are not opened. The stream must be closed with avformat_close_input(). + /// Pointer to user-supplied AVFormatContext (allocated by avformat_alloc_context). May be a pointer to NULL, in which case an AVFormatContext is allocated by this function and written into ps. Note that a user-supplied AVFormatContext will be freed on failure. + /// URL of the stream to open. + /// If non-NULL, this parameter forces a specific input format. Otherwise the format is autodetected. + /// A dictionary filled with AVFormatContext and demuxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. + /// 0 on success, a negative AVERROR on failure. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_open_input(AVFormatContext** @ps, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, AVInputFormat* @fmt, AVDictionary** @options); + + /// Test if the given container can store a codec. 
+ /// container to check for compatibility + /// codec to potentially store in container + /// standards compliance level, one of FF_COMPLIANCE_* + /// 1 if codec with ID codec_id can be stored in ofmt, 0 if it cannot. A negative number if this information is not available. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_query_codec(AVOutputFormat* @ofmt, AVCodecID @codec_id, int @std_compliance); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_queue_attached_pictures(AVFormatContext* @s); + + /// Seek to timestamp ts. Seeking will be done so that the point from which all active streams can be presented successfully will be closest to ts and within min/max_ts. Active streams are all streams that have AVStream.discard < AVDISCARD_ALL. + /// media file handle + /// index of the stream which is used as time base reference + /// smallest acceptable timestamp + /// target timestamp + /// largest acceptable timestamp + /// flags + /// >=0 on success, error code otherwise + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_seek_file(AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags); + + /// Transfer internal timing information from one stream to another. + /// target output format for ost + /// output stream which needs timings copy and adjustments + /// reference input stream to copy timings from + /// define from where the stream codec timebase needs to be imported + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_transfer_internal_stream_timing_info(AVOutputFormat* @ofmt, AVStream* @ost, AVStream* @ist, AVTimebaseSource @copy_tb); + + /// Return the LIBAVFORMAT_VERSION_INT constant. 
+ [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avformat_version(); + + /// Allocate the stream private data and write the stream header to an output media file. + /// Media file handle, must be allocated with avformat_alloc_context(). Its oformat field must be set to the desired output format; Its pb field must be set to an already opened AVIOContext. + /// An AVDictionary filled with AVFormatContext and muxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. + /// AVSTREAM_INIT_IN_WRITE_HEADER on success if the codec had not already been fully initialized in avformat_init, AVSTREAM_INIT_IN_INIT_OUTPUT on success if the codec had already been fully initialized in avformat_init, negative AVERROR on failure. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_write_header(AVFormatContext* @s, AVDictionary** @options); + + /// Accept and allocate a client context on a server context. + /// the server context + /// the client context, must be unallocated + /// >= 0 on success or a negative value corresponding to an AVERROR on failure + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_accept(AVIOContext* @s, AVIOContext** @c); + + /// Allocate and initialize an AVIOContext for buffered I/O. It must be later freed with avio_context_free(). + /// Memory block for input/output operations via AVIOContext. The buffer must be allocated with av_malloc() and friends. It may be freed and replaced with a new buffer by libavformat. AVIOContext.buffer holds the buffer currently in use, which must be later freed with av_free(). + /// The buffer size is very important for performance. For protocols with fixed blocksize it should be set to this blocksize. For others a typical size is a cache page, e.g. 4kb. 
+ /// Set to 1 if the buffer should be writable, 0 otherwise. + /// An opaque pointer to user-specific data. + /// A function for refilling the buffer, may be NULL. For stream protocols, must never return 0 but rather a proper AVERROR code. + /// A function for writing the buffer contents, may be NULL. The function may not change the input buffers content. + /// A function for seeking to specified byte position, may be NULL. + /// Allocated AVIOContext or NULL on failure. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVIOContext* avio_alloc_context(byte* @buffer, int @buffer_size, int @write_flag, void* @opaque, avio_alloc_context_read_packet_func @read_packet, avio_alloc_context_write_packet_func @write_packet, avio_alloc_context_seek_func @seek); + + /// Return AVIO_FLAG_* access flags corresponding to the access permissions of the resource in url, or a negative value corresponding to an AVERROR code in case of failure. The returned access flags are masked by the value in flags. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_check( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @flags); + + /// Close the resource accessed by the AVIOContext s and free it. This function can only be used if s was opened by avio_open(). + /// 0 on success, an AVERROR < 0 on error. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_close(AVIOContext* @s); + + /// Close directory. + /// directory read context. + /// >=0 on success or negative on error. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_close_dir(AVIODirContext** @s); + + /// Return the written size and a pointer to the buffer. 
The buffer must be freed with av_free(). Padding of AV_INPUT_BUFFER_PADDING_SIZE is added to the buffer. + /// IO context + /// pointer to a byte buffer + /// the length of the byte buffer + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_close_dyn_buf(AVIOContext* @s, byte** @pbuffer); + + /// Close the resource accessed by the AVIOContext *s, free it and set the pointer pointing to it to NULL. This function can only be used if s was opened by avio_open(). + /// 0 on success, an AVERROR < 0 on error. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_closep(AVIOContext** @s); + + /// Free the supplied IO context and everything associated with it. + /// Double pointer to the IO context. This function will write NULL into s. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_context_free(AVIOContext** @s); + + /// Iterate through names of available protocols. + /// A private pointer representing current protocol. It must be a pointer to NULL on first iteration and will be updated by successive calls to avio_enum_protocols. + /// If set to 1, iterate over output protocols, otherwise over input protocols. + /// A static string containing the name of current protocol or NULL + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avio_enum_protocols(void** @opaque, int @output); + + /// Similar to feof() but also returns nonzero on read errors. + /// non zero if and only if at end of file or a read error happened when reading. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_feof(AVIOContext* @s); + + /// Return the name of the protocol that will handle the passed URL. 
+ /// Name of the protocol or NULL. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avio_find_protocol_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url); + + /// Force flushing of buffered data. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_flush(AVIOContext* @s); + + /// Free entry allocated by avio_read_dir(). + /// entry to be freed. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_free_directory_entry(AVIODirEntry** @entry); + + /// Return the written size and a pointer to the buffer. The AVIOContext stream is left intact. The buffer must NOT be freed. No padding is added to the buffer. + /// IO context + /// pointer to a byte buffer + /// the length of the byte buffer + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_get_dyn_buf(AVIOContext* @s, byte** @pbuffer); + + /// Read a string from pb into buf. The reading will terminate when either a NULL character was encountered, maxlen bytes have been read, or nothing more can be read from pb. The result is guaranteed to be NULL-terminated, it will be truncated if buf is too small. Note that the string is not interpreted or validated in any way, it might get truncated in the middle of a sequence for multi-byte encodings. + /// number of bytes read (is always < = maxlen). If reading ends on EOF or error, the return value will be one more than bytes actually read. 
+ [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_get_str(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_get_str16be(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen); + + /// Read a UTF-16 string from pb and convert it to UTF-8. The reading will terminate when either a null or invalid character was encountered or maxlen bytes have been read. + /// number of bytes read (is always < = maxlen) + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_get_str16le(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen); + + /// Perform one step of the protocol handshake to accept a new client. This function must be called on a client returned by avio_accept() before using it as a read/write context. It is separate from avio_accept() because it may block. A step of the handshake is defined by places where the application may decide to change the proceedings. For example, on a protocol with a request header and a reply header, each one can constitute a step because the application may use the parameters from the request to change parameters in the reply; or each individual chunk of the request can constitute a step. If the handshake is already finished, avio_handshake() does nothing and returns 0 immediately. + /// the client context to perform the handshake on + /// 0 on a complete and successful handshake > 0 if the handshake progressed, but is not complete < 0 for an AVERROR code + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_handshake(AVIOContext* @c); + + /// Create and initialize a AVIOContext for accessing the resource indicated by url. + /// Used to return the pointer to the created AVIOContext. In case of failure the pointed to value is set to NULL. 
+ /// resource to access + /// flags which control how the resource indicated by url is to be opened + /// >= 0 in case of success, a negative value corresponding to an AVERROR code in case of failure + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_open(AVIOContext** @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @flags); + + /// Open directory for reading. + /// directory read context. Pointer to a NULL pointer must be passed. + /// directory to be listed. + /// A dictionary filled with protocol-private options. On return this parameter will be destroyed and replaced with a dictionary containing options that were not found. May be NULL. + /// >=0 on success or negative on error. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_open_dir(AVIODirContext** @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, AVDictionary** @options); + + /// Open a write only memory stream. + /// new IO context + /// zero if no error. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_open_dyn_buf(AVIOContext** @s); + + /// Create and initialize a AVIOContext for accessing the resource indicated by url. + /// Used to return the pointer to the created AVIOContext. In case of failure the pointed to value is set to NULL. + /// resource to access + /// flags which control how the resource indicated by url is to be opened + /// an interrupt callback to be used at the protocols level + /// A dictionary filled with protocol-private options. 
On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. + /// >= 0 in case of success, a negative value corresponding to an AVERROR code in case of failure + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_open2(AVIOContext** @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options); + + /// Pause and resume playing - only meaningful if using a network streaming protocol (e.g. MMS). + /// IO context from which to call the read_pause function pointer + /// 1 for pause, 0 for resume + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_pause(AVIOContext* @h, int @pause); + + /// Write a NULL terminated array of strings to the context. Usually you don't need to use this function directly but its macro wrapper, avio_print. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_print_string_array(AVIOContext* @s, byte*[] @strings); + + /// Writes a formatted string to the context. + /// number of bytes written, < 0 on error. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_printf(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt); + + /// Get AVClass by names of available protocols. 
+ /// A AVClass of input protocol name or NULL + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* avio_protocol_get_class( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Write a NULL-terminated string. + /// number of bytes written. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_put_str(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + + /// Convert an UTF-8 string to UTF-16BE and write it. + /// the AVIOContext + /// NULL-terminated UTF-8 string + /// number of bytes written. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_put_str16be(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + + /// Convert an UTF-8 string to UTF-16LE and write it. + /// the AVIOContext + /// NULL-terminated UTF-8 string + /// number of bytes written. 
+ [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_put_str16le(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + + /// @{ + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_r8(AVIOContext* @s); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avio_rb16(AVIOContext* @s); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avio_rb24(AVIOContext* @s); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avio_rb32(AVIOContext* @s); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern ulong avio_rb64(AVIOContext* @s); + + /// Read size bytes from AVIOContext into buf. + /// number of bytes read or AVERROR + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_read(AVIOContext* @s, byte* @buf, int @size); + + /// Get next directory entry. + /// directory read context. + /// next entry or NULL when no more entries. + /// >=0 on success or negative on error. End of list is not considered an error. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_read_dir(AVIODirContext* @s, AVIODirEntry** @next); + + /// Read size bytes from AVIOContext into buf. Unlike avio_read(), this is allowed to read fewer bytes than requested. The missing bytes can be read in the next call. This always tries to read at least 1 byte. Useful to reduce latency in certain cases. 
+ /// number of bytes read or AVERROR + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_read_partial(AVIOContext* @s, byte* @buf, int @size); + + /// Read contents of h into print buffer, up to max_size bytes, or up to EOF. + /// 0 for success (max_size bytes read or EOF reached), negative error code otherwise + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_read_to_bprint(AVIOContext* @h, AVBPrint* @pb, ulong @max_size); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avio_rl16(AVIOContext* @s); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avio_rl24(AVIOContext* @s); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avio_rl32(AVIOContext* @s); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern ulong avio_rl64(AVIOContext* @s); + + /// fseek() equivalent for AVIOContext. + /// new position or AVERROR. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern long avio_seek(AVIOContext* @s, long @offset, int @whence); + + /// Seek to a given timestamp relative to some component stream. Only meaningful if using a network streaming protocol (e.g. MMS.). + /// IO context from which to call the seek function pointers + /// The stream index that the timestamp is relative to. If stream_index is (-1) the timestamp should be in AV_TIME_BASE units from the beginning of the presentation. If a stream_index >= 0 is used and the protocol does not support seeking based on component streams, the call will fail. + /// timestamp in AVStream.time_base units or if there is no stream specified then in AV_TIME_BASE units. + /// Optional combination of AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE and AVSEEK_FLAG_ANY. 
The protocol may silently ignore AVSEEK_FLAG_BACKWARD and AVSEEK_FLAG_ANY, but AVSEEK_FLAG_BYTE will fail if used and not supported. + /// >= 0 on success + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern long avio_seek_time(AVIOContext* @h, int @stream_index, long @timestamp, int @flags); + + /// Get the filesize. + /// filesize or AVERROR + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern long avio_size(AVIOContext* @s); + + /// Skip given number of bytes forward + /// new position or AVERROR. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern long avio_skip(AVIOContext* @s, long @offset); + + /// Writes a formatted string to the context taking a va_list. + /// number of bytes written, < 0 on error. + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_vprintf(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @ap); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_w8(AVIOContext* @s, int @b); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_wb16(AVIOContext* @s, uint @val); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_wb24(AVIOContext* @s, uint @val); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_wb32(AVIOContext* @s, uint @val); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_wb64(AVIOContext* @s, ulong @val); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void 
avio_wl16(AVIOContext* @s, uint @val); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_wl24(AVIOContext* @s, uint @val); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_wl32(AVIOContext* @s, uint @val); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_wl64(AVIOContext* @s, ulong @val); + + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_write(AVIOContext* @s, byte* @buf, int @size); + + /// Mark the written bytestream as a specific type. + /// the stream time the current bytestream pos corresponds to (in AV_TIME_BASE units), or AV_NOPTS_VALUE if unknown or not applicable + /// the kind of data written starting at the current pos + [DllImport("avformat-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_write_marker(AVIOContext* @s, long @time, AVIODataMarkerType @type); + + /// Free all allocated data in the given subtitle struct. + /// AVSubtitle to free. + [DllImport("avcodec-59", CallingConvention = CallingConvention.Cdecl)] + public static extern void avsubtitle_free(AVSubtitle* @sub); + + /// Return the libavutil build-time configuration. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avutil_configuration(); + + /// Return the libavutil license. + [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avutil_license(); + + /// Return the LIBAVUTIL_VERSION_INT constant. 
+ [DllImport("avutil-57", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avutil_version(); + + /// Return the libpostproc build-time configuration. + [DllImport("postproc-56", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string postproc_configuration(); + + /// Return the libpostproc license. + [DllImport("postproc-56", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string postproc_license(); + + /// Return the LIBPOSTPROC_VERSION_INT constant. + [DllImport("postproc-56", CallingConvention = CallingConvention.Cdecl)] + public static extern uint postproc_version(); + + [DllImport("postproc-56", CallingConvention = CallingConvention.Cdecl)] + public static extern void pp_free_context(void* @ppContext); + + [DllImport("postproc-56", CallingConvention = CallingConvention.Cdecl)] + public static extern void pp_free_mode(void* @mode); + + [DllImport("postproc-56", CallingConvention = CallingConvention.Cdecl)] + public static extern void* pp_get_context(int @width, int @height, int @flags); + + /// Return a pp_mode or NULL if an error occurred. 
+ /// the string after "-pp" on the command line + /// a number from 0 to PP_QUALITY_MAX + [DllImport("postproc-56", CallingConvention = CallingConvention.Cdecl)] + public static extern void* pp_get_mode_by_name_and_quality( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @quality); + + [DllImport("postproc-56", CallingConvention = CallingConvention.Cdecl)] + public static extern void pp_postprocess(in byte_ptr3 @src, in int3 @srcStride, ref byte_ptr3 @dst, in int3 @dstStride, int @horizontalSize, int @verticalSize, sbyte* @QP_store, int @QP_stride, void* @mode, void* @ppContext, int @pict_type); + + /// Allocate SwrContext. + /// NULL on error, allocated context otherwise + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern SwrContext* swr_alloc(); + + /// Allocate SwrContext if needed and set/reset common parameters. + /// existing Swr context if available, or NULL if not + /// output channel layout (AV_CH_LAYOUT_*) + /// output sample format (AV_SAMPLE_FMT_*). + /// output sample rate (frequency in Hz) + /// input channel layout (AV_CH_LAYOUT_*) + /// input sample format (AV_SAMPLE_FMT_*). + /// input sample rate (frequency in Hz) + /// logging level offset + /// parent logging context, can be NULL + /// NULL on error, allocated context otherwise + [Obsolete("use ")] + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern SwrContext* swr_alloc_set_opts(SwrContext* @s, long @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, long @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx); + + /// Allocate SwrContext if needed and set/reset common parameters. + /// Pointer to an existing Swr context if available, or to NULL if not. 
On success, *ps will be set to the allocated context. + /// output channel layout (e.g. AV_CHANNEL_LAYOUT_*) + /// output sample format (AV_SAMPLE_FMT_*). + /// output sample rate (frequency in Hz) + /// input channel layout (e.g. AV_CHANNEL_LAYOUT_*) + /// input sample format (AV_SAMPLE_FMT_*). + /// input sample rate (frequency in Hz) + /// logging level offset + /// parent logging context, can be NULL + /// 0 on success, a negative AVERROR code on error. On error, the Swr context is freed and *ps set to NULL. + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_alloc_set_opts2(SwrContext** @ps, AVChannelLayout* @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, AVChannelLayout* @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx); + + /// Generate a channel mixing matrix. + /// input channel layout + /// output channel layout + /// mix level for the center channel + /// mix level for the surround channel(s) + /// mix level for the low-frequency effects channel + /// if 1.0, coefficients will be normalized to prevent overflow. if INT_MAX, coefficients will not be normalized. + /// mixing coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o. + /// distance between adjacent input channels in the matrix array + /// matrixed stereo downmix mode (e.g. dplii) + /// parent logging context, can be NULL + /// 0 on success, negative AVERROR code on failure + [Obsolete("use ")] + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_build_matrix(ulong @in_layout, ulong @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @rematrix_maxval, double @rematrix_volume, double* @matrix, int @stride, AVMatrixEncoding @matrix_encoding, void* @log_ctx); + + /// Generate a channel mixing matrix. 
+ /// input channel layout + /// output channel layout + /// mix level for the center channel + /// mix level for the surround channel(s) + /// mix level for the low-frequency effects channel + /// mixing coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o. + /// distance between adjacent input channels in the matrix array + /// matrixed stereo downmix mode (e.g. dplii) + /// 0 on success, negative AVERROR code on failure + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_build_matrix2(AVChannelLayout* @in_layout, AVChannelLayout* @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @maxval, double @rematrix_volume, double* @matrix, long @stride, AVMatrixEncoding @matrix_encoding, void* @log_context); + + /// Closes the context so that swr_is_initialized() returns 0. + /// Swr context to be closed + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern void swr_close(SwrContext* @s); + + /// Configure or reconfigure the SwrContext using the information provided by the AVFrames. + /// audio resample context + /// 0 on success, AVERROR on failure. + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_config_frame(SwrContext* @swr, AVFrame* @out, AVFrame* @in); + + /// Convert audio. 
+ /// allocated Swr context, with parameters set + /// output buffers, only the first one need be set in case of packed audio + /// amount of space available for output in samples per channel + /// input buffers, only the first one need to be set in case of packed audio + /// number of input samples available in one channel + /// number of samples output per channel, negative value on error + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_convert(SwrContext* @s, byte** @out, int @out_count, byte** @in, int @in_count); + + /// Convert the samples in the input AVFrame and write them to the output AVFrame. + /// audio resample context + /// output AVFrame + /// input AVFrame + /// 0 on success, AVERROR on failure or nonmatching configuration. + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_convert_frame(SwrContext* @swr, AVFrame* @output, AVFrame* @input); + + /// Drops the specified number of output samples. + /// allocated Swr context + /// number of samples to be dropped + /// >= 0 on success, or a negative AVERROR code on failure + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_drop_output(SwrContext* @s, int @count); + + /// Free the given SwrContext and set the pointer to NULL. + /// a pointer to a pointer to Swr context + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern void swr_free(SwrContext** @s); + + /// Get the AVClass for SwrContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + /// the AVClass of SwrContext + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* swr_get_class(); + + /// Gets the delay the next input sample will experience relative to the next output sample. 
+ /// swr context + /// timebase in which the returned delay will be: + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern long swr_get_delay(SwrContext* @s, long @base); + + /// Find an upper bound on the number of samples that the next swr_convert call will output, if called with in_samples of input samples. This depends on the internal state, and anything changing the internal state (like further swr_convert() calls) will may change the number of samples swr_get_out_samples() returns for the same number of input samples. + /// number of input samples. + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_get_out_samples(SwrContext* @s, int @in_samples); + + /// Initialize context after user parameters have been set. + /// Swr context to initialize + /// AVERROR error code in case of failure. + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_init(SwrContext* @s); + + /// Injects the specified number of silence samples. + /// allocated Swr context + /// number of samples to be dropped + /// >= 0 on success, or a negative AVERROR code on failure + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_inject_silence(SwrContext* @s, int @count); + + /// Check whether an swr context has been initialized or not. + /// Swr context to check + /// positive if it has been initialized, 0 if not initialized + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_is_initialized(SwrContext* @s); + + /// Convert the next timestamp from input to output timestamps are in 1/(in_sample_rate * out_sample_rate) units. 
+ /// the output timestamp for the next output sample + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern long swr_next_pts(SwrContext* @s, long @pts); + + /// Set a customized input channel mapping. + /// allocated Swr context, not yet initialized + /// customized input channel mapping (array of channel indexes, -1 for a muted channel) + /// >= 0 on success, or AVERROR error code in case of failure. + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_set_channel_mapping(SwrContext* @s, int* @channel_map); + + /// Activate resampling compensation ("soft" compensation). This function is internally called when needed in swr_next_pts(). + /// allocated Swr context. If it is not initialized, or SWR_FLAG_RESAMPLE is not set, swr_init() is called with the flag set. + /// delta in PTS per sample + /// number of samples to compensate for + /// >= 0 on success, AVERROR error codes if: + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_set_compensation(SwrContext* @s, int @sample_delta, int @compensation_distance); + + /// Set a customized remix matrix. + /// allocated Swr context, not yet initialized + /// remix coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o + /// offset between lines of the matrix + /// >= 0 on success, or AVERROR error code in case of failure. + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern int swr_set_matrix(SwrContext* @s, double* @matrix, int @stride); + + /// Return the swr build-time configuration. + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string swresample_configuration(); + + /// Return the swr license. 
+ [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string swresample_license(); + + /// Return the LIBSWRESAMPLE_VERSION_INT constant. + [DllImport("swresample-4", CallingConvention = CallingConvention.Cdecl)] + public static extern uint swresample_version(); + + /// Allocate an empty SwsContext. This must be filled and passed to sws_init_context(). For filling see AVOptions, options.c and sws_setColorspaceDetails(). + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern SwsContext* sws_alloc_context(); + + /// Allocate and return an uninitialized vector with length coefficients. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern SwsVector* sws_allocVec(int @length); + + /// Convert an 8-bit paletted frame into a frame with a color depth of 24 bits. + /// source frame buffer + /// destination frame buffer + /// number of pixels to convert + /// array with [256] entries, which must match color arrangement (RGB or BGR) of src + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern void sws_convertPalette8ToPacked24(byte* @src, byte* @dst, int @num_pixels, byte* @palette); + + /// Convert an 8-bit paletted frame into a frame with a color depth of 32 bits. + /// source frame buffer + /// destination frame buffer + /// number of pixels to convert + /// array with [256] entries, which must match color arrangement (RGB or BGR) of src + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern void sws_convertPalette8ToPacked32(byte* @src, byte* @dst, int @num_pixels, byte* @palette); + + /// Finish the scaling process for a pair of source/destination frames previously submitted with sws_frame_start(). 
Must be called after all sws_send_slice() and sws_receive_slice() calls are done, before any new sws_frame_start() calls. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern void sws_frame_end(SwsContext* @c); + + /// Initialize the scaling process for a given pair of source/destination frames. Must be called before any calls to sws_send_slice() and sws_receive_slice(). + /// The destination frame. + /// The source frame. The data buffers must be allocated, but the frame data does not have to be ready at this point. Data availability is then signalled by sws_send_slice(). + /// 0 on success, a negative AVERROR code on failure + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern int sws_frame_start(SwsContext* @c, AVFrame* @dst, AVFrame* @src); + + /// Free the swscaler context swsContext. If swsContext is NULL, then does nothing. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern void sws_freeContext(SwsContext* @swsContext); + + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern void sws_freeFilter(SwsFilter* @filter); + + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern void sws_freeVec(SwsVector* @a); + + /// Get the AVClass for swsContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* sws_get_class(); + + /// Check if context can be reused, otherwise reallocate a new one. 
+ [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern SwsContext* sws_getCachedContext(SwsContext* @context, int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param); + + /// Return a pointer to yuv<->rgb coefficients for the given colorspace suitable for sws_setColorspaceDetails(). + /// One of the SWS_CS_* macros. If invalid, SWS_CS_DEFAULT is used. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern int* sws_getCoefficients(int @colorspace); + + /// #if LIBSWSCALE_VERSION_MAJOR > 6 + /// negative error code on error, non negative otherwise #else + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern int sws_getColorspaceDetails(SwsContext* @c, int** @inv_table, int* @srcRange, int** @table, int* @dstRange, int* @brightness, int* @contrast, int* @saturation); + + /// Allocate and return an SwsContext. You need it to perform scaling/conversion operations using sws_scale(). 
+ /// the width of the source image + /// the height of the source image + /// the source image format + /// the width of the destination image + /// the height of the destination image + /// the destination image format + /// specify which algorithm and options to use for rescaling + /// extra parameters to tune the used scaler For SWS_BICUBIC param[0] and [1] tune the shape of the basis function, param[0] tunes f(1) and param[1] f´(1) For SWS_GAUSS param[0] tunes the exponent and thus cutoff frequency For SWS_LANCZOS param[0] tunes the width of the window function + /// a pointer to an allocated context, or NULL in case of error + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern SwsContext* sws_getContext(int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param); + + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern SwsFilter* sws_getDefaultFilter(float @lumaGBlur, float @chromaGBlur, float @lumaSharpen, float @chromaSharpen, float @chromaHShift, float @chromaVShift, int @verbose); + + /// Return a normalized Gaussian curve used to filter stuff quality = 3 is high quality, lower is lower quality. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern SwsVector* sws_getGaussianVec(double @variance, double @quality); + + /// Initialize the swscaler context sws_context. + /// zero or positive value on success, a negative value on error + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern int sws_init_context(SwsContext* @sws_context, SwsFilter* @srcFilter, SwsFilter* @dstFilter); + + /// Returns a positive value if an endianness conversion for pix_fmt is supported, 0 otherwise. + /// the pixel format + /// a positive value if an endianness conversion for pix_fmt is supported, 0 otherwise. 
+ [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern int sws_isSupportedEndiannessConversion(AVPixelFormat @pix_fmt); + + /// Return a positive value if pix_fmt is a supported input format, 0 otherwise. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern int sws_isSupportedInput(AVPixelFormat @pix_fmt); + + /// Return a positive value if pix_fmt is a supported output format, 0 otherwise. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern int sws_isSupportedOutput(AVPixelFormat @pix_fmt); + + /// Scale all the coefficients of a so that their sum equals height. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern void sws_normalizeVec(SwsVector* @a, double @height); + + /// Request a horizontal slice of the output data to be written into the frame previously provided to sws_frame_start(). + /// first row of the slice; must be a multiple of sws_receive_slice_alignment() + /// number of rows in the slice; must be a multiple of sws_receive_slice_alignment(), except for the last slice (i.e. when slice_start+slice_height is equal to output frame height) + /// a non-negative number if the data was successfully written into the output AVERROR(EAGAIN) if more input data needs to be provided before the output can be produced another negative AVERROR code on other kinds of scaling failure + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern int sws_receive_slice(SwsContext* @c, uint @slice_start, uint @slice_height); + + /// Returns alignment required for output slices requested with sws_receive_slice(). Slice offsets and sizes passed to sws_receive_slice() must be multiples of the value returned from this function. + /// alignment required for output slices requested with sws_receive_slice(). 
Slice offsets and sizes passed to sws_receive_slice() must be multiples of the value returned from this function. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern uint sws_receive_slice_alignment(SwsContext* @c); + + /// Scale the image slice in srcSlice and put the resulting scaled slice in the image in dst. A slice is a sequence of consecutive rows in an image. + /// the scaling context previously created with sws_getContext() + /// the array containing the pointers to the planes of the source slice + /// the array containing the strides for each plane of the source image + /// the position in the source image of the slice to process, that is the number (counted starting from zero) in the image of the first row of the slice + /// the height of the source slice, that is the number of rows in the slice + /// the array containing the pointers to the planes of the destination image + /// the array containing the strides for each plane of the destination image + /// the height of the output slice + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern int sws_scale(SwsContext* @c, byte*[] @srcSlice, int[] @srcStride, int @srcSliceY, int @srcSliceH, byte*[] @dst, int[] @dstStride); + + /// Scale source data from src and write the output to dst. + /// The destination frame. See documentation for sws_frame_start() for more details. + /// The source frame. + /// 0 on success, a negative AVERROR code on failure + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern int sws_scale_frame(SwsContext* @c, AVFrame* @dst, AVFrame* @src); + + /// Scale all the coefficients of a by the scalar value. 
+ [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern void sws_scaleVec(SwsVector* @a, double @scalar); + + /// Indicate that a horizontal slice of input data is available in the source frame previously provided to sws_frame_start(). The slices may be provided in any order, but may not overlap. For vertically subsampled pixel formats, the slices must be aligned according to subsampling. + /// first row of the slice + /// number of rows in the slice + /// a non-negative number on success, a negative AVERROR code on failure. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern int sws_send_slice(SwsContext* @c, uint @slice_start, uint @slice_height); + + /// Returns negative error code on error, non negative otherwise #else Returns -1 if not supported #endif + /// the yuv2rgb coefficients describing the input yuv space, normally ff_yuv2rgb_coeffs[x] + /// flag indicating the while-black range of the input (1=jpeg / 0=mpeg) + /// the yuv2rgb coefficients describing the output yuv space, normally ff_yuv2rgb_coeffs[x] + /// flag indicating the while-black range of the output (1=jpeg / 0=mpeg) + /// 16.16 fixed point brightness correction + /// 16.16 fixed point contrast correction + /// 16.16 fixed point saturation correction #if LIBSWSCALE_VERSION_MAJOR > 6 + /// negative error code on error, non negative otherwise #else + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern int sws_setColorspaceDetails(SwsContext* @c, in int4 @inv_table, int @srcRange, in int4 @table, int @dstRange, int @brightness, int @contrast, int @saturation); + + /// Return the libswscale build-time configuration. 
+ [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string swscale_configuration(); + + /// Return the libswscale license. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string swscale_license(); + + /// Color conversion and scaling library. + [DllImport("swscale-6", CallingConvention = CallingConvention.Cdecl)] + public static extern uint swscale_version(); + + public unsafe static void Initialize() + { + vectors.av_abuffersink_params_alloc = av_abuffersink_params_alloc; + vectors.av_add_index_entry = av_add_index_entry; + vectors.av_add_q = av_add_q; + vectors.av_add_stable = av_add_stable; + vectors.av_append_packet = av_append_packet; + vectors.av_audio_fifo_alloc = av_audio_fifo_alloc; + vectors.av_audio_fifo_drain = av_audio_fifo_drain; + vectors.av_audio_fifo_free = av_audio_fifo_free; + vectors.av_audio_fifo_peek = av_audio_fifo_peek; + vectors.av_audio_fifo_peek_at = av_audio_fifo_peek_at; + vectors.av_audio_fifo_read = av_audio_fifo_read; + vectors.av_audio_fifo_realloc = av_audio_fifo_realloc; + vectors.av_audio_fifo_reset = av_audio_fifo_reset; + vectors.av_audio_fifo_size = av_audio_fifo_size; + vectors.av_audio_fifo_space = av_audio_fifo_space; + vectors.av_audio_fifo_write = av_audio_fifo_write; + vectors.av_bprint_channel_layout = av_bprint_channel_layout; + vectors.av_bsf_alloc = av_bsf_alloc; + vectors.av_bsf_flush = av_bsf_flush; + vectors.av_bsf_free = av_bsf_free; + vectors.av_bsf_get_by_name = av_bsf_get_by_name; + vectors.av_bsf_get_class = av_bsf_get_class; + vectors.av_bsf_get_null_filter = av_bsf_get_null_filter; + vectors.av_bsf_init = av_bsf_init; + vectors.av_bsf_iterate = av_bsf_iterate; + vectors.av_bsf_list_alloc = av_bsf_list_alloc; + 
vectors.av_bsf_list_append = av_bsf_list_append; + vectors.av_bsf_list_append2 = av_bsf_list_append2; + vectors.av_bsf_list_finalize = av_bsf_list_finalize; + vectors.av_bsf_list_free = av_bsf_list_free; + vectors.av_bsf_list_parse_str = av_bsf_list_parse_str; + vectors.av_bsf_receive_packet = av_bsf_receive_packet; + vectors.av_bsf_send_packet = av_bsf_send_packet; + vectors.av_buffer_alloc = av_buffer_alloc; + vectors.av_buffer_allocz = av_buffer_allocz; + vectors.av_buffer_create = av_buffer_create; + vectors.av_buffer_default_free = av_buffer_default_free; + vectors.av_buffer_get_opaque = av_buffer_get_opaque; + vectors.av_buffer_get_ref_count = av_buffer_get_ref_count; + vectors.av_buffer_is_writable = av_buffer_is_writable; + vectors.av_buffer_make_writable = av_buffer_make_writable; + vectors.av_buffer_pool_buffer_get_opaque = av_buffer_pool_buffer_get_opaque; + vectors.av_buffer_pool_get = av_buffer_pool_get; + vectors.av_buffer_pool_init = av_buffer_pool_init; + vectors.av_buffer_pool_init2 = av_buffer_pool_init2; + vectors.av_buffer_pool_uninit = av_buffer_pool_uninit; + vectors.av_buffer_realloc = av_buffer_realloc; + vectors.av_buffer_ref = av_buffer_ref; + vectors.av_buffer_replace = av_buffer_replace; + vectors.av_buffer_unref = av_buffer_unref; + vectors.av_buffersink_get_ch_layout = av_buffersink_get_ch_layout; + vectors.av_buffersink_get_channel_layout = av_buffersink_get_channel_layout; + vectors.av_buffersink_get_channels = av_buffersink_get_channels; + vectors.av_buffersink_get_format = av_buffersink_get_format; + vectors.av_buffersink_get_frame = av_buffersink_get_frame; + vectors.av_buffersink_get_frame_flags = av_buffersink_get_frame_flags; + vectors.av_buffersink_get_frame_rate = av_buffersink_get_frame_rate; + vectors.av_buffersink_get_h = av_buffersink_get_h; + vectors.av_buffersink_get_hw_frames_ctx = av_buffersink_get_hw_frames_ctx; + vectors.av_buffersink_get_sample_aspect_ratio = av_buffersink_get_sample_aspect_ratio; + 
vectors.av_buffersink_get_sample_rate = av_buffersink_get_sample_rate; + vectors.av_buffersink_get_samples = av_buffersink_get_samples; + vectors.av_buffersink_get_time_base = av_buffersink_get_time_base; + vectors.av_buffersink_get_type = av_buffersink_get_type; + vectors.av_buffersink_get_w = av_buffersink_get_w; + vectors.av_buffersink_params_alloc = av_buffersink_params_alloc; + vectors.av_buffersink_set_frame_size = av_buffersink_set_frame_size; + vectors.av_buffersrc_add_frame = av_buffersrc_add_frame; + vectors.av_buffersrc_add_frame_flags = av_buffersrc_add_frame_flags; + vectors.av_buffersrc_close = av_buffersrc_close; + vectors.av_buffersrc_get_nb_failed_requests = av_buffersrc_get_nb_failed_requests; + vectors.av_buffersrc_parameters_alloc = av_buffersrc_parameters_alloc; + vectors.av_buffersrc_parameters_set = av_buffersrc_parameters_set; + vectors.av_buffersrc_write_frame = av_buffersrc_write_frame; + vectors.av_calloc = av_calloc; + vectors.av_channel_description = av_channel_description; + vectors.av_channel_description_bprint = av_channel_description_bprint; + vectors.av_channel_from_string = av_channel_from_string; + vectors.av_channel_layout_channel_from_index = av_channel_layout_channel_from_index; + vectors.av_channel_layout_channel_from_string = av_channel_layout_channel_from_string; + vectors.av_channel_layout_check = av_channel_layout_check; + vectors.av_channel_layout_compare = av_channel_layout_compare; + vectors.av_channel_layout_copy = av_channel_layout_copy; + vectors.av_channel_layout_default = av_channel_layout_default; + vectors.av_channel_layout_describe = av_channel_layout_describe; + vectors.av_channel_layout_describe_bprint = av_channel_layout_describe_bprint; + vectors.av_channel_layout_extract_channel = av_channel_layout_extract_channel; + vectors.av_channel_layout_from_mask = av_channel_layout_from_mask; + vectors.av_channel_layout_from_string = av_channel_layout_from_string; + vectors.av_channel_layout_index_from_channel = 
av_channel_layout_index_from_channel; + vectors.av_channel_layout_index_from_string = av_channel_layout_index_from_string; + vectors.av_channel_layout_standard = av_channel_layout_standard; + vectors.av_channel_layout_subset = av_channel_layout_subset; + vectors.av_channel_layout_uninit = av_channel_layout_uninit; + vectors.av_channel_name = av_channel_name; + vectors.av_channel_name_bprint = av_channel_name_bprint; + vectors.av_chroma_location_from_name = av_chroma_location_from_name; + vectors.av_chroma_location_name = av_chroma_location_name; + vectors.av_codec_get_id = av_codec_get_id; + vectors.av_codec_get_tag = av_codec_get_tag; + vectors.av_codec_get_tag2 = av_codec_get_tag2; + vectors.av_codec_is_decoder = av_codec_is_decoder; + vectors.av_codec_is_encoder = av_codec_is_encoder; + vectors.av_codec_iterate = av_codec_iterate; + vectors.av_color_primaries_from_name = av_color_primaries_from_name; + vectors.av_color_primaries_name = av_color_primaries_name; + vectors.av_color_range_from_name = av_color_range_from_name; + vectors.av_color_range_name = av_color_range_name; + vectors.av_color_space_from_name = av_color_space_from_name; + vectors.av_color_space_name = av_color_space_name; + vectors.av_color_transfer_from_name = av_color_transfer_from_name; + vectors.av_color_transfer_name = av_color_transfer_name; + vectors.av_compare_mod = av_compare_mod; + vectors.av_compare_ts = av_compare_ts; + vectors.av_content_light_metadata_alloc = av_content_light_metadata_alloc; + vectors.av_content_light_metadata_create_side_data = av_content_light_metadata_create_side_data; + vectors.av_cpb_properties_alloc = av_cpb_properties_alloc; + vectors.av_cpu_count = av_cpu_count; + vectors.av_cpu_force_count = av_cpu_force_count; + vectors.av_cpu_max_align = av_cpu_max_align; + vectors.av_d2q = av_d2q; + vectors.av_d3d11va_alloc_context = av_d3d11va_alloc_context; + vectors.av_default_get_category = av_default_get_category; + vectors.av_default_item_name = 
av_default_item_name; + vectors.av_demuxer_iterate = av_demuxer_iterate; + vectors.av_dict_copy = av_dict_copy; + vectors.av_dict_count = av_dict_count; + vectors.av_dict_free = av_dict_free; + vectors.av_dict_get = av_dict_get; + vectors.av_dict_get_string = av_dict_get_string; + vectors.av_dict_parse_string = av_dict_parse_string; + vectors.av_dict_set = av_dict_set; + vectors.av_dict_set_int = av_dict_set_int; + vectors.av_disposition_from_string = av_disposition_from_string; + vectors.av_disposition_to_string = av_disposition_to_string; + vectors.av_div_q = av_div_q; + vectors.av_dump_format = av_dump_format; + vectors.av_dynamic_hdr_plus_alloc = av_dynamic_hdr_plus_alloc; + vectors.av_dynamic_hdr_plus_create_side_data = av_dynamic_hdr_plus_create_side_data; + vectors.av_dynarray_add = av_dynarray_add; + vectors.av_dynarray_add_nofree = av_dynarray_add_nofree; + vectors.av_dynarray2_add = av_dynarray2_add; + vectors.av_fast_malloc = av_fast_malloc; + vectors.av_fast_mallocz = av_fast_mallocz; + vectors.av_fast_padded_malloc = av_fast_padded_malloc; + vectors.av_fast_padded_mallocz = av_fast_padded_mallocz; + vectors.av_fast_realloc = av_fast_realloc; + vectors.av_file_map = av_file_map; + vectors.av_file_unmap = av_file_unmap; + vectors.av_filename_number_test = av_filename_number_test; + vectors.av_filter_iterate = av_filter_iterate; + vectors.av_find_best_pix_fmt_of_2 = av_find_best_pix_fmt_of_2; + vectors.av_find_best_stream = av_find_best_stream; + vectors.av_find_default_stream_index = av_find_default_stream_index; + vectors.av_find_input_format = av_find_input_format; + vectors.av_find_nearest_q_idx = av_find_nearest_q_idx; + vectors.av_find_program_from_stream = av_find_program_from_stream; + vectors.av_fmt_ctx_get_duration_estimation_method = av_fmt_ctx_get_duration_estimation_method; + vectors.av_fopen_utf8 = av_fopen_utf8; + vectors.av_force_cpu_flags = av_force_cpu_flags; + vectors.av_format_inject_global_side_data = 
av_format_inject_global_side_data; + vectors.av_fourcc_make_string = av_fourcc_make_string; + vectors.av_frame_alloc = av_frame_alloc; + vectors.av_frame_apply_cropping = av_frame_apply_cropping; + vectors.av_frame_clone = av_frame_clone; + vectors.av_frame_copy = av_frame_copy; + vectors.av_frame_copy_props = av_frame_copy_props; + vectors.av_frame_free = av_frame_free; + vectors.av_frame_get_buffer = av_frame_get_buffer; + vectors.av_frame_get_plane_buffer = av_frame_get_plane_buffer; + vectors.av_frame_get_side_data = av_frame_get_side_data; + vectors.av_frame_is_writable = av_frame_is_writable; + vectors.av_frame_make_writable = av_frame_make_writable; + vectors.av_frame_move_ref = av_frame_move_ref; + vectors.av_frame_new_side_data = av_frame_new_side_data; + vectors.av_frame_new_side_data_from_buf = av_frame_new_side_data_from_buf; + vectors.av_frame_ref = av_frame_ref; + vectors.av_frame_remove_side_data = av_frame_remove_side_data; + vectors.av_frame_side_data_name = av_frame_side_data_name; + vectors.av_frame_unref = av_frame_unref; + vectors.av_free = av_free; + vectors.av_freep = av_freep; + vectors.av_gcd = av_gcd; + vectors.av_gcd_q = av_gcd_q; + vectors.av_get_alt_sample_fmt = av_get_alt_sample_fmt; + vectors.av_get_audio_frame_duration = av_get_audio_frame_duration; + vectors.av_get_audio_frame_duration2 = av_get_audio_frame_duration2; + vectors.av_get_bits_per_pixel = av_get_bits_per_pixel; + vectors.av_get_bits_per_sample = av_get_bits_per_sample; + vectors.av_get_bytes_per_sample = av_get_bytes_per_sample; + vectors.av_get_channel_description = av_get_channel_description; + vectors.av_get_channel_layout = av_get_channel_layout; + vectors.av_get_channel_layout_channel_index = av_get_channel_layout_channel_index; + vectors.av_get_channel_layout_nb_channels = av_get_channel_layout_nb_channels; + vectors.av_get_channel_layout_string = av_get_channel_layout_string; + vectors.av_get_channel_name = av_get_channel_name; + vectors.av_get_colorspace_name = 
av_get_colorspace_name; + vectors.av_get_cpu_flags = av_get_cpu_flags; + vectors.av_get_default_channel_layout = av_get_default_channel_layout; + vectors.av_get_exact_bits_per_sample = av_get_exact_bits_per_sample; + vectors.av_get_extended_channel_layout = av_get_extended_channel_layout; + vectors.av_get_frame_filename = av_get_frame_filename; + vectors.av_get_frame_filename2 = av_get_frame_filename2; + vectors.av_get_media_type_string = av_get_media_type_string; + vectors.av_get_output_timestamp = av_get_output_timestamp; + vectors.av_get_packed_sample_fmt = av_get_packed_sample_fmt; + vectors.av_get_packet = av_get_packet; + vectors.av_get_padded_bits_per_pixel = av_get_padded_bits_per_pixel; + vectors.av_get_pcm_codec = av_get_pcm_codec; + vectors.av_get_picture_type_char = av_get_picture_type_char; + vectors.av_get_pix_fmt = av_get_pix_fmt; + vectors.av_get_pix_fmt_loss = av_get_pix_fmt_loss; + vectors.av_get_pix_fmt_name = av_get_pix_fmt_name; + vectors.av_get_pix_fmt_string = av_get_pix_fmt_string; + vectors.av_get_planar_sample_fmt = av_get_planar_sample_fmt; + vectors.av_get_profile_name = av_get_profile_name; + vectors.av_get_sample_fmt = av_get_sample_fmt; + vectors.av_get_sample_fmt_name = av_get_sample_fmt_name; + vectors.av_get_sample_fmt_string = av_get_sample_fmt_string; + vectors.av_get_standard_channel_layout = av_get_standard_channel_layout; + vectors.av_get_time_base_q = av_get_time_base_q; + vectors.av_gettime = av_gettime; + vectors.av_gettime_relative = av_gettime_relative; + vectors.av_gettime_relative_is_monotonic = av_gettime_relative_is_monotonic; + vectors.av_grow_packet = av_grow_packet; + vectors.av_guess_codec = av_guess_codec; + vectors.av_guess_format = av_guess_format; + vectors.av_guess_frame_rate = av_guess_frame_rate; + vectors.av_guess_sample_aspect_ratio = av_guess_sample_aspect_ratio; + vectors.av_hex_dump = av_hex_dump; + vectors.av_hex_dump_log = av_hex_dump_log; + vectors.av_hwdevice_ctx_alloc = av_hwdevice_ctx_alloc; + 
vectors.av_hwdevice_ctx_create = av_hwdevice_ctx_create; + vectors.av_hwdevice_ctx_create_derived = av_hwdevice_ctx_create_derived; + vectors.av_hwdevice_ctx_create_derived_opts = av_hwdevice_ctx_create_derived_opts; + vectors.av_hwdevice_ctx_init = av_hwdevice_ctx_init; + vectors.av_hwdevice_find_type_by_name = av_hwdevice_find_type_by_name; + vectors.av_hwdevice_get_hwframe_constraints = av_hwdevice_get_hwframe_constraints; + vectors.av_hwdevice_get_type_name = av_hwdevice_get_type_name; + vectors.av_hwdevice_hwconfig_alloc = av_hwdevice_hwconfig_alloc; + vectors.av_hwdevice_iterate_types = av_hwdevice_iterate_types; + vectors.av_hwframe_constraints_free = av_hwframe_constraints_free; + vectors.av_hwframe_ctx_alloc = av_hwframe_ctx_alloc; + vectors.av_hwframe_ctx_create_derived = av_hwframe_ctx_create_derived; + vectors.av_hwframe_ctx_init = av_hwframe_ctx_init; + vectors.av_hwframe_get_buffer = av_hwframe_get_buffer; + vectors.av_hwframe_map = av_hwframe_map; + vectors.av_hwframe_transfer_data = av_hwframe_transfer_data; + vectors.av_hwframe_transfer_get_formats = av_hwframe_transfer_get_formats; + vectors.av_image_alloc = av_image_alloc; + vectors.av_image_check_sar = av_image_check_sar; + vectors.av_image_check_size = av_image_check_size; + vectors.av_image_check_size2 = av_image_check_size2; + vectors.av_image_copy = av_image_copy; + vectors.av_image_copy_plane = av_image_copy_plane; + vectors.av_image_copy_plane_uc_from = av_image_copy_plane_uc_from; + vectors.av_image_copy_to_buffer = av_image_copy_to_buffer; + vectors.av_image_copy_uc_from = av_image_copy_uc_from; + vectors.av_image_fill_arrays = av_image_fill_arrays; + vectors.av_image_fill_black = av_image_fill_black; + vectors.av_image_fill_linesizes = av_image_fill_linesizes; + vectors.av_image_fill_max_pixsteps = av_image_fill_max_pixsteps; + vectors.av_image_fill_plane_sizes = av_image_fill_plane_sizes; + vectors.av_image_fill_pointers = av_image_fill_pointers; + vectors.av_image_get_buffer_size = 
av_image_get_buffer_size; + vectors.av_image_get_linesize = av_image_get_linesize; + vectors.av_index_search_timestamp = av_index_search_timestamp; + vectors.av_init_packet = av_init_packet; + vectors.av_input_audio_device_next = av_input_audio_device_next; + vectors.av_input_video_device_next = av_input_video_device_next; + vectors.av_int_list_length_for_size = av_int_list_length_for_size; + vectors.av_interleaved_write_frame = av_interleaved_write_frame; + vectors.av_interleaved_write_uncoded_frame = av_interleaved_write_uncoded_frame; + vectors.av_log = av_log; + vectors.av_log_default_callback = av_log_default_callback; + vectors.av_log_format_line = av_log_format_line; + vectors.av_log_format_line2 = av_log_format_line2; + vectors.av_log_get_flags = av_log_get_flags; + vectors.av_log_get_level = av_log_get_level; + vectors.av_log_once = av_log_once; + vectors.av_log_set_callback = av_log_set_callback; + vectors.av_log_set_flags = av_log_set_flags; + vectors.av_log_set_level = av_log_set_level; + vectors.av_log2 = av_log2; + vectors.av_log2_16bit = av_log2_16bit; + vectors.av_malloc = av_malloc; + vectors.av_malloc_array = av_malloc_array; + vectors.av_mallocz = av_mallocz; + vectors.av_mallocz_array = av_mallocz_array; + vectors.av_mastering_display_metadata_alloc = av_mastering_display_metadata_alloc; + vectors.av_mastering_display_metadata_create_side_data = av_mastering_display_metadata_create_side_data; + vectors.av_match_ext = av_match_ext; + vectors.av_max_alloc = av_max_alloc; + vectors.av_memcpy_backptr = av_memcpy_backptr; + vectors.av_memdup = av_memdup; + vectors.av_mul_q = av_mul_q; + vectors.av_muxer_iterate = av_muxer_iterate; + vectors.av_nearer_q = av_nearer_q; + vectors.av_new_packet = av_new_packet; + vectors.av_new_program = av_new_program; + vectors.av_opt_child_class_iterate = av_opt_child_class_iterate; + vectors.av_opt_child_next = av_opt_child_next; + vectors.av_opt_copy = av_opt_copy; + vectors.av_opt_eval_double = av_opt_eval_double; 
+ vectors.av_opt_eval_flags = av_opt_eval_flags; + vectors.av_opt_eval_float = av_opt_eval_float; + vectors.av_opt_eval_int = av_opt_eval_int; + vectors.av_opt_eval_int64 = av_opt_eval_int64; + vectors.av_opt_eval_q = av_opt_eval_q; + vectors.av_opt_find = av_opt_find; + vectors.av_opt_find2 = av_opt_find2; + vectors.av_opt_flag_is_set = av_opt_flag_is_set; + vectors.av_opt_free = av_opt_free; + vectors.av_opt_freep_ranges = av_opt_freep_ranges; + vectors.av_opt_get = av_opt_get; + vectors.av_opt_get_channel_layout = av_opt_get_channel_layout; + vectors.av_opt_get_chlayout = av_opt_get_chlayout; + vectors.av_opt_get_dict_val = av_opt_get_dict_val; + vectors.av_opt_get_double = av_opt_get_double; + vectors.av_opt_get_image_size = av_opt_get_image_size; + vectors.av_opt_get_int = av_opt_get_int; + vectors.av_opt_get_key_value = av_opt_get_key_value; + vectors.av_opt_get_pixel_fmt = av_opt_get_pixel_fmt; + vectors.av_opt_get_q = av_opt_get_q; + vectors.av_opt_get_sample_fmt = av_opt_get_sample_fmt; + vectors.av_opt_get_video_rate = av_opt_get_video_rate; + vectors.av_opt_is_set_to_default = av_opt_is_set_to_default; + vectors.av_opt_is_set_to_default_by_name = av_opt_is_set_to_default_by_name; + vectors.av_opt_next = av_opt_next; + vectors.av_opt_ptr = av_opt_ptr; + vectors.av_opt_query_ranges = av_opt_query_ranges; + vectors.av_opt_query_ranges_default = av_opt_query_ranges_default; + vectors.av_opt_serialize = av_opt_serialize; + vectors.av_opt_set = av_opt_set; + vectors.av_opt_set_bin = av_opt_set_bin; + vectors.av_opt_set_channel_layout = av_opt_set_channel_layout; + vectors.av_opt_set_chlayout = av_opt_set_chlayout; + vectors.av_opt_set_defaults = av_opt_set_defaults; + vectors.av_opt_set_defaults2 = av_opt_set_defaults2; + vectors.av_opt_set_dict = av_opt_set_dict; + vectors.av_opt_set_dict_val = av_opt_set_dict_val; + vectors.av_opt_set_dict2 = av_opt_set_dict2; + vectors.av_opt_set_double = av_opt_set_double; + vectors.av_opt_set_from_string = 
av_opt_set_from_string; + vectors.av_opt_set_image_size = av_opt_set_image_size; + vectors.av_opt_set_int = av_opt_set_int; + vectors.av_opt_set_pixel_fmt = av_opt_set_pixel_fmt; + vectors.av_opt_set_q = av_opt_set_q; + vectors.av_opt_set_sample_fmt = av_opt_set_sample_fmt; + vectors.av_opt_set_video_rate = av_opt_set_video_rate; + vectors.av_opt_show2 = av_opt_show2; + vectors.av_output_audio_device_next = av_output_audio_device_next; + vectors.av_output_video_device_next = av_output_video_device_next; + vectors.av_packet_add_side_data = av_packet_add_side_data; + vectors.av_packet_alloc = av_packet_alloc; + vectors.av_packet_clone = av_packet_clone; + vectors.av_packet_copy_props = av_packet_copy_props; + vectors.av_packet_free = av_packet_free; + vectors.av_packet_free_side_data = av_packet_free_side_data; + vectors.av_packet_from_data = av_packet_from_data; + vectors.av_packet_get_side_data = av_packet_get_side_data; + vectors.av_packet_make_refcounted = av_packet_make_refcounted; + vectors.av_packet_make_writable = av_packet_make_writable; + vectors.av_packet_move_ref = av_packet_move_ref; + vectors.av_packet_new_side_data = av_packet_new_side_data; + vectors.av_packet_pack_dictionary = av_packet_pack_dictionary; + vectors.av_packet_ref = av_packet_ref; + vectors.av_packet_rescale_ts = av_packet_rescale_ts; + vectors.av_packet_shrink_side_data = av_packet_shrink_side_data; + vectors.av_packet_side_data_name = av_packet_side_data_name; + vectors.av_packet_unpack_dictionary = av_packet_unpack_dictionary; + vectors.av_packet_unref = av_packet_unref; + vectors.av_parse_cpu_caps = av_parse_cpu_caps; + vectors.av_parser_close = av_parser_close; + vectors.av_parser_init = av_parser_init; + vectors.av_parser_iterate = av_parser_iterate; + vectors.av_parser_parse2 = av_parser_parse2; + vectors.av_pix_fmt_count_planes = av_pix_fmt_count_planes; + vectors.av_pix_fmt_desc_get = av_pix_fmt_desc_get; + vectors.av_pix_fmt_desc_get_id = av_pix_fmt_desc_get_id; + 
vectors.av_pix_fmt_desc_next = av_pix_fmt_desc_next; + vectors.av_pix_fmt_get_chroma_sub_sample = av_pix_fmt_get_chroma_sub_sample; + vectors.av_pix_fmt_swap_endianness = av_pix_fmt_swap_endianness; + vectors.av_pkt_dump_log2 = av_pkt_dump_log2; + vectors.av_pkt_dump2 = av_pkt_dump2; + vectors.av_probe_input_buffer = av_probe_input_buffer; + vectors.av_probe_input_buffer2 = av_probe_input_buffer2; + vectors.av_probe_input_format = av_probe_input_format; + vectors.av_probe_input_format2 = av_probe_input_format2; + vectors.av_probe_input_format3 = av_probe_input_format3; + vectors.av_program_add_stream_index = av_program_add_stream_index; + vectors.av_q2intfloat = av_q2intfloat; + vectors.av_read_frame = av_read_frame; + vectors.av_read_image_line = av_read_image_line; + vectors.av_read_image_line2 = av_read_image_line2; + vectors.av_read_pause = av_read_pause; + vectors.av_read_play = av_read_play; + vectors.av_realloc = av_realloc; + vectors.av_realloc_array = av_realloc_array; + vectors.av_realloc_f = av_realloc_f; + vectors.av_reallocp = av_reallocp; + vectors.av_reallocp_array = av_reallocp_array; + vectors.av_reduce = av_reduce; + vectors.av_rescale = av_rescale; + vectors.av_rescale_delta = av_rescale_delta; + vectors.av_rescale_q = av_rescale_q; + vectors.av_rescale_q_rnd = av_rescale_q_rnd; + vectors.av_rescale_rnd = av_rescale_rnd; + vectors.av_sample_fmt_is_planar = av_sample_fmt_is_planar; + vectors.av_samples_alloc = av_samples_alloc; + vectors.av_samples_alloc_array_and_samples = av_samples_alloc_array_and_samples; + vectors.av_samples_copy = av_samples_copy; + vectors.av_samples_fill_arrays = av_samples_fill_arrays; + vectors.av_samples_get_buffer_size = av_samples_get_buffer_size; + vectors.av_samples_set_silence = av_samples_set_silence; + vectors.av_sdp_create = av_sdp_create; + vectors.av_seek_frame = av_seek_frame; + vectors.av_set_options_string = av_set_options_string; + vectors.av_shrink_packet = av_shrink_packet; + vectors.av_size_mult = 
av_size_mult; + vectors.av_strdup = av_strdup; + vectors.av_stream_add_side_data = av_stream_add_side_data; + vectors.av_stream_get_class = av_stream_get_class; + vectors.av_stream_get_codec_timebase = av_stream_get_codec_timebase; + vectors.av_stream_get_end_pts = av_stream_get_end_pts; + vectors.av_stream_get_parser = av_stream_get_parser; + vectors.av_stream_get_side_data = av_stream_get_side_data; + vectors.av_stream_new_side_data = av_stream_new_side_data; + vectors.av_strerror = av_strerror; + vectors.av_strndup = av_strndup; + vectors.av_sub_q = av_sub_q; + vectors.av_tempfile = av_tempfile; + vectors.av_timecode_adjust_ntsc_framenum2 = av_timecode_adjust_ntsc_framenum2; + vectors.av_timecode_check_frame_rate = av_timecode_check_frame_rate; + vectors.av_timecode_get_smpte = av_timecode_get_smpte; + vectors.av_timecode_get_smpte_from_framenum = av_timecode_get_smpte_from_framenum; + vectors.av_timecode_init = av_timecode_init; + vectors.av_timecode_init_from_components = av_timecode_init_from_components; + vectors.av_timecode_init_from_string = av_timecode_init_from_string; + vectors.av_timecode_make_mpeg_tc_string = av_timecode_make_mpeg_tc_string; + vectors.av_timecode_make_smpte_tc_string = av_timecode_make_smpte_tc_string; + vectors.av_timecode_make_smpte_tc_string2 = av_timecode_make_smpte_tc_string2; + vectors.av_timecode_make_string = av_timecode_make_string; + vectors.av_tree_destroy = av_tree_destroy; + vectors.av_tree_enumerate = av_tree_enumerate; + vectors.av_tree_find = av_tree_find; + vectors.av_tree_insert = av_tree_insert; + vectors.av_tree_node_alloc = av_tree_node_alloc; + vectors.av_url_split = av_url_split; + vectors.av_usleep = av_usleep; + vectors.av_version_info = av_version_info; + vectors.av_vlog = av_vlog; + vectors.av_write_frame = av_write_frame; + vectors.av_write_image_line = av_write_image_line; + vectors.av_write_image_line2 = av_write_image_line2; + vectors.av_write_trailer = av_write_trailer; + vectors.av_write_uncoded_frame 
= av_write_uncoded_frame; + vectors.av_write_uncoded_frame_query = av_write_uncoded_frame_query; + vectors.av_xiphlacing = av_xiphlacing; + vectors.avcodec_align_dimensions = avcodec_align_dimensions; + vectors.avcodec_align_dimensions2 = avcodec_align_dimensions2; + vectors.avcodec_alloc_context3 = avcodec_alloc_context3; + vectors.avcodec_chroma_pos_to_enum = avcodec_chroma_pos_to_enum; + vectors.avcodec_close = avcodec_close; + vectors.avcodec_configuration = avcodec_configuration; + vectors.avcodec_decode_subtitle2 = avcodec_decode_subtitle2; + vectors.avcodec_default_execute = avcodec_default_execute; + vectors.avcodec_default_execute2 = avcodec_default_execute2; + vectors.avcodec_default_get_buffer2 = avcodec_default_get_buffer2; + vectors.avcodec_default_get_encode_buffer = avcodec_default_get_encode_buffer; + vectors.avcodec_default_get_format = avcodec_default_get_format; + vectors.avcodec_descriptor_get = avcodec_descriptor_get; + vectors.avcodec_descriptor_get_by_name = avcodec_descriptor_get_by_name; + vectors.avcodec_descriptor_next = avcodec_descriptor_next; + vectors.avcodec_encode_subtitle = avcodec_encode_subtitle; + vectors.avcodec_enum_to_chroma_pos = avcodec_enum_to_chroma_pos; + vectors.avcodec_fill_audio_frame = avcodec_fill_audio_frame; + vectors.avcodec_find_best_pix_fmt_of_list = avcodec_find_best_pix_fmt_of_list; + vectors.avcodec_find_decoder = avcodec_find_decoder; + vectors.avcodec_find_decoder_by_name = avcodec_find_decoder_by_name; + vectors.avcodec_find_encoder = avcodec_find_encoder; + vectors.avcodec_find_encoder_by_name = avcodec_find_encoder_by_name; + vectors.avcodec_flush_buffers = avcodec_flush_buffers; + vectors.avcodec_free_context = avcodec_free_context; + vectors.avcodec_get_class = avcodec_get_class; + vectors.avcodec_get_frame_class = avcodec_get_frame_class; + vectors.avcodec_get_hw_config = avcodec_get_hw_config; + vectors.avcodec_get_hw_frames_parameters = avcodec_get_hw_frames_parameters; + vectors.avcodec_get_name = 
avcodec_get_name; + vectors.avcodec_get_subtitle_rect_class = avcodec_get_subtitle_rect_class; + vectors.avcodec_get_type = avcodec_get_type; + vectors.avcodec_is_open = avcodec_is_open; + vectors.avcodec_license = avcodec_license; + vectors.avcodec_open2 = avcodec_open2; + vectors.avcodec_parameters_alloc = avcodec_parameters_alloc; + vectors.avcodec_parameters_copy = avcodec_parameters_copy; + vectors.avcodec_parameters_free = avcodec_parameters_free; + vectors.avcodec_parameters_from_context = avcodec_parameters_from_context; + vectors.avcodec_parameters_to_context = avcodec_parameters_to_context; + vectors.avcodec_pix_fmt_to_codec_tag = avcodec_pix_fmt_to_codec_tag; + vectors.avcodec_profile_name = avcodec_profile_name; + vectors.avcodec_receive_frame = avcodec_receive_frame; + vectors.avcodec_receive_packet = avcodec_receive_packet; + vectors.avcodec_send_frame = avcodec_send_frame; + vectors.avcodec_send_packet = avcodec_send_packet; + vectors.avcodec_string = avcodec_string; + vectors.avcodec_version = avcodec_version; + vectors.avdevice_app_to_dev_control_message = avdevice_app_to_dev_control_message; + vectors.avdevice_capabilities_create = avdevice_capabilities_create; + vectors.avdevice_capabilities_free = avdevice_capabilities_free; + vectors.avdevice_configuration = avdevice_configuration; + vectors.avdevice_dev_to_app_control_message = avdevice_dev_to_app_control_message; + vectors.avdevice_free_list_devices = avdevice_free_list_devices; + vectors.avdevice_license = avdevice_license; + vectors.avdevice_list_devices = avdevice_list_devices; + vectors.avdevice_list_input_sources = avdevice_list_input_sources; + vectors.avdevice_list_output_sinks = avdevice_list_output_sinks; + vectors.avdevice_register_all = avdevice_register_all; + vectors.avdevice_version = avdevice_version; + vectors.avfilter_config_links = avfilter_config_links; + vectors.avfilter_configuration = avfilter_configuration; + vectors.avfilter_filter_pad_count = 
avfilter_filter_pad_count; + vectors.avfilter_free = avfilter_free; + vectors.avfilter_get_by_name = avfilter_get_by_name; + vectors.avfilter_get_class = avfilter_get_class; + vectors.avfilter_graph_alloc = avfilter_graph_alloc; + vectors.avfilter_graph_alloc_filter = avfilter_graph_alloc_filter; + vectors.avfilter_graph_config = avfilter_graph_config; + vectors.avfilter_graph_create_filter = avfilter_graph_create_filter; + vectors.avfilter_graph_dump = avfilter_graph_dump; + vectors.avfilter_graph_free = avfilter_graph_free; + vectors.avfilter_graph_get_filter = avfilter_graph_get_filter; + vectors.avfilter_graph_parse = avfilter_graph_parse; + vectors.avfilter_graph_parse_ptr = avfilter_graph_parse_ptr; + vectors.avfilter_graph_parse2 = avfilter_graph_parse2; + vectors.avfilter_graph_queue_command = avfilter_graph_queue_command; + vectors.avfilter_graph_request_oldest = avfilter_graph_request_oldest; + vectors.avfilter_graph_send_command = avfilter_graph_send_command; + vectors.avfilter_graph_set_auto_convert = avfilter_graph_set_auto_convert; + vectors.avfilter_init_dict = avfilter_init_dict; + vectors.avfilter_init_str = avfilter_init_str; + vectors.avfilter_inout_alloc = avfilter_inout_alloc; + vectors.avfilter_inout_free = avfilter_inout_free; + vectors.avfilter_insert_filter = avfilter_insert_filter; + vectors.avfilter_license = avfilter_license; + vectors.avfilter_link = avfilter_link; + vectors.avfilter_link_free = avfilter_link_free; + vectors.avfilter_pad_count = avfilter_pad_count; + vectors.avfilter_pad_get_name = avfilter_pad_get_name; + vectors.avfilter_pad_get_type = avfilter_pad_get_type; + vectors.avfilter_process_command = avfilter_process_command; + vectors.avfilter_version = avfilter_version; + vectors.avformat_alloc_context = avformat_alloc_context; + vectors.avformat_alloc_output_context2 = avformat_alloc_output_context2; + vectors.avformat_close_input = avformat_close_input; + vectors.avformat_configuration = avformat_configuration; + 
vectors.avformat_find_stream_info = avformat_find_stream_info; + vectors.avformat_flush = avformat_flush; + vectors.avformat_free_context = avformat_free_context; + vectors.avformat_get_class = avformat_get_class; + vectors.avformat_get_mov_audio_tags = avformat_get_mov_audio_tags; + vectors.avformat_get_mov_video_tags = avformat_get_mov_video_tags; + vectors.avformat_get_riff_audio_tags = avformat_get_riff_audio_tags; + vectors.avformat_get_riff_video_tags = avformat_get_riff_video_tags; + vectors.avformat_index_get_entries_count = avformat_index_get_entries_count; + vectors.avformat_index_get_entry = avformat_index_get_entry; + vectors.avformat_index_get_entry_from_timestamp = avformat_index_get_entry_from_timestamp; + vectors.avformat_init_output = avformat_init_output; + vectors.avformat_license = avformat_license; + vectors.avformat_match_stream_specifier = avformat_match_stream_specifier; + vectors.avformat_network_deinit = avformat_network_deinit; + vectors.avformat_network_init = avformat_network_init; + vectors.avformat_new_stream = avformat_new_stream; + vectors.avformat_open_input = avformat_open_input; + vectors.avformat_query_codec = avformat_query_codec; + vectors.avformat_queue_attached_pictures = avformat_queue_attached_pictures; + vectors.avformat_seek_file = avformat_seek_file; + vectors.avformat_transfer_internal_stream_timing_info = avformat_transfer_internal_stream_timing_info; + vectors.avformat_version = avformat_version; + vectors.avformat_write_header = avformat_write_header; + vectors.avio_accept = avio_accept; + vectors.avio_alloc_context = avio_alloc_context; + vectors.avio_check = avio_check; + vectors.avio_close = avio_close; + vectors.avio_close_dir = avio_close_dir; + vectors.avio_close_dyn_buf = avio_close_dyn_buf; + vectors.avio_closep = avio_closep; + vectors.avio_context_free = avio_context_free; + vectors.avio_enum_protocols = avio_enum_protocols; + vectors.avio_feof = avio_feof; + vectors.avio_find_protocol_name = 
avio_find_protocol_name; + vectors.avio_flush = avio_flush; + vectors.avio_free_directory_entry = avio_free_directory_entry; + vectors.avio_get_dyn_buf = avio_get_dyn_buf; + vectors.avio_get_str = avio_get_str; + vectors.avio_get_str16be = avio_get_str16be; + vectors.avio_get_str16le = avio_get_str16le; + vectors.avio_handshake = avio_handshake; + vectors.avio_open = avio_open; + vectors.avio_open_dir = avio_open_dir; + vectors.avio_open_dyn_buf = avio_open_dyn_buf; + vectors.avio_open2 = avio_open2; + vectors.avio_pause = avio_pause; + vectors.avio_print_string_array = avio_print_string_array; + vectors.avio_printf = avio_printf; + vectors.avio_protocol_get_class = avio_protocol_get_class; + vectors.avio_put_str = avio_put_str; + vectors.avio_put_str16be = avio_put_str16be; + vectors.avio_put_str16le = avio_put_str16le; + vectors.avio_r8 = avio_r8; + vectors.avio_rb16 = avio_rb16; + vectors.avio_rb24 = avio_rb24; + vectors.avio_rb32 = avio_rb32; + vectors.avio_rb64 = avio_rb64; + vectors.avio_read = avio_read; + vectors.avio_read_dir = avio_read_dir; + vectors.avio_read_partial = avio_read_partial; + vectors.avio_read_to_bprint = avio_read_to_bprint; + vectors.avio_rl16 = avio_rl16; + vectors.avio_rl24 = avio_rl24; + vectors.avio_rl32 = avio_rl32; + vectors.avio_rl64 = avio_rl64; + vectors.avio_seek = avio_seek; + vectors.avio_seek_time = avio_seek_time; + vectors.avio_size = avio_size; + vectors.avio_skip = avio_skip; + vectors.avio_vprintf = avio_vprintf; + vectors.avio_w8 = avio_w8; + vectors.avio_wb16 = avio_wb16; + vectors.avio_wb24 = avio_wb24; + vectors.avio_wb32 = avio_wb32; + vectors.avio_wb64 = avio_wb64; + vectors.avio_wl16 = avio_wl16; + vectors.avio_wl24 = avio_wl24; + vectors.avio_wl32 = avio_wl32; + vectors.avio_wl64 = avio_wl64; + vectors.avio_write = avio_write; + vectors.avio_write_marker = avio_write_marker; + vectors.avsubtitle_free = avsubtitle_free; + vectors.avutil_configuration = avutil_configuration; + vectors.avutil_license = 
avutil_license; + vectors.avutil_version = avutil_version; + vectors.postproc_configuration = postproc_configuration; + vectors.postproc_license = postproc_license; + vectors.postproc_version = postproc_version; + vectors.pp_free_context = pp_free_context; + vectors.pp_free_mode = pp_free_mode; + vectors.pp_get_context = pp_get_context; + vectors.pp_get_mode_by_name_and_quality = pp_get_mode_by_name_and_quality; + vectors.pp_postprocess = pp_postprocess; + vectors.swr_alloc = swr_alloc; + vectors.swr_alloc_set_opts = swr_alloc_set_opts; + vectors.swr_alloc_set_opts2 = swr_alloc_set_opts2; + vectors.swr_build_matrix = swr_build_matrix; + vectors.swr_build_matrix2 = swr_build_matrix2; + vectors.swr_close = swr_close; + vectors.swr_config_frame = swr_config_frame; + vectors.swr_convert = swr_convert; + vectors.swr_convert_frame = swr_convert_frame; + vectors.swr_drop_output = swr_drop_output; + vectors.swr_free = swr_free; + vectors.swr_get_class = swr_get_class; + vectors.swr_get_delay = swr_get_delay; + vectors.swr_get_out_samples = swr_get_out_samples; + vectors.swr_init = swr_init; + vectors.swr_inject_silence = swr_inject_silence; + vectors.swr_is_initialized = swr_is_initialized; + vectors.swr_next_pts = swr_next_pts; + vectors.swr_set_channel_mapping = swr_set_channel_mapping; + vectors.swr_set_compensation = swr_set_compensation; + vectors.swr_set_matrix = swr_set_matrix; + vectors.swresample_configuration = swresample_configuration; + vectors.swresample_license = swresample_license; + vectors.swresample_version = swresample_version; + vectors.sws_alloc_context = sws_alloc_context; + vectors.sws_allocVec = sws_allocVec; + vectors.sws_convertPalette8ToPacked24 = sws_convertPalette8ToPacked24; + vectors.sws_convertPalette8ToPacked32 = sws_convertPalette8ToPacked32; + vectors.sws_frame_end = sws_frame_end; + vectors.sws_frame_start = sws_frame_start; + vectors.sws_freeContext = sws_freeContext; + vectors.sws_freeFilter = sws_freeFilter; + vectors.sws_freeVec = 
sws_freeVec; + vectors.sws_get_class = sws_get_class; + vectors.sws_getCachedContext = sws_getCachedContext; + vectors.sws_getCoefficients = sws_getCoefficients; + vectors.sws_getColorspaceDetails = sws_getColorspaceDetails; + vectors.sws_getContext = sws_getContext; + vectors.sws_getDefaultFilter = sws_getDefaultFilter; + vectors.sws_getGaussianVec = sws_getGaussianVec; + vectors.sws_init_context = sws_init_context; + vectors.sws_isSupportedEndiannessConversion = sws_isSupportedEndiannessConversion; + vectors.sws_isSupportedInput = sws_isSupportedInput; + vectors.sws_isSupportedOutput = sws_isSupportedOutput; + vectors.sws_normalizeVec = sws_normalizeVec; + vectors.sws_receive_slice = sws_receive_slice; + vectors.sws_receive_slice_alignment = sws_receive_slice_alignment; + vectors.sws_scale = sws_scale; + vectors.sws_scale_frame = sws_scale_frame; + vectors.sws_scaleVec = sws_scaleVec; + vectors.sws_send_slice = sws_send_slice; + vectors.sws_setColorspaceDetails = sws_setColorspaceDetails; + vectors.swscale_configuration = swscale_configuration; + vectors.swscale_license = swscale_license; + vectors.swscale_version = swscale_version; + } +} diff --git a/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/DynamicallyLoadedBindings.cs b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/DynamicallyLoadedBindings.cs new file mode 100644 index 00000000..14d56c23 --- /dev/null +++ b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/DynamicallyLoadedBindings.cs @@ -0,0 +1,6 @@ +namespace FFmpeg.AutoGen.Bindings.DynamicallyLoaded; + +public partial class DynamicallyLoadedBindings +{ + public static string LibrariesPath { get; set; } = string.Empty; +} diff --git a/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/FFmpeg.AutoGen.Bindings.DynamicallyLoaded.csproj b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/FFmpeg.AutoGen.Bindings.DynamicallyLoaded.csproj new file mode 100644 index 00000000..19165101 --- /dev/null +++ 
b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/FFmpeg.AutoGen.Bindings.DynamicallyLoaded.csproj @@ -0,0 +1,35 @@ + + + + netstandard2.1;netstandard2.0;net45 + FFmpeg auto generated unsafe bindings for C#/.NET and Mono. Dynamicly loaded implementation. + true + + + + True + 108;169;612;618;1573;1591;1701;1702;1705 + false + + bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml + + + + + true + snupkg + + + + + + + + + + + + + + + diff --git a/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/FunctionResolverBase.cs b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/FunctionResolverBase.cs new file mode 100644 index 00000000..4900a69e --- /dev/null +++ b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/FunctionResolverBase.cs @@ -0,0 +1,89 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Bindings.DynamicallyLoaded; + +public abstract class FunctionResolverBase : IFunctionResolver +{ + public static readonly Dictionary LibraryDependenciesMap = + new() + { + { "avcodec", new[] { "avutil", "swresample" } }, + { "avdevice", new[] { "avcodec", "avfilter", "avformat", "avutil" } }, + { "avfilter", new[] { "avcodec", "avformat", "avutil", "postproc", "swresample", "swscale" } }, + { "avformat", new[] { "avcodec", "avutil" } }, + { "avutil", new string[0] }, + { "postproc", new[] { "avutil" } }, + { "swresample", new[] { "avutil" } }, + { "swscale", new[] { "avutil" } } + }; + + private readonly Dictionary _loadedLibraries = new(); + + private readonly object _syncRoot = new(); + + public T GetFunctionDelegate(string libraryName, string functionName, bool throwOnError = true) + { + lock (_syncRoot) + { + var nativeLibraryHandle = GetOrLoadLibrary(libraryName, throwOnError); + var ptr = GetFunctionPointer(nativeLibraryHandle, functionName); + + if (ptr == IntPtr.Zero) + { + if (throwOnError) throw new EntryPointNotFoundException($"Could not find the entrypoint for {functionName}."); + 
return default; + } + +#if NETSTANDARD2_0_OR_GREATER + try + { + return Marshal.GetDelegateForFunctionPointer(ptr); + } + catch (MarshalDirectiveException) + { + if (throwOnError) + throw; + return default; + } +#else + return (T)(object)Marshal.GetDelegateForFunctionPointer(ptr, typeof(T)); +#endif + } + } + + protected abstract string GetNativeLibraryName(string libraryName, int version); + protected abstract IntPtr LoadNativeLibrary(string libraryName); + protected abstract IntPtr GetFunctionPointer(IntPtr nativeLibraryHandle, string functionName); + + private IntPtr GetOrLoadLibrary(string libraryName, bool throwOnError) + { + if (_loadedLibraries.TryGetValue(libraryName, out var ptr)) return ptr; + + lock (_syncRoot) + { + if (_loadedLibraries.TryGetValue(libraryName, out ptr)) return ptr; + + var dependencies = LibraryDependenciesMap[libraryName]; + dependencies.Where(n => !_loadedLibraries.ContainsKey(n) && !n.Equals(libraryName)) + .ToList() + .ForEach(n => GetOrLoadLibrary(n, false)); + + var version = DynamicallyLoadedBindings.LibraryVersionMap[libraryName]; + var nativeLibraryName = GetNativeLibraryName(libraryName, version); + + var libraryPath = Path.Combine(DynamicallyLoadedBindings.LibrariesPath, nativeLibraryName); + ptr = LoadNativeLibrary(libraryPath); + + if (ptr != IntPtr.Zero) _loadedLibraries.Add(libraryName, ptr); + else if (throwOnError) + throw new DllNotFoundException( + $"Unable to load DLL '{libraryName}.{version} under {DynamicallyLoadedBindings.LibrariesPath}': The specified module could not be found."); + + return ptr; + } + } +} diff --git a/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/FunctionResolverFactory.cs b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/FunctionResolverFactory.cs new file mode 100644 index 00000000..42d16aef --- /dev/null +++ b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/FunctionResolverFactory.cs @@ -0,0 +1,36 @@ +using FFmpeg.AutoGen.Bindings.DynamicallyLoaded.Native; +using System; +using 
System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Bindings.DynamicallyLoaded; + +public static class FunctionResolverFactory +{ + public static PlatformID GetPlatformId() + { +#if NETSTANDARD2_0_OR_GREATER + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) return PlatformID.Win32NT; + if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) return PlatformID.Unix; + if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) return PlatformID.MacOSX; + throw new PlatformNotSupportedException(); +#else + return Environment.OSVersion.Platform; + +#endif + } + + public static IFunctionResolver Create() + { + switch (GetPlatformId()) + { + case PlatformID.MacOSX: + return new MacFunctionResolver(); + case PlatformID.Unix: + return new LinuxFunctionResolver(); + case PlatformID.Win32NT: + return new WindowsFunctionResolver(); + default: + throw new PlatformNotSupportedException(); + } + } +} diff --git a/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/IFunctionResolver.cs b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/IFunctionResolver.cs new file mode 100644 index 00000000..7688817e --- /dev/null +++ b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/IFunctionResolver.cs @@ -0,0 +1,24 @@ +namespace FFmpeg.AutoGen.Bindings.DynamicallyLoaded; + +/// +/// Supports loading functions from native libraries. Provides a more flexible alternative to P/Invoke. +/// +public interface IFunctionResolver +{ + /// + /// Creates a delegate which invokes a native function. + /// + /// + /// The function delegate. + /// + /// + /// The library name which contains the function. + /// + /// + /// The name of the function for which to create the delegate. + /// + /// + /// A new delegate which points to the native function. 
+ /// + T GetFunctionDelegate(string libraryName, string functionName, bool throwOnError = true); +} diff --git a/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/Native/LinuxFunctionResolver.cs b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/Native/LinuxFunctionResolver.cs new file mode 100644 index 00000000..22efcf12 --- /dev/null +++ b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/Native/LinuxFunctionResolver.cs @@ -0,0 +1,24 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Bindings.DynamicallyLoaded.Native; + +public class LinuxFunctionResolver : FunctionResolverBase +{ + private const string Libdl = "libdl.so.2"; + + private const int RTLD_NOW = 0x002; + + protected override string GetNativeLibraryName(string libraryName, int version) => $"lib{libraryName}.so.{version}"; + + protected override IntPtr LoadNativeLibrary(string libraryName) => dlopen(libraryName, RTLD_NOW); + + protected override IntPtr GetFunctionPointer(IntPtr nativeLibraryHandle, string functionName) => dlsym(nativeLibraryHandle, functionName); + + + [DllImport(Libdl)] + public static extern IntPtr dlsym(IntPtr handle, string symbol); + + [DllImport(Libdl)] + public static extern IntPtr dlopen(string fileName, int flag); +} diff --git a/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/Native/MacFunctionResolver.cs b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/Native/MacFunctionResolver.cs new file mode 100644 index 00000000..7acf1906 --- /dev/null +++ b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/Native/MacFunctionResolver.cs @@ -0,0 +1,22 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Bindings.DynamicallyLoaded.Native; + +public class MacFunctionResolver : FunctionResolverBase +{ + private const string Libdl = "libdl"; + + private const int RTLD_NOW = 0x002; + + protected override string GetNativeLibraryName(string libraryName, int version) => $"lib{libraryName}.{version}.dylib"; + protected override IntPtr LoadNativeLibrary(string libraryName) 
=> dlopen(libraryName, RTLD_NOW); + protected override IntPtr GetFunctionPointer(IntPtr nativeLibraryHandle, string functionName) => dlsym(nativeLibraryHandle, functionName); + + + [DllImport(Libdl)] + public static extern IntPtr dlsym(IntPtr handle, string symbol); + + [DllImport(Libdl)] + public static extern IntPtr dlopen(string fileName, int flag); +} diff --git a/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/Native/WindowsFunctionResolver.cs b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/Native/WindowsFunctionResolver.cs new file mode 100644 index 00000000..e00d28c7 --- /dev/null +++ b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/Native/WindowsFunctionResolver.cs @@ -0,0 +1,59 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Bindings.DynamicallyLoaded.Native; + +public class WindowsFunctionResolver : FunctionResolverBase +{ + private const string Kernel32 = "kernel32"; + + protected override string GetNativeLibraryName(string libraryName, int version) => $"{libraryName}-{version}.dll"; + + protected override IntPtr LoadNativeLibrary(string libraryName) => LoadLibrary(libraryName); + protected override IntPtr GetFunctionPointer(IntPtr nativeLibraryHandle, string functionName) => GetProcAddress(nativeLibraryHandle, functionName); + + [DllImport(Kernel32, CharSet = CharSet.Ansi, BestFitMapping = false)] + public static extern IntPtr GetProcAddress(IntPtr hModule, string lpProcName); + + /// + /// Loads the specified module into the address space of the calling process. The specified module may cause other + /// modules to be loaded. + /// + /// + /// + /// The name of the module. This can be either a library module (a .dll file) or an executable module (an + /// .exe file). + /// The name specified is the file name of the module and is not related to the name stored in the library module + /// itself, + /// as specified by the LIBRARY keyword in the module-definition (.def) file. 
+ /// + /// + /// If the string specifies a full path, the function searches only that path for the module. + /// + /// + /// If the string specifies a relative path or a module name without a path, the function uses a standard search + /// strategy + /// to find the module; for more information, see the Remarks. + /// + /// + /// If the function cannot find the module, the function fails. When specifying a path, be sure to use backslashes + /// (\), + /// not forward slashes (/). For more information about paths, see Naming a File or Directory. + /// + /// + /// If the string specifies a module name without a path and the file name extension is omitted, the function + /// appends the + /// default library extension .dll to the module name. To prevent the function from appending .dll to + /// the module name, + /// include a trailing point character (.) in the module name string. + /// + /// + /// + /// If the function succeeds, the return value is a handle to the module. + /// If the function fails, the return value is . To get extended error information, call + /// . 
+ /// + /// + [DllImport(Kernel32, SetLastError = true)] + public static extern IntPtr LoadLibrary(string dllToLoad); +} diff --git a/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/generated/DynamicallyLoadedBindings.g.cs b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/generated/DynamicallyLoadedBindings.g.cs new file mode 100644 index 00000000..5aa9c4a4 --- /dev/null +++ b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/generated/DynamicallyLoadedBindings.g.cs @@ -0,0 +1,4247 @@ +using System; +using System.Runtime.InteropServices; +using FFmpeg.AutoGen.Abstractions; + +namespace FFmpeg.AutoGen.Bindings.DynamicallyLoaded; + +public static unsafe partial class DynamicallyLoadedBindings +{ + public static bool ThrowErrorIfFunctionNotFound; + public static IFunctionResolver FunctionResolver; + + public unsafe static void Initialize() + { + if (FunctionResolver == null) FunctionResolver = FunctionResolverFactory.Create(); + + vectors.av_abuffersink_params_alloc = () => + { + vectors.av_abuffersink_params_alloc = FunctionResolver.GetFunctionDelegate("avfilter", "av_abuffersink_params_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_abuffersink_params_alloc(); + }; + + vectors.av_add_index_entry = (AVStream* @st, long @pos, long @timestamp, int @size, int @distance, int @flags) => + { + vectors.av_add_index_entry = FunctionResolver.GetFunctionDelegate("avformat", "av_add_index_entry", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_add_index_entry(@st, @pos, @timestamp, @size, @distance, @flags); + }; + + vectors.av_add_q = (AVRational @b, AVRational @c) => + { + vectors.av_add_q = FunctionResolver.GetFunctionDelegate("avutil", "av_add_q", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_add_q(@b, @c); + }; + + vectors.av_add_stable = (AVRational @ts_tb, long @ts, AVRational @inc_tb, long @inc) => + { + vectors.av_add_stable = FunctionResolver.GetFunctionDelegate("avutil", "av_add_stable", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_add_stable(@ts_tb, @ts, @inc_tb, @inc); + }; + + vectors.av_append_packet = (AVIOContext* @s, AVPacket* @pkt, int @size) => + { + vectors.av_append_packet = FunctionResolver.GetFunctionDelegate("avformat", "av_append_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_append_packet(@s, @pkt, @size); + }; + + vectors.av_audio_fifo_alloc = (AVSampleFormat @sample_fmt, int @channels, int @nb_samples) => + { + vectors.av_audio_fifo_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_alloc(@sample_fmt, @channels, @nb_samples); + }; + + vectors.av_audio_fifo_drain = (AVAudioFifo* @af, int @nb_samples) => + { + vectors.av_audio_fifo_drain = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_drain", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_drain(@af, @nb_samples); + }; + + vectors.av_audio_fifo_free = (AVAudioFifo* @af) => + { + vectors.av_audio_fifo_free = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_audio_fifo_free(@af); + }; + + vectors.av_audio_fifo_peek = (AVAudioFifo* @af, void** @data, int @nb_samples) => + { + vectors.av_audio_fifo_peek = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_peek", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_peek(@af, @data, @nb_samples); + }; + + vectors.av_audio_fifo_peek_at = (AVAudioFifo* @af, void** @data, int @nb_samples, int @offset) => + { + vectors.av_audio_fifo_peek_at = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_peek_at", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_peek_at(@af, @data, @nb_samples, @offset); + }; + + vectors.av_audio_fifo_read = (AVAudioFifo* @af, void** @data, int @nb_samples) => + { + vectors.av_audio_fifo_read = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_read", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_read(@af, @data, @nb_samples); + }; + + vectors.av_audio_fifo_realloc = (AVAudioFifo* @af, int @nb_samples) => + { + vectors.av_audio_fifo_realloc = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_realloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_realloc(@af, @nb_samples); + }; + + vectors.av_audio_fifo_reset = (AVAudioFifo* @af) => + { + vectors.av_audio_fifo_reset = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_reset", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_audio_fifo_reset(@af); + }; + + vectors.av_audio_fifo_size = (AVAudioFifo* @af) => + { + vectors.av_audio_fifo_size = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_size(@af); + }; + + vectors.av_audio_fifo_space = (AVAudioFifo* @af) => + { + vectors.av_audio_fifo_space = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_space", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_space(@af); + }; + + vectors.av_audio_fifo_write = (AVAudioFifo* @af, void** @data, int @nb_samples) => + { + vectors.av_audio_fifo_write = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_write", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_write(@af, @data, @nb_samples); + }; + + vectors.av_bprint_channel_layout = (AVBPrint* @bp, int @nb_channels, ulong @channel_layout) => + { + vectors.av_bprint_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_bprint_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_bprint_channel_layout(@bp, @nb_channels, @channel_layout); + }; + + vectors.av_bsf_alloc = (AVBitStreamFilter* @filter, AVBSFContext** @ctx) => + { + vectors.av_bsf_alloc = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_alloc(@filter, @ctx); + }; + + vectors.av_bsf_flush = (AVBSFContext* @ctx) => + { + vectors.av_bsf_flush = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_flush", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_bsf_flush(@ctx); + }; + + vectors.av_bsf_free = (AVBSFContext** @ctx) => + { + vectors.av_bsf_free = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_bsf_free(@ctx); + }; + + vectors.av_bsf_get_by_name = (string @name) => + { + vectors.av_bsf_get_by_name = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_get_by_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_get_by_name(@name); + }; + + vectors.av_bsf_get_class = () => + { + vectors.av_bsf_get_class = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_get_class(); + }; + + vectors.av_bsf_get_null_filter = (AVBSFContext** @bsf) => + { + vectors.av_bsf_get_null_filter = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_get_null_filter", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_get_null_filter(@bsf); + }; + + vectors.av_bsf_init = (AVBSFContext* @ctx) => + { + vectors.av_bsf_init = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_init(@ctx); + }; + + vectors.av_bsf_iterate = (void** @opaque) => + { + vectors.av_bsf_iterate = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_iterate(@opaque); + }; + + vectors.av_bsf_list_alloc = () => + { + vectors.av_bsf_list_alloc = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_list_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_list_alloc(); + }; + + vectors.av_bsf_list_append = (AVBSFList* @lst, AVBSFContext* @bsf) => + { + vectors.av_bsf_list_append = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_list_append", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_list_append(@lst, @bsf); + }; + + vectors.av_bsf_list_append2 = (AVBSFList* @lst, string @bsf_name, AVDictionary** @options) => + { + vectors.av_bsf_list_append2 = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_list_append2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_list_append2(@lst, @bsf_name, @options); + }; + + vectors.av_bsf_list_finalize = (AVBSFList** @lst, AVBSFContext** @bsf) => + { + vectors.av_bsf_list_finalize = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_list_finalize", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_list_finalize(@lst, @bsf); + }; + + vectors.av_bsf_list_free = (AVBSFList** @lst) => + { + vectors.av_bsf_list_free = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_list_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_bsf_list_free(@lst); + }; + + vectors.av_bsf_list_parse_str = (string @str, AVBSFContext** @bsf) => + { + vectors.av_bsf_list_parse_str = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_list_parse_str", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_list_parse_str(@str, @bsf); + }; + + vectors.av_bsf_receive_packet = (AVBSFContext* @ctx, AVPacket* @pkt) => + { + vectors.av_bsf_receive_packet = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_receive_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_receive_packet(@ctx, @pkt); + }; + + vectors.av_bsf_send_packet = (AVBSFContext* @ctx, AVPacket* @pkt) => + { + vectors.av_bsf_send_packet = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_send_packet", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_send_packet(@ctx, @pkt); + }; + + vectors.av_buffer_alloc = (ulong @size) => + { + vectors.av_buffer_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_alloc(@size); + }; + + vectors.av_buffer_allocz = (ulong @size) => + { + vectors.av_buffer_allocz = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_allocz", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_allocz(@size); + }; + + vectors.av_buffer_create = (byte* @data, ulong @size, av_buffer_create_free_func @free, void* @opaque, int @flags) => + { + vectors.av_buffer_create = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_create", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_create(@data, @size, @free, @opaque, @flags); + }; + + vectors.av_buffer_default_free = (void* @opaque, byte* @data) => + { + vectors.av_buffer_default_free = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_default_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_buffer_default_free(@opaque, @data); + }; + + vectors.av_buffer_get_opaque = (AVBufferRef* @buf) => + { + vectors.av_buffer_get_opaque = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_get_opaque", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_get_opaque(@buf); + }; + + vectors.av_buffer_get_ref_count = (AVBufferRef* @buf) => + { + vectors.av_buffer_get_ref_count = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_get_ref_count", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_get_ref_count(@buf); + }; + + vectors.av_buffer_is_writable = (AVBufferRef* @buf) => + { + vectors.av_buffer_is_writable = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_is_writable", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_is_writable(@buf); + }; + + vectors.av_buffer_make_writable = (AVBufferRef** @buf) => + { + vectors.av_buffer_make_writable = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_make_writable", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_make_writable(@buf); + }; + + vectors.av_buffer_pool_buffer_get_opaque = (AVBufferRef* @ref) => + { + vectors.av_buffer_pool_buffer_get_opaque = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_pool_buffer_get_opaque", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_pool_buffer_get_opaque(@ref); + }; + + vectors.av_buffer_pool_get = (AVBufferPool* @pool) => + { + vectors.av_buffer_pool_get = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_pool_get", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_pool_get(@pool); + }; + + vectors.av_buffer_pool_init = (ulong @size, av_buffer_pool_init_alloc_func @alloc) => + { + vectors.av_buffer_pool_init = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_pool_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_pool_init(@size, @alloc); + }; + + vectors.av_buffer_pool_init2 = (ulong @size, void* @opaque, av_buffer_pool_init2_alloc_func @alloc, av_buffer_pool_init2_pool_free_func @pool_free) => + { + vectors.av_buffer_pool_init2 = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_pool_init2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_pool_init2(@size, @opaque, @alloc, @pool_free); + }; + + vectors.av_buffer_pool_uninit = (AVBufferPool** @pool) => + { + vectors.av_buffer_pool_uninit = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_pool_uninit", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_buffer_pool_uninit(@pool); + }; + + vectors.av_buffer_realloc = (AVBufferRef** @buf, ulong @size) => + { + vectors.av_buffer_realloc = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_realloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_realloc(@buf, @size); + }; + + vectors.av_buffer_ref = (AVBufferRef* @buf) => + { + vectors.av_buffer_ref = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_ref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_ref(@buf); + }; + + vectors.av_buffer_replace = (AVBufferRef** @dst, AVBufferRef* @src) => + { + vectors.av_buffer_replace = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_replace", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_replace(@dst, @src); + }; + + vectors.av_buffer_unref = (AVBufferRef** @buf) => + { + vectors.av_buffer_unref = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_unref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_buffer_unref(@buf); + }; + + vectors.av_buffersink_get_ch_layout = (AVFilterContext* @ctx, AVChannelLayout* @ch_layout) => + { + vectors.av_buffersink_get_ch_layout = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_ch_layout", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_ch_layout(@ctx, @ch_layout); + }; + + vectors.av_buffersink_get_channel_layout = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_channel_layout = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_channel_layout(@ctx); + }; + + vectors.av_buffersink_get_channels = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_channels = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_channels", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_channels(@ctx); + }; + + vectors.av_buffersink_get_format = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_format = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_format", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_format(@ctx); + }; + + vectors.av_buffersink_get_frame = (AVFilterContext* @ctx, AVFrame* @frame) => + { + vectors.av_buffersink_get_frame = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_frame(@ctx, @frame); + }; + + vectors.av_buffersink_get_frame_flags = (AVFilterContext* @ctx, AVFrame* @frame, int @flags) => + { + vectors.av_buffersink_get_frame_flags = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_frame_flags", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_frame_flags(@ctx, @frame, @flags); + }; + + vectors.av_buffersink_get_frame_rate = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_frame_rate = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_frame_rate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_frame_rate(@ctx); + }; + + vectors.av_buffersink_get_h = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_h = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_h", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_h(@ctx); + }; + + vectors.av_buffersink_get_hw_frames_ctx = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_hw_frames_ctx = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_hw_frames_ctx", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_hw_frames_ctx(@ctx); + }; + + vectors.av_buffersink_get_sample_aspect_ratio = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_sample_aspect_ratio = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_sample_aspect_ratio", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_sample_aspect_ratio(@ctx); + }; + + vectors.av_buffersink_get_sample_rate = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_sample_rate = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_sample_rate", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_sample_rate(@ctx); + }; + + vectors.av_buffersink_get_samples = (AVFilterContext* @ctx, AVFrame* @frame, int @nb_samples) => + { + vectors.av_buffersink_get_samples = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_samples", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_samples(@ctx, @frame, @nb_samples); + }; + + vectors.av_buffersink_get_time_base = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_time_base = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_time_base", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_time_base(@ctx); + }; + + vectors.av_buffersink_get_type = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_type = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_type", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_type(@ctx); + }; + + vectors.av_buffersink_get_w = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_w = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_w", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_w(@ctx); + }; + + vectors.av_buffersink_params_alloc = () => + { + vectors.av_buffersink_params_alloc = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_params_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_params_alloc(); + }; + + vectors.av_buffersink_set_frame_size = (AVFilterContext* @ctx, uint @frame_size) => + { + vectors.av_buffersink_set_frame_size = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_set_frame_size", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_buffersink_set_frame_size(@ctx, @frame_size); + }; + + vectors.av_buffersrc_add_frame = (AVFilterContext* @ctx, AVFrame* @frame) => + { + vectors.av_buffersrc_add_frame = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_add_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_add_frame(@ctx, @frame); + }; + + vectors.av_buffersrc_add_frame_flags = (AVFilterContext* @buffer_src, AVFrame* @frame, int @flags) => + { + vectors.av_buffersrc_add_frame_flags = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_add_frame_flags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_add_frame_flags(@buffer_src, @frame, @flags); + }; + + vectors.av_buffersrc_close = (AVFilterContext* @ctx, long @pts, uint @flags) => + { + vectors.av_buffersrc_close = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_close", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_close(@ctx, @pts, @flags); + }; + + vectors.av_buffersrc_get_nb_failed_requests = (AVFilterContext* @buffer_src) => + { + vectors.av_buffersrc_get_nb_failed_requests = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_get_nb_failed_requests", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_get_nb_failed_requests(@buffer_src); + }; + + vectors.av_buffersrc_parameters_alloc = () => + { + vectors.av_buffersrc_parameters_alloc = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_parameters_alloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_parameters_alloc(); + }; + + vectors.av_buffersrc_parameters_set = (AVFilterContext* @ctx, AVBufferSrcParameters* @param) => + { + vectors.av_buffersrc_parameters_set = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_parameters_set", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_parameters_set(@ctx, @param); + }; + + vectors.av_buffersrc_write_frame = (AVFilterContext* @ctx, AVFrame* @frame) => + { + vectors.av_buffersrc_write_frame = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_write_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_write_frame(@ctx, @frame); + }; + + vectors.av_calloc = (ulong @nmemb, ulong @size) => + { + vectors.av_calloc = FunctionResolver.GetFunctionDelegate("avutil", "av_calloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_calloc(@nmemb, @size); + }; + + vectors.av_channel_description = (byte* @buf, ulong @buf_size, AVChannel @channel) => + { + vectors.av_channel_description = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_description", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_description(@buf, @buf_size, @channel); + }; + + vectors.av_channel_description_bprint = (AVBPrint* @bp, AVChannel @channel_id) => + { + vectors.av_channel_description_bprint = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_description_bprint", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_channel_description_bprint(@bp, @channel_id); + }; + + vectors.av_channel_from_string = (string @name) => + { + vectors.av_channel_from_string = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_from_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_from_string(@name); + }; + + vectors.av_channel_layout_channel_from_index = (AVChannelLayout* @channel_layout, uint @idx) => + { + vectors.av_channel_layout_channel_from_index = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_channel_from_index", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_channel_from_index(@channel_layout, @idx); + }; + + vectors.av_channel_layout_channel_from_string = (AVChannelLayout* @channel_layout, string @name) => + { + vectors.av_channel_layout_channel_from_string = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_channel_from_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_channel_from_string(@channel_layout, @name); + }; + + vectors.av_channel_layout_check = (AVChannelLayout* @channel_layout) => + { + vectors.av_channel_layout_check = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_check", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_check(@channel_layout); + }; + + vectors.av_channel_layout_compare = (AVChannelLayout* @chl, AVChannelLayout* @chl1) => + { + vectors.av_channel_layout_compare = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_compare", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_compare(@chl, @chl1); + }; + + vectors.av_channel_layout_copy = (AVChannelLayout* @dst, AVChannelLayout* @src) => + { + vectors.av_channel_layout_copy = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_copy", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_copy(@dst, @src); + }; + + vectors.av_channel_layout_default = (AVChannelLayout* @ch_layout, int @nb_channels) => + { + vectors.av_channel_layout_default = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_default", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_channel_layout_default(@ch_layout, @nb_channels); + }; + + vectors.av_channel_layout_describe = (AVChannelLayout* @channel_layout, byte* @buf, ulong @buf_size) => + { + vectors.av_channel_layout_describe = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_describe", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_describe(@channel_layout, @buf, @buf_size); + }; + + vectors.av_channel_layout_describe_bprint = (AVChannelLayout* @channel_layout, AVBPrint* @bp) => + { + vectors.av_channel_layout_describe_bprint = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_describe_bprint", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_describe_bprint(@channel_layout, @bp); + }; + + vectors.av_channel_layout_extract_channel = (ulong @channel_layout, int @index) => + { + vectors.av_channel_layout_extract_channel = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_extract_channel", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_extract_channel(@channel_layout, @index); + }; + + vectors.av_channel_layout_from_mask = (AVChannelLayout* @channel_layout, ulong @mask) => + { + vectors.av_channel_layout_from_mask = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_from_mask", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_from_mask(@channel_layout, @mask); + }; + + vectors.av_channel_layout_from_string = (AVChannelLayout* @channel_layout, string @str) => + { + vectors.av_channel_layout_from_string = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_from_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_from_string(@channel_layout, @str); + }; + + vectors.av_channel_layout_index_from_channel = (AVChannelLayout* @channel_layout, AVChannel @channel) => + { + vectors.av_channel_layout_index_from_channel = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_index_from_channel", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_index_from_channel(@channel_layout, @channel); + }; + + vectors.av_channel_layout_index_from_string = (AVChannelLayout* @channel_layout, string @name) => + { + vectors.av_channel_layout_index_from_string = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_index_from_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_index_from_string(@channel_layout, @name); + }; + + vectors.av_channel_layout_standard = (void** @opaque) => + { + vectors.av_channel_layout_standard = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_standard", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_standard(@opaque); + }; + + vectors.av_channel_layout_subset = (AVChannelLayout* @channel_layout, ulong @mask) => + { + vectors.av_channel_layout_subset = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_subset", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_subset(@channel_layout, @mask); + }; + + vectors.av_channel_layout_uninit = (AVChannelLayout* @channel_layout) => + { + vectors.av_channel_layout_uninit = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_uninit", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_channel_layout_uninit(@channel_layout); + }; + + vectors.av_channel_name = (byte* @buf, ulong @buf_size, AVChannel @channel) => + { + vectors.av_channel_name = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_name(@buf, @buf_size, @channel); + }; + + vectors.av_channel_name_bprint = (AVBPrint* @bp, AVChannel @channel_id) => + { + vectors.av_channel_name_bprint = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_name_bprint", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_channel_name_bprint(@bp, @channel_id); + }; + + vectors.av_chroma_location_from_name = (string @name) => + { + vectors.av_chroma_location_from_name = FunctionResolver.GetFunctionDelegate("avutil", "av_chroma_location_from_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_chroma_location_from_name(@name); + }; + + vectors.av_chroma_location_name = (AVChromaLocation @location) => + { + vectors.av_chroma_location_name = FunctionResolver.GetFunctionDelegate("avutil", "av_chroma_location_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_chroma_location_name(@location); + }; + + vectors.av_codec_get_id = (AVCodecTag** @tags, uint @tag) => + { + vectors.av_codec_get_id = FunctionResolver.GetFunctionDelegate("avformat", "av_codec_get_id", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_codec_get_id(@tags, @tag); + }; + + vectors.av_codec_get_tag = (AVCodecTag** @tags, AVCodecID @id) => + { + vectors.av_codec_get_tag = FunctionResolver.GetFunctionDelegate("avformat", "av_codec_get_tag", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_codec_get_tag(@tags, @id); + }; + + vectors.av_codec_get_tag2 = (AVCodecTag** @tags, AVCodecID @id, uint* @tag) => + { + vectors.av_codec_get_tag2 = FunctionResolver.GetFunctionDelegate("avformat", "av_codec_get_tag2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_codec_get_tag2(@tags, @id, @tag); + }; + + vectors.av_codec_is_decoder = (AVCodec* @codec) => + { + vectors.av_codec_is_decoder = FunctionResolver.GetFunctionDelegate("avcodec", "av_codec_is_decoder", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_codec_is_decoder(@codec); + }; + + vectors.av_codec_is_encoder = (AVCodec* @codec) => + { + vectors.av_codec_is_encoder = FunctionResolver.GetFunctionDelegate("avcodec", "av_codec_is_encoder", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_codec_is_encoder(@codec); + }; + + vectors.av_codec_iterate = (void** @opaque) => + { + vectors.av_codec_iterate = FunctionResolver.GetFunctionDelegate("avcodec", "av_codec_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_codec_iterate(@opaque); + }; + + vectors.av_color_primaries_from_name = (string @name) => + { + vectors.av_color_primaries_from_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_primaries_from_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_primaries_from_name(@name); + }; + + vectors.av_color_primaries_name = (AVColorPrimaries @primaries) => + { + vectors.av_color_primaries_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_primaries_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_primaries_name(@primaries); + }; + + vectors.av_color_range_from_name = (string @name) => + { + vectors.av_color_range_from_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_range_from_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_range_from_name(@name); + }; + + vectors.av_color_range_name = (AVColorRange @range) => + { + vectors.av_color_range_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_range_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_range_name(@range); + }; + + vectors.av_color_space_from_name = (string @name) => + { + vectors.av_color_space_from_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_space_from_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_color_space_from_name(@name); + }; + + vectors.av_color_space_name = (AVColorSpace @space) => + { + vectors.av_color_space_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_space_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_space_name(@space); + }; + + vectors.av_color_transfer_from_name = (string @name) => + { + vectors.av_color_transfer_from_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_transfer_from_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_transfer_from_name(@name); + }; + + vectors.av_color_transfer_name = (AVColorTransferCharacteristic @transfer) => + { + vectors.av_color_transfer_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_transfer_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_transfer_name(@transfer); + }; + + vectors.av_compare_mod = (ulong @a, ulong @b, ulong @mod) => + { + vectors.av_compare_mod = FunctionResolver.GetFunctionDelegate("avutil", "av_compare_mod", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_compare_mod(@a, @b, @mod); + }; + + vectors.av_compare_ts = (long @ts_a, AVRational @tb_a, long @ts_b, AVRational @tb_b) => + { + vectors.av_compare_ts = FunctionResolver.GetFunctionDelegate("avutil", "av_compare_ts", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_compare_ts(@ts_a, @tb_a, @ts_b, @tb_b); + }; + + vectors.av_content_light_metadata_alloc = (ulong* @size) => + { + vectors.av_content_light_metadata_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_content_light_metadata_alloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_content_light_metadata_alloc(@size); + }; + + vectors.av_content_light_metadata_create_side_data = (AVFrame* @frame) => + { + vectors.av_content_light_metadata_create_side_data = FunctionResolver.GetFunctionDelegate("avutil", "av_content_light_metadata_create_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_content_light_metadata_create_side_data(@frame); + }; + + vectors.av_cpb_properties_alloc = (ulong* @size) => + { + vectors.av_cpb_properties_alloc = FunctionResolver.GetFunctionDelegate("avcodec", "av_cpb_properties_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_cpb_properties_alloc(@size); + }; + + vectors.av_cpu_count = () => + { + vectors.av_cpu_count = FunctionResolver.GetFunctionDelegate("avutil", "av_cpu_count", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_cpu_count(); + }; + + vectors.av_cpu_force_count = (int @count) => + { + vectors.av_cpu_force_count = FunctionResolver.GetFunctionDelegate("avutil", "av_cpu_force_count", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_cpu_force_count(@count); + }; + + vectors.av_cpu_max_align = () => + { + vectors.av_cpu_max_align = FunctionResolver.GetFunctionDelegate("avutil", "av_cpu_max_align", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_cpu_max_align(); + }; + + vectors.av_d2q = (double @d, int @max) => + { + vectors.av_d2q = FunctionResolver.GetFunctionDelegate("avutil", "av_d2q", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_d2q(@d, @max); + }; + + vectors.av_d3d11va_alloc_context = () => + { + vectors.av_d3d11va_alloc_context = FunctionResolver.GetFunctionDelegate("avcodec", "av_d3d11va_alloc_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_d3d11va_alloc_context(); + }; + + vectors.av_default_get_category = (void* @ptr) => + { + vectors.av_default_get_category = FunctionResolver.GetFunctionDelegate("avutil", "av_default_get_category", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_default_get_category(@ptr); + }; + + vectors.av_default_item_name = (void* @ctx) => + { + vectors.av_default_item_name = FunctionResolver.GetFunctionDelegate("avutil", "av_default_item_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_default_item_name(@ctx); + }; + + vectors.av_demuxer_iterate = (void** @opaque) => + { + vectors.av_demuxer_iterate = FunctionResolver.GetFunctionDelegate("avformat", "av_demuxer_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_demuxer_iterate(@opaque); + }; + + vectors.av_dict_copy = (AVDictionary** @dst, AVDictionary* @src, int @flags) => + { + vectors.av_dict_copy = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_copy", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dict_copy(@dst, @src, @flags); + }; + + vectors.av_dict_count = (AVDictionary* @m) => + { + vectors.av_dict_count = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_count", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_dict_count(@m); + }; + + vectors.av_dict_free = (AVDictionary** @m) => + { + vectors.av_dict_free = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_dict_free(@m); + }; + + vectors.av_dict_get = (AVDictionary* @m, string @key, AVDictionaryEntry* @prev, int @flags) => + { + vectors.av_dict_get = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_get", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dict_get(@m, @key, @prev, @flags); + }; + + vectors.av_dict_get_string = (AVDictionary* @m, byte** @buffer, byte @key_val_sep, byte @pairs_sep) => + { + vectors.av_dict_get_string = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_get_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dict_get_string(@m, @buffer, @key_val_sep, @pairs_sep); + }; + + vectors.av_dict_parse_string = (AVDictionary** @pm, string @str, string @key_val_sep, string @pairs_sep, int @flags) => + { + vectors.av_dict_parse_string = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_parse_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dict_parse_string(@pm, @str, @key_val_sep, @pairs_sep, @flags); + }; + + vectors.av_dict_set = (AVDictionary** @pm, string @key, string @value, int @flags) => + { + vectors.av_dict_set = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_set", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_dict_set(@pm, @key, @value, @flags); + }; + + vectors.av_dict_set_int = (AVDictionary** @pm, string @key, long @value, int @flags) => + { + vectors.av_dict_set_int = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_set_int", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dict_set_int(@pm, @key, @value, @flags); + }; + + vectors.av_disposition_from_string = (string @disp) => + { + vectors.av_disposition_from_string = FunctionResolver.GetFunctionDelegate("avformat", "av_disposition_from_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_disposition_from_string(@disp); + }; + + vectors.av_disposition_to_string = (int @disposition) => + { + vectors.av_disposition_to_string = FunctionResolver.GetFunctionDelegate("avformat", "av_disposition_to_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_disposition_to_string(@disposition); + }; + + vectors.av_div_q = (AVRational @b, AVRational @c) => + { + vectors.av_div_q = FunctionResolver.GetFunctionDelegate("avutil", "av_div_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_div_q(@b, @c); + }; + + vectors.av_dump_format = (AVFormatContext* @ic, int @index, string @url, int @is_output) => + { + vectors.av_dump_format = FunctionResolver.GetFunctionDelegate("avformat", "av_dump_format", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_dump_format(@ic, @index, @url, @is_output); + }; + + vectors.av_dynamic_hdr_plus_alloc = (ulong* @size) => + { + vectors.av_dynamic_hdr_plus_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_dynamic_hdr_plus_alloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_dynamic_hdr_plus_alloc(@size); + }; + + vectors.av_dynamic_hdr_plus_create_side_data = (AVFrame* @frame) => + { + vectors.av_dynamic_hdr_plus_create_side_data = FunctionResolver.GetFunctionDelegate("avutil", "av_dynamic_hdr_plus_create_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dynamic_hdr_plus_create_side_data(@frame); + }; + + vectors.av_dynarray_add = (void* @tab_ptr, int* @nb_ptr, void* @elem) => + { + vectors.av_dynarray_add = FunctionResolver.GetFunctionDelegate("avutil", "av_dynarray_add", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_dynarray_add(@tab_ptr, @nb_ptr, @elem); + }; + + vectors.av_dynarray_add_nofree = (void* @tab_ptr, int* @nb_ptr, void* @elem) => + { + vectors.av_dynarray_add_nofree = FunctionResolver.GetFunctionDelegate("avutil", "av_dynarray_add_nofree", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dynarray_add_nofree(@tab_ptr, @nb_ptr, @elem); + }; + + vectors.av_dynarray2_add = (void** @tab_ptr, int* @nb_ptr, ulong @elem_size, byte* @elem_data) => + { + vectors.av_dynarray2_add = FunctionResolver.GetFunctionDelegate("avutil", "av_dynarray2_add", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dynarray2_add(@tab_ptr, @nb_ptr, @elem_size, @elem_data); + }; + + vectors.av_fast_malloc = (void* @ptr, uint* @size, ulong @min_size) => + { + vectors.av_fast_malloc = FunctionResolver.GetFunctionDelegate("avutil", "av_fast_malloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_fast_malloc(@ptr, @size, @min_size); + }; + + vectors.av_fast_mallocz = (void* @ptr, uint* @size, ulong @min_size) => + { + vectors.av_fast_mallocz = FunctionResolver.GetFunctionDelegate("avutil", "av_fast_mallocz", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_fast_mallocz(@ptr, @size, @min_size); + }; + + vectors.av_fast_padded_malloc = (void* @ptr, uint* @size, ulong @min_size) => + { + vectors.av_fast_padded_malloc = FunctionResolver.GetFunctionDelegate("avcodec", "av_fast_padded_malloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_fast_padded_malloc(@ptr, @size, @min_size); + }; + + vectors.av_fast_padded_mallocz = (void* @ptr, uint* @size, ulong @min_size) => + { + vectors.av_fast_padded_mallocz = FunctionResolver.GetFunctionDelegate("avcodec", "av_fast_padded_mallocz", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_fast_padded_mallocz(@ptr, @size, @min_size); + }; + + vectors.av_fast_realloc = (void* @ptr, uint* @size, ulong @min_size) => + { + vectors.av_fast_realloc = FunctionResolver.GetFunctionDelegate("avutil", "av_fast_realloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_fast_realloc(@ptr, @size, @min_size); + }; + + vectors.av_file_map = (string @filename, byte** @bufptr, ulong* @size, int @log_offset, void* @log_ctx) => + { + vectors.av_file_map = FunctionResolver.GetFunctionDelegate("avutil", "av_file_map", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_file_map(@filename, @bufptr, @size, @log_offset, @log_ctx); + }; + + vectors.av_file_unmap = (byte* @bufptr, ulong @size) => + { + vectors.av_file_unmap = FunctionResolver.GetFunctionDelegate("avutil", "av_file_unmap", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_file_unmap(@bufptr, @size); + }; + + vectors.av_filename_number_test = (string @filename) => + { + vectors.av_filename_number_test = FunctionResolver.GetFunctionDelegate("avformat", "av_filename_number_test", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_filename_number_test(@filename); + }; + + vectors.av_filter_iterate = (void** @opaque) => + { + vectors.av_filter_iterate = FunctionResolver.GetFunctionDelegate("avfilter", "av_filter_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_filter_iterate(@opaque); + }; + + vectors.av_find_best_pix_fmt_of_2 = (AVPixelFormat @dst_pix_fmt1, AVPixelFormat @dst_pix_fmt2, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr) => + { + vectors.av_find_best_pix_fmt_of_2 = FunctionResolver.GetFunctionDelegate("avutil", "av_find_best_pix_fmt_of_2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_find_best_pix_fmt_of_2(@dst_pix_fmt1, @dst_pix_fmt2, @src_pix_fmt, @has_alpha, @loss_ptr); + }; + + vectors.av_find_best_stream = (AVFormatContext* @ic, AVMediaType @type, int @wanted_stream_nb, int @related_stream, AVCodec** @decoder_ret, int @flags) => + { + vectors.av_find_best_stream = FunctionResolver.GetFunctionDelegate("avformat", "av_find_best_stream", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_find_best_stream(@ic, @type, @wanted_stream_nb, @related_stream, @decoder_ret, @flags); + }; + + vectors.av_find_default_stream_index = (AVFormatContext* @s) => + { + vectors.av_find_default_stream_index = FunctionResolver.GetFunctionDelegate("avformat", "av_find_default_stream_index", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_find_default_stream_index(@s); + }; + + vectors.av_find_input_format = (string @short_name) => + { + vectors.av_find_input_format = FunctionResolver.GetFunctionDelegate("avformat", "av_find_input_format", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_find_input_format(@short_name); + }; + + vectors.av_find_nearest_q_idx = (AVRational @q, AVRational* @q_list) => + { + vectors.av_find_nearest_q_idx = FunctionResolver.GetFunctionDelegate("avutil", "av_find_nearest_q_idx", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_find_nearest_q_idx(@q, @q_list); + }; + + vectors.av_find_program_from_stream = (AVFormatContext* @ic, AVProgram* @last, int @s) => + { + vectors.av_find_program_from_stream = FunctionResolver.GetFunctionDelegate("avformat", "av_find_program_from_stream", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_find_program_from_stream(@ic, @last, @s); + }; + + vectors.av_fmt_ctx_get_duration_estimation_method = (AVFormatContext* @ctx) => + { + vectors.av_fmt_ctx_get_duration_estimation_method = FunctionResolver.GetFunctionDelegate("avformat", "av_fmt_ctx_get_duration_estimation_method", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_fmt_ctx_get_duration_estimation_method(@ctx); + }; + + vectors.av_fopen_utf8 = (string @path, string @mode) => + { + vectors.av_fopen_utf8 = FunctionResolver.GetFunctionDelegate("avutil", "av_fopen_utf8", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_fopen_utf8(@path, @mode); + }; + + vectors.av_force_cpu_flags = (int @flags) => + { + vectors.av_force_cpu_flags = FunctionResolver.GetFunctionDelegate("avutil", "av_force_cpu_flags", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_force_cpu_flags(@flags); + }; + + vectors.av_format_inject_global_side_data = (AVFormatContext* @s) => + { + vectors.av_format_inject_global_side_data = FunctionResolver.GetFunctionDelegate("avformat", "av_format_inject_global_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_format_inject_global_side_data(@s); + }; + + vectors.av_fourcc_make_string = (byte* @buf, uint @fourcc) => + { + vectors.av_fourcc_make_string = FunctionResolver.GetFunctionDelegate("avutil", "av_fourcc_make_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_fourcc_make_string(@buf, @fourcc); + }; + + vectors.av_frame_alloc = () => + { + vectors.av_frame_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_alloc(); + }; + + vectors.av_frame_apply_cropping = (AVFrame* @frame, int @flags) => + { + vectors.av_frame_apply_cropping = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_apply_cropping", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_apply_cropping(@frame, @flags); + }; + + vectors.av_frame_clone = (AVFrame* @src) => + { + vectors.av_frame_clone = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_clone", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_clone(@src); + }; + + vectors.av_frame_copy = (AVFrame* @dst, AVFrame* @src) => + { + vectors.av_frame_copy = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_copy", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_frame_copy(@dst, @src); + }; + + vectors.av_frame_copy_props = (AVFrame* @dst, AVFrame* @src) => + { + vectors.av_frame_copy_props = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_copy_props", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_copy_props(@dst, @src); + }; + + vectors.av_frame_free = (AVFrame** @frame) => + { + vectors.av_frame_free = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_frame_free(@frame); + }; + + vectors.av_frame_get_buffer = (AVFrame* @frame, int @align) => + { + vectors.av_frame_get_buffer = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_get_buffer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_get_buffer(@frame, @align); + }; + + vectors.av_frame_get_plane_buffer = (AVFrame* @frame, int @plane) => + { + vectors.av_frame_get_plane_buffer = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_get_plane_buffer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_get_plane_buffer(@frame, @plane); + }; + + vectors.av_frame_get_side_data = (AVFrame* @frame, AVFrameSideDataType @type) => + { + vectors.av_frame_get_side_data = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_get_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_get_side_data(@frame, @type); + }; + + vectors.av_frame_is_writable = (AVFrame* @frame) => + { + vectors.av_frame_is_writable = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_is_writable", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_frame_is_writable(@frame); + }; + + vectors.av_frame_make_writable = (AVFrame* @frame) => + { + vectors.av_frame_make_writable = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_make_writable", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_make_writable(@frame); + }; + + vectors.av_frame_move_ref = (AVFrame* @dst, AVFrame* @src) => + { + vectors.av_frame_move_ref = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_move_ref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_frame_move_ref(@dst, @src); + }; + + vectors.av_frame_new_side_data = (AVFrame* @frame, AVFrameSideDataType @type, ulong @size) => + { + vectors.av_frame_new_side_data = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_new_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_new_side_data(@frame, @type, @size); + }; + + vectors.av_frame_new_side_data_from_buf = (AVFrame* @frame, AVFrameSideDataType @type, AVBufferRef* @buf) => + { + vectors.av_frame_new_side_data_from_buf = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_new_side_data_from_buf", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_new_side_data_from_buf(@frame, @type, @buf); + }; + + vectors.av_frame_ref = (AVFrame* @dst, AVFrame* @src) => + { + vectors.av_frame_ref = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_ref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_ref(@dst, @src); + }; + + vectors.av_frame_remove_side_data = (AVFrame* @frame, AVFrameSideDataType @type) => + { + vectors.av_frame_remove_side_data = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_remove_side_data", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_frame_remove_side_data(@frame, @type); + }; + + vectors.av_frame_side_data_name = (AVFrameSideDataType @type) => + { + vectors.av_frame_side_data_name = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_side_data_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_side_data_name(@type); + }; + + vectors.av_frame_unref = (AVFrame* @frame) => + { + vectors.av_frame_unref = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_unref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_frame_unref(@frame); + }; + + vectors.av_free = (void* @ptr) => + { + vectors.av_free = FunctionResolver.GetFunctionDelegate("avutil", "av_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_free(@ptr); + }; + + vectors.av_freep = (void* @ptr) => + { + vectors.av_freep = FunctionResolver.GetFunctionDelegate("avutil", "av_freep", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_freep(@ptr); + }; + + vectors.av_gcd = (long @a, long @b) => + { + vectors.av_gcd = FunctionResolver.GetFunctionDelegate("avutil", "av_gcd", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_gcd(@a, @b); + }; + + vectors.av_gcd_q = (AVRational @a, AVRational @b, int @max_den, AVRational @def) => + { + vectors.av_gcd_q = FunctionResolver.GetFunctionDelegate("avutil", "av_gcd_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_gcd_q(@a, @b, @max_den, @def); + }; + + vectors.av_get_alt_sample_fmt = (AVSampleFormat @sample_fmt, int @planar) => + { + vectors.av_get_alt_sample_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_get_alt_sample_fmt", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_get_alt_sample_fmt(@sample_fmt, @planar); + }; + + vectors.av_get_audio_frame_duration = (AVCodecContext* @avctx, int @frame_bytes) => + { + vectors.av_get_audio_frame_duration = FunctionResolver.GetFunctionDelegate("avcodec", "av_get_audio_frame_duration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_audio_frame_duration(@avctx, @frame_bytes); + }; + + vectors.av_get_audio_frame_duration2 = (AVCodecParameters* @par, int @frame_bytes) => + { + vectors.av_get_audio_frame_duration2 = FunctionResolver.GetFunctionDelegate("avcodec", "av_get_audio_frame_duration2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_audio_frame_duration2(@par, @frame_bytes); + }; + + vectors.av_get_bits_per_pixel = (AVPixFmtDescriptor* @pixdesc) => + { + vectors.av_get_bits_per_pixel = FunctionResolver.GetFunctionDelegate("avutil", "av_get_bits_per_pixel", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_bits_per_pixel(@pixdesc); + }; + + vectors.av_get_bits_per_sample = (AVCodecID @codec_id) => + { + vectors.av_get_bits_per_sample = FunctionResolver.GetFunctionDelegate("avcodec", "av_get_bits_per_sample", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_bits_per_sample(@codec_id); + }; + + vectors.av_get_bytes_per_sample = (AVSampleFormat @sample_fmt) => + { + vectors.av_get_bytes_per_sample = FunctionResolver.GetFunctionDelegate("avutil", "av_get_bytes_per_sample", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_get_bytes_per_sample(@sample_fmt); + }; + + vectors.av_get_channel_description = (ulong @channel) => + { + vectors.av_get_channel_description = FunctionResolver.GetFunctionDelegate("avutil", "av_get_channel_description", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_channel_description(@channel); + }; + + vectors.av_get_channel_layout = (string @name) => + { + vectors.av_get_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_get_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_channel_layout(@name); + }; + + vectors.av_get_channel_layout_channel_index = (ulong @channel_layout, ulong @channel) => + { + vectors.av_get_channel_layout_channel_index = FunctionResolver.GetFunctionDelegate("avutil", "av_get_channel_layout_channel_index", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_channel_layout_channel_index(@channel_layout, @channel); + }; + + vectors.av_get_channel_layout_nb_channels = (ulong @channel_layout) => + { + vectors.av_get_channel_layout_nb_channels = FunctionResolver.GetFunctionDelegate("avutil", "av_get_channel_layout_nb_channels", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_channel_layout_nb_channels(@channel_layout); + }; + + vectors.av_get_channel_layout_string = (byte* @buf, int @buf_size, int @nb_channels, ulong @channel_layout) => + { + vectors.av_get_channel_layout_string = FunctionResolver.GetFunctionDelegate("avutil", "av_get_channel_layout_string", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_get_channel_layout_string(@buf, @buf_size, @nb_channels, @channel_layout); + }; + + vectors.av_get_channel_name = (ulong @channel) => + { + vectors.av_get_channel_name = FunctionResolver.GetFunctionDelegate("avutil", "av_get_channel_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_channel_name(@channel); + }; + + vectors.av_get_colorspace_name = (AVColorSpace @val) => + { + vectors.av_get_colorspace_name = FunctionResolver.GetFunctionDelegate("avutil", "av_get_colorspace_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_colorspace_name(@val); + }; + + vectors.av_get_cpu_flags = () => + { + vectors.av_get_cpu_flags = FunctionResolver.GetFunctionDelegate("avutil", "av_get_cpu_flags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_cpu_flags(); + }; + + vectors.av_get_default_channel_layout = (int @nb_channels) => + { + vectors.av_get_default_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_get_default_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_default_channel_layout(@nb_channels); + }; + + vectors.av_get_exact_bits_per_sample = (AVCodecID @codec_id) => + { + vectors.av_get_exact_bits_per_sample = FunctionResolver.GetFunctionDelegate("avcodec", "av_get_exact_bits_per_sample", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_exact_bits_per_sample(@codec_id); + }; + + vectors.av_get_extended_channel_layout = (string @name, ulong* @channel_layout, int* @nb_channels) => + { + vectors.av_get_extended_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_get_extended_channel_layout", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_get_extended_channel_layout(@name, @channel_layout, @nb_channels); + }; + + vectors.av_get_frame_filename = (byte* @buf, int @buf_size, string @path, int @number) => + { + vectors.av_get_frame_filename = FunctionResolver.GetFunctionDelegate("avformat", "av_get_frame_filename", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_frame_filename(@buf, @buf_size, @path, @number); + }; + + vectors.av_get_frame_filename2 = (byte* @buf, int @buf_size, string @path, int @number, int @flags) => + { + vectors.av_get_frame_filename2 = FunctionResolver.GetFunctionDelegate("avformat", "av_get_frame_filename2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_frame_filename2(@buf, @buf_size, @path, @number, @flags); + }; + + vectors.av_get_media_type_string = (AVMediaType @media_type) => + { + vectors.av_get_media_type_string = FunctionResolver.GetFunctionDelegate("avutil", "av_get_media_type_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_media_type_string(@media_type); + }; + + vectors.av_get_output_timestamp = (AVFormatContext* @s, int @stream, long* @dts, long* @wall) => + { + vectors.av_get_output_timestamp = FunctionResolver.GetFunctionDelegate("avformat", "av_get_output_timestamp", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_output_timestamp(@s, @stream, @dts, @wall); + }; + + vectors.av_get_packed_sample_fmt = (AVSampleFormat @sample_fmt) => + { + vectors.av_get_packed_sample_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_get_packed_sample_fmt", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_get_packed_sample_fmt(@sample_fmt); + }; + + vectors.av_get_packet = (AVIOContext* @s, AVPacket* @pkt, int @size) => + { + vectors.av_get_packet = FunctionResolver.GetFunctionDelegate("avformat", "av_get_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_packet(@s, @pkt, @size); + }; + + vectors.av_get_padded_bits_per_pixel = (AVPixFmtDescriptor* @pixdesc) => + { + vectors.av_get_padded_bits_per_pixel = FunctionResolver.GetFunctionDelegate("avutil", "av_get_padded_bits_per_pixel", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_padded_bits_per_pixel(@pixdesc); + }; + + vectors.av_get_pcm_codec = (AVSampleFormat @fmt, int @be) => + { + vectors.av_get_pcm_codec = FunctionResolver.GetFunctionDelegate("avcodec", "av_get_pcm_codec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_pcm_codec(@fmt, @be); + }; + + vectors.av_get_picture_type_char = (AVPictureType @pict_type) => + { + vectors.av_get_picture_type_char = FunctionResolver.GetFunctionDelegate("avutil", "av_get_picture_type_char", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_picture_type_char(@pict_type); + }; + + vectors.av_get_pix_fmt = (string @name) => + { + vectors.av_get_pix_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_get_pix_fmt", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_pix_fmt(@name); + }; + + vectors.av_get_pix_fmt_loss = (AVPixelFormat @dst_pix_fmt, AVPixelFormat @src_pix_fmt, int @has_alpha) => + { + vectors.av_get_pix_fmt_loss = FunctionResolver.GetFunctionDelegate("avutil", "av_get_pix_fmt_loss", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_get_pix_fmt_loss(@dst_pix_fmt, @src_pix_fmt, @has_alpha); + }; + + vectors.av_get_pix_fmt_name = (AVPixelFormat @pix_fmt) => + { + vectors.av_get_pix_fmt_name = FunctionResolver.GetFunctionDelegate("avutil", "av_get_pix_fmt_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_pix_fmt_name(@pix_fmt); + }; + + vectors.av_get_pix_fmt_string = (byte* @buf, int @buf_size, AVPixelFormat @pix_fmt) => + { + vectors.av_get_pix_fmt_string = FunctionResolver.GetFunctionDelegate("avutil", "av_get_pix_fmt_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_pix_fmt_string(@buf, @buf_size, @pix_fmt); + }; + + vectors.av_get_planar_sample_fmt = (AVSampleFormat @sample_fmt) => + { + vectors.av_get_planar_sample_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_get_planar_sample_fmt", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_planar_sample_fmt(@sample_fmt); + }; + + vectors.av_get_profile_name = (AVCodec* @codec, int @profile) => + { + vectors.av_get_profile_name = FunctionResolver.GetFunctionDelegate("avcodec", "av_get_profile_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_profile_name(@codec, @profile); + }; + + vectors.av_get_sample_fmt = (string @name) => + { + vectors.av_get_sample_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_get_sample_fmt", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_sample_fmt(@name); + }; + + vectors.av_get_sample_fmt_name = (AVSampleFormat @sample_fmt) => + { + vectors.av_get_sample_fmt_name = FunctionResolver.GetFunctionDelegate("avutil", "av_get_sample_fmt_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_get_sample_fmt_name(@sample_fmt); + }; + + vectors.av_get_sample_fmt_string = (byte* @buf, int @buf_size, AVSampleFormat @sample_fmt) => + { + vectors.av_get_sample_fmt_string = FunctionResolver.GetFunctionDelegate("avutil", "av_get_sample_fmt_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_sample_fmt_string(@buf, @buf_size, @sample_fmt); + }; + + vectors.av_get_standard_channel_layout = (uint @index, ulong* @layout, byte** @name) => + { + vectors.av_get_standard_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_get_standard_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_standard_channel_layout(@index, @layout, @name); + }; + + vectors.av_get_time_base_q = () => + { + vectors.av_get_time_base_q = FunctionResolver.GetFunctionDelegate("avutil", "av_get_time_base_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_time_base_q(); + }; + + vectors.av_gettime = () => + { + vectors.av_gettime = FunctionResolver.GetFunctionDelegate("avutil", "av_gettime", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_gettime(); + }; + + vectors.av_gettime_relative = () => + { + vectors.av_gettime_relative = FunctionResolver.GetFunctionDelegate("avutil", "av_gettime_relative", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_gettime_relative(); + }; + + vectors.av_gettime_relative_is_monotonic = () => + { + vectors.av_gettime_relative_is_monotonic = FunctionResolver.GetFunctionDelegate("avutil", "av_gettime_relative_is_monotonic", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_gettime_relative_is_monotonic(); + }; + + vectors.av_grow_packet = (AVPacket* @pkt, int @grow_by) => + { + vectors.av_grow_packet = FunctionResolver.GetFunctionDelegate("avcodec", "av_grow_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_grow_packet(@pkt, @grow_by); + }; + + vectors.av_guess_codec = (AVOutputFormat* @fmt, string @short_name, string @filename, string @mime_type, AVMediaType @type) => + { + vectors.av_guess_codec = FunctionResolver.GetFunctionDelegate("avformat", "av_guess_codec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_guess_codec(@fmt, @short_name, @filename, @mime_type, @type); + }; + + vectors.av_guess_format = (string @short_name, string @filename, string @mime_type) => + { + vectors.av_guess_format = FunctionResolver.GetFunctionDelegate("avformat", "av_guess_format", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_guess_format(@short_name, @filename, @mime_type); + }; + + vectors.av_guess_frame_rate = (AVFormatContext* @ctx, AVStream* @stream, AVFrame* @frame) => + { + vectors.av_guess_frame_rate = FunctionResolver.GetFunctionDelegate("avformat", "av_guess_frame_rate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_guess_frame_rate(@ctx, @stream, @frame); + }; + + vectors.av_guess_sample_aspect_ratio = (AVFormatContext* @format, AVStream* @stream, AVFrame* @frame) => + { + vectors.av_guess_sample_aspect_ratio = FunctionResolver.GetFunctionDelegate("avformat", "av_guess_sample_aspect_ratio", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_guess_sample_aspect_ratio(@format, @stream, @frame); + }; + + vectors.av_hex_dump = (_iobuf* @f, byte* @buf, int @size) => + { + vectors.av_hex_dump = FunctionResolver.GetFunctionDelegate("avformat", "av_hex_dump", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_hex_dump(@f, @buf, @size); + }; + + vectors.av_hex_dump_log = (void* @avcl, int @level, byte* @buf, int @size) => + { + vectors.av_hex_dump_log = FunctionResolver.GetFunctionDelegate("avformat", "av_hex_dump_log", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_hex_dump_log(@avcl, @level, @buf, @size); + }; + + vectors.av_hwdevice_ctx_alloc = (AVHWDeviceType @type) => + { + vectors.av_hwdevice_ctx_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_ctx_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_ctx_alloc(@type); + }; + + vectors.av_hwdevice_ctx_create = (AVBufferRef** @device_ctx, AVHWDeviceType @type, string @device, AVDictionary* @opts, int @flags) => + { + vectors.av_hwdevice_ctx_create = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_ctx_create", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_ctx_create(@device_ctx, @type, @device, @opts, @flags); + }; + + vectors.av_hwdevice_ctx_create_derived = (AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, int @flags) => + { + vectors.av_hwdevice_ctx_create_derived = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_ctx_create_derived", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_ctx_create_derived(@dst_ctx, @type, @src_ctx, @flags); + }; + + vectors.av_hwdevice_ctx_create_derived_opts = (AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, AVDictionary* @options, int @flags) => + { + vectors.av_hwdevice_ctx_create_derived_opts = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_ctx_create_derived_opts", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_ctx_create_derived_opts(@dst_ctx, @type, @src_ctx, @options, @flags); + }; + + vectors.av_hwdevice_ctx_init = (AVBufferRef* @ref) => + { + vectors.av_hwdevice_ctx_init = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_ctx_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_ctx_init(@ref); + }; + + vectors.av_hwdevice_find_type_by_name = (string @name) => + { + vectors.av_hwdevice_find_type_by_name = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_find_type_by_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_find_type_by_name(@name); + }; + + vectors.av_hwdevice_get_hwframe_constraints = (AVBufferRef* @ref, void* @hwconfig) => + { + vectors.av_hwdevice_get_hwframe_constraints = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_get_hwframe_constraints", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_get_hwframe_constraints(@ref, @hwconfig); + }; + + vectors.av_hwdevice_get_type_name = (AVHWDeviceType @type) => + { + vectors.av_hwdevice_get_type_name = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_get_type_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_get_type_name(@type); + }; + + vectors.av_hwdevice_hwconfig_alloc = (AVBufferRef* @device_ctx) => + { + vectors.av_hwdevice_hwconfig_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_hwconfig_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_hwconfig_alloc(@device_ctx); + }; + + vectors.av_hwdevice_iterate_types = (AVHWDeviceType @prev) => + { + vectors.av_hwdevice_iterate_types = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_iterate_types", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_iterate_types(@prev); + }; + + vectors.av_hwframe_constraints_free = (AVHWFramesConstraints** @constraints) => + { + vectors.av_hwframe_constraints_free = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_constraints_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_hwframe_constraints_free(@constraints); + }; + + vectors.av_hwframe_ctx_alloc = (AVBufferRef* @device_ctx) => + { + vectors.av_hwframe_ctx_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_ctx_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_ctx_alloc(@device_ctx); + }; + + vectors.av_hwframe_ctx_create_derived = (AVBufferRef** @derived_frame_ctx, AVPixelFormat @format, AVBufferRef* @derived_device_ctx, AVBufferRef* @source_frame_ctx, int @flags) => + { + vectors.av_hwframe_ctx_create_derived = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_ctx_create_derived", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_ctx_create_derived(@derived_frame_ctx, @format, @derived_device_ctx, @source_frame_ctx, @flags); + }; + + vectors.av_hwframe_ctx_init = (AVBufferRef* @ref) => + { + vectors.av_hwframe_ctx_init = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_ctx_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_ctx_init(@ref); + }; + + vectors.av_hwframe_get_buffer = (AVBufferRef* @hwframe_ctx, AVFrame* @frame, int @flags) => + { + vectors.av_hwframe_get_buffer = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_get_buffer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_get_buffer(@hwframe_ctx, @frame, @flags); + }; + + vectors.av_hwframe_map = (AVFrame* @dst, AVFrame* @src, int @flags) => + { + vectors.av_hwframe_map = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_map", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_map(@dst, @src, @flags); + }; + + vectors.av_hwframe_transfer_data = (AVFrame* @dst, AVFrame* @src, int @flags) => + { + vectors.av_hwframe_transfer_data = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_transfer_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_transfer_data(@dst, @src, @flags); + }; + + vectors.av_hwframe_transfer_get_formats = (AVBufferRef* @hwframe_ctx, AVHWFrameTransferDirection @dir, AVPixelFormat** @formats, int @flags) => + { + vectors.av_hwframe_transfer_get_formats = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_transfer_get_formats", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_transfer_get_formats(@hwframe_ctx, @dir, @formats, @flags); + }; + + vectors.av_image_alloc = (ref byte_ptr4 @pointers, ref int4 @linesizes, int @w, int @h, AVPixelFormat @pix_fmt, int @align) => + { + vectors.av_image_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_image_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_alloc(ref @pointers, ref @linesizes, @w, @h, @pix_fmt, @align); + }; + + vectors.av_image_check_sar = (uint @w, uint @h, AVRational @sar) => + { + vectors.av_image_check_sar = FunctionResolver.GetFunctionDelegate("avutil", "av_image_check_sar", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_check_sar(@w, @h, @sar); + }; + + vectors.av_image_check_size = (uint @w, uint @h, int @log_offset, void* @log_ctx) => + { + vectors.av_image_check_size = FunctionResolver.GetFunctionDelegate("avutil", "av_image_check_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_check_size(@w, @h, @log_offset, @log_ctx); + }; + + vectors.av_image_check_size2 = (uint @w, uint @h, long @max_pixels, AVPixelFormat @pix_fmt, int @log_offset, void* @log_ctx) => + { + vectors.av_image_check_size2 = FunctionResolver.GetFunctionDelegate("avutil", "av_image_check_size2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_check_size2(@w, @h, @max_pixels, @pix_fmt, @log_offset, @log_ctx); + }; + + vectors.av_image_copy = (ref byte_ptr4 @dst_data, ref int4 @dst_linesizes, in byte_ptr4 @src_data, in int4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height) => + { + vectors.av_image_copy = FunctionResolver.GetFunctionDelegate("avutil", "av_image_copy", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_image_copy(ref @dst_data, ref @dst_linesizes, @src_data, @src_linesizes, @pix_fmt, @width, @height); + }; + + vectors.av_image_copy_plane = (byte* @dst, int @dst_linesize, byte* @src, int @src_linesize, int @bytewidth, int @height) => + { + vectors.av_image_copy_plane = FunctionResolver.GetFunctionDelegate("avutil", "av_image_copy_plane", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_image_copy_plane(@dst, @dst_linesize, @src, @src_linesize, @bytewidth, @height); + }; + + vectors.av_image_copy_plane_uc_from = (byte* @dst, long @dst_linesize, byte* @src, long @src_linesize, long @bytewidth, int @height) => + { + vectors.av_image_copy_plane_uc_from = FunctionResolver.GetFunctionDelegate("avutil", "av_image_copy_plane_uc_from", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_image_copy_plane_uc_from(@dst, @dst_linesize, @src, @src_linesize, @bytewidth, @height); + }; + + vectors.av_image_copy_to_buffer = (byte* @dst, int @dst_size, in byte_ptr4 @src_data, in int4 @src_linesize, AVPixelFormat @pix_fmt, int @width, int @height, int @align) => + { + vectors.av_image_copy_to_buffer = FunctionResolver.GetFunctionDelegate("avutil", "av_image_copy_to_buffer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_copy_to_buffer(@dst, @dst_size, @src_data, @src_linesize, @pix_fmt, @width, @height, @align); + }; + + vectors.av_image_copy_uc_from = (ref byte_ptr4 @dst_data, in long4 @dst_linesizes, in byte_ptr4 @src_data, in long4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height) => + { + vectors.av_image_copy_uc_from = FunctionResolver.GetFunctionDelegate("avutil", "av_image_copy_uc_from", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_image_copy_uc_from(ref @dst_data, @dst_linesizes, @src_data, @src_linesizes, @pix_fmt, @width, @height); + }; + + vectors.av_image_fill_arrays = (ref byte_ptr4 @dst_data, ref int4 @dst_linesize, byte* @src, AVPixelFormat @pix_fmt, int @width, int @height, int @align) => + { + vectors.av_image_fill_arrays = FunctionResolver.GetFunctionDelegate("avutil", "av_image_fill_arrays", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_fill_arrays(ref @dst_data, ref @dst_linesize, @src, @pix_fmt, @width, @height, @align); + }; + + vectors.av_image_fill_black = (ref byte_ptr4 @dst_data, in long4 @dst_linesize, AVPixelFormat @pix_fmt, AVColorRange @range, int @width, int @height) => + { + vectors.av_image_fill_black = FunctionResolver.GetFunctionDelegate("avutil", "av_image_fill_black", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_fill_black(ref @dst_data, @dst_linesize, @pix_fmt, @range, @width, @height); + }; + + vectors.av_image_fill_linesizes = (ref int4 @linesizes, AVPixelFormat @pix_fmt, int @width) => + { + vectors.av_image_fill_linesizes = FunctionResolver.GetFunctionDelegate("avutil", "av_image_fill_linesizes", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_fill_linesizes(ref @linesizes, @pix_fmt, @width); + }; + + vectors.av_image_fill_max_pixsteps = (ref int4 @max_pixsteps, ref int4 @max_pixstep_comps, AVPixFmtDescriptor* @pixdesc) => + { + vectors.av_image_fill_max_pixsteps = FunctionResolver.GetFunctionDelegate("avutil", "av_image_fill_max_pixsteps", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_image_fill_max_pixsteps(ref @max_pixsteps, ref @max_pixstep_comps, @pixdesc); + }; + + vectors.av_image_fill_plane_sizes = (ref ulong4 @size, AVPixelFormat @pix_fmt, int @height, in long4 @linesizes) => + { + vectors.av_image_fill_plane_sizes = FunctionResolver.GetFunctionDelegate("avutil", "av_image_fill_plane_sizes", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_fill_plane_sizes(ref @size, @pix_fmt, @height, @linesizes); + }; + + vectors.av_image_fill_pointers = (ref byte_ptr4 @data, AVPixelFormat @pix_fmt, int @height, byte* @ptr, in int4 @linesizes) => + { + vectors.av_image_fill_pointers = FunctionResolver.GetFunctionDelegate("avutil", "av_image_fill_pointers", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_fill_pointers(ref @data, @pix_fmt, @height, @ptr, @linesizes); + }; + + vectors.av_image_get_buffer_size = (AVPixelFormat @pix_fmt, int @width, int @height, int @align) => + { + vectors.av_image_get_buffer_size = FunctionResolver.GetFunctionDelegate("avutil", "av_image_get_buffer_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_get_buffer_size(@pix_fmt, @width, @height, @align); + }; + + vectors.av_image_get_linesize = (AVPixelFormat @pix_fmt, int @width, int @plane) => + { + vectors.av_image_get_linesize = FunctionResolver.GetFunctionDelegate("avutil", "av_image_get_linesize", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_get_linesize(@pix_fmt, @width, @plane); + }; + + vectors.av_index_search_timestamp = (AVStream* @st, long @timestamp, int @flags) => + { + vectors.av_index_search_timestamp = FunctionResolver.GetFunctionDelegate("avformat", "av_index_search_timestamp", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_index_search_timestamp(@st, @timestamp, @flags); + }; + + vectors.av_init_packet = (AVPacket* @pkt) => + { + vectors.av_init_packet = FunctionResolver.GetFunctionDelegate("avcodec", "av_init_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_init_packet(@pkt); + }; + + vectors.av_input_audio_device_next = (AVInputFormat* @d) => + { + vectors.av_input_audio_device_next = FunctionResolver.GetFunctionDelegate("avdevice", "av_input_audio_device_next", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_input_audio_device_next(@d); + }; + + vectors.av_input_video_device_next = (AVInputFormat* @d) => + { + vectors.av_input_video_device_next = FunctionResolver.GetFunctionDelegate("avdevice", "av_input_video_device_next", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_input_video_device_next(@d); + }; + + vectors.av_int_list_length_for_size = (uint @elsize, void* @list, ulong @term) => + { + vectors.av_int_list_length_for_size = FunctionResolver.GetFunctionDelegate("avutil", "av_int_list_length_for_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_int_list_length_for_size(@elsize, @list, @term); + }; + + vectors.av_interleaved_write_frame = (AVFormatContext* @s, AVPacket* @pkt) => + { + vectors.av_interleaved_write_frame = FunctionResolver.GetFunctionDelegate("avformat", "av_interleaved_write_frame", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_interleaved_write_frame(@s, @pkt); + }; + + vectors.av_interleaved_write_uncoded_frame = (AVFormatContext* @s, int @stream_index, AVFrame* @frame) => + { + vectors.av_interleaved_write_uncoded_frame = FunctionResolver.GetFunctionDelegate("avformat", "av_interleaved_write_uncoded_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_interleaved_write_uncoded_frame(@s, @stream_index, @frame); + }; + + vectors.av_log = (void* @avcl, int @level, string @fmt) => + { + vectors.av_log = FunctionResolver.GetFunctionDelegate("avutil", "av_log", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_log(@avcl, @level, @fmt); + }; + + vectors.av_log_default_callback = (void* @avcl, int @level, string @fmt, byte* @vl) => + { + vectors.av_log_default_callback = FunctionResolver.GetFunctionDelegate("avutil", "av_log_default_callback", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_log_default_callback(@avcl, @level, @fmt, @vl); + }; + + vectors.av_log_format_line = (void* @ptr, int @level, string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix) => + { + vectors.av_log_format_line = FunctionResolver.GetFunctionDelegate("avutil", "av_log_format_line", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_log_format_line(@ptr, @level, @fmt, @vl, @line, @line_size, @print_prefix); + }; + + vectors.av_log_format_line2 = (void* @ptr, int @level, string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix) => + { + vectors.av_log_format_line2 = FunctionResolver.GetFunctionDelegate("avutil", "av_log_format_line2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_log_format_line2(@ptr, @level, @fmt, @vl, @line, @line_size, @print_prefix); + }; + + vectors.av_log_get_flags = () => + { + vectors.av_log_get_flags = FunctionResolver.GetFunctionDelegate("avutil", "av_log_get_flags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_log_get_flags(); + }; + + vectors.av_log_get_level = () => + { + vectors.av_log_get_level = FunctionResolver.GetFunctionDelegate("avutil", "av_log_get_level", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_log_get_level(); + }; + + vectors.av_log_once = (void* @avcl, int @initial_level, int @subsequent_level, int* @state, string @fmt) => + { + vectors.av_log_once = FunctionResolver.GetFunctionDelegate("avutil", "av_log_once", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_log_once(@avcl, @initial_level, @subsequent_level, @state, @fmt); + }; + + vectors.av_log_set_callback = (av_log_set_callback_callback_func @callback) => + { + vectors.av_log_set_callback = FunctionResolver.GetFunctionDelegate("avutil", "av_log_set_callback", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_log_set_callback(@callback); + }; + + vectors.av_log_set_flags = (int @arg) => + { + vectors.av_log_set_flags = FunctionResolver.GetFunctionDelegate("avutil", "av_log_set_flags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_log_set_flags(@arg); + }; + + vectors.av_log_set_level = (int @level) => + { + vectors.av_log_set_level = FunctionResolver.GetFunctionDelegate("avutil", "av_log_set_level", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_log_set_level(@level); + }; + + vectors.av_log2 = (uint @v) => + { + vectors.av_log2 = FunctionResolver.GetFunctionDelegate("avutil", "av_log2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_log2(@v); + }; + + vectors.av_log2_16bit = (uint @v) => + { + vectors.av_log2_16bit = FunctionResolver.GetFunctionDelegate("avutil", "av_log2_16bit", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_log2_16bit(@v); + }; + + vectors.av_malloc = (ulong @size) => + { + vectors.av_malloc = FunctionResolver.GetFunctionDelegate("avutil", "av_malloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_malloc(@size); + }; + + vectors.av_malloc_array = (ulong @nmemb, ulong @size) => + { + vectors.av_malloc_array = FunctionResolver.GetFunctionDelegate("avutil", "av_malloc_array", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_malloc_array(@nmemb, @size); + }; + + vectors.av_mallocz = (ulong @size) => + { + vectors.av_mallocz = FunctionResolver.GetFunctionDelegate("avutil", "av_mallocz", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_mallocz(@size); + }; + + vectors.av_mallocz_array = (ulong @nmemb, ulong @size) => + { + vectors.av_mallocz_array = FunctionResolver.GetFunctionDelegate("avutil", "av_mallocz_array", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_mallocz_array(@nmemb, @size); + }; + + vectors.av_mastering_display_metadata_alloc = () => + { + vectors.av_mastering_display_metadata_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_mastering_display_metadata_alloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_mastering_display_metadata_alloc(); + }; + + vectors.av_mastering_display_metadata_create_side_data = (AVFrame* @frame) => + { + vectors.av_mastering_display_metadata_create_side_data = FunctionResolver.GetFunctionDelegate("avutil", "av_mastering_display_metadata_create_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_mastering_display_metadata_create_side_data(@frame); + }; + + vectors.av_match_ext = (string @filename, string @extensions) => + { + vectors.av_match_ext = FunctionResolver.GetFunctionDelegate("avformat", "av_match_ext", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_match_ext(@filename, @extensions); + }; + + vectors.av_max_alloc = (ulong @max) => + { + vectors.av_max_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_max_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_max_alloc(@max); + }; + + vectors.av_memcpy_backptr = (byte* @dst, int @back, int @cnt) => + { + vectors.av_memcpy_backptr = FunctionResolver.GetFunctionDelegate("avutil", "av_memcpy_backptr", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_memcpy_backptr(@dst, @back, @cnt); + }; + + vectors.av_memdup = (void* @p, ulong @size) => + { + vectors.av_memdup = FunctionResolver.GetFunctionDelegate("avutil", "av_memdup", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_memdup(@p, @size); + }; + + vectors.av_mul_q = (AVRational @b, AVRational @c) => + { + vectors.av_mul_q = FunctionResolver.GetFunctionDelegate("avutil", "av_mul_q", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_mul_q(@b, @c); + }; + + vectors.av_muxer_iterate = (void** @opaque) => + { + vectors.av_muxer_iterate = FunctionResolver.GetFunctionDelegate("avformat", "av_muxer_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_muxer_iterate(@opaque); + }; + + vectors.av_nearer_q = (AVRational @q, AVRational @q1, AVRational @q2) => + { + vectors.av_nearer_q = FunctionResolver.GetFunctionDelegate("avutil", "av_nearer_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_nearer_q(@q, @q1, @q2); + }; + + vectors.av_new_packet = (AVPacket* @pkt, int @size) => + { + vectors.av_new_packet = FunctionResolver.GetFunctionDelegate("avcodec", "av_new_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_new_packet(@pkt, @size); + }; + + vectors.av_new_program = (AVFormatContext* @s, int @id) => + { + vectors.av_new_program = FunctionResolver.GetFunctionDelegate("avformat", "av_new_program", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_new_program(@s, @id); + }; + + vectors.av_opt_child_class_iterate = (AVClass* @parent, void** @iter) => + { + vectors.av_opt_child_class_iterate = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_child_class_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_child_class_iterate(@parent, @iter); + }; + + vectors.av_opt_child_next = (void* @obj, void* @prev) => + { + vectors.av_opt_child_next = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_child_next", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_child_next(@obj, @prev); + }; + + vectors.av_opt_copy = (void* @dest, void* @src) => + { + vectors.av_opt_copy = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_copy", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_copy(@dest, @src); + }; + + vectors.av_opt_eval_double = (void* @obj, AVOption* @o, string @val, double* @double_out) => + { + vectors.av_opt_eval_double = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_eval_double", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_eval_double(@obj, @o, @val, @double_out); + }; + + vectors.av_opt_eval_flags = (void* @obj, AVOption* @o, string @val, int* @flags_out) => + { + vectors.av_opt_eval_flags = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_eval_flags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_eval_flags(@obj, @o, @val, @flags_out); + }; + + vectors.av_opt_eval_float = (void* @obj, AVOption* @o, string @val, float* @float_out) => + { + vectors.av_opt_eval_float = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_eval_float", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_eval_float(@obj, @o, @val, @float_out); + }; + + vectors.av_opt_eval_int = (void* @obj, AVOption* @o, string @val, int* @int_out) => + { + vectors.av_opt_eval_int = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_eval_int", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_eval_int(@obj, @o, @val, @int_out); + }; + + vectors.av_opt_eval_int64 = (void* @obj, AVOption* @o, string @val, long* @int64_out) => + { + vectors.av_opt_eval_int64 = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_eval_int64", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_eval_int64(@obj, @o, @val, @int64_out); + }; + + vectors.av_opt_eval_q = (void* @obj, AVOption* @o, string @val, AVRational* @q_out) => + { + vectors.av_opt_eval_q = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_eval_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_eval_q(@obj, @o, @val, @q_out); + }; + + vectors.av_opt_find = (void* @obj, string @name, string @unit, int @opt_flags, int @search_flags) => + { + vectors.av_opt_find = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_find", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_find(@obj, @name, @unit, @opt_flags, @search_flags); + }; + + vectors.av_opt_find2 = (void* @obj, string @name, string @unit, int @opt_flags, int @search_flags, void** @target_obj) => + { + vectors.av_opt_find2 = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_find2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_find2(@obj, @name, @unit, @opt_flags, @search_flags, @target_obj); + }; + + vectors.av_opt_flag_is_set = (void* @obj, string @field_name, string @flag_name) => + { + vectors.av_opt_flag_is_set = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_flag_is_set", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_flag_is_set(@obj, @field_name, @flag_name); + }; + + vectors.av_opt_free = (void* @obj) => + { + vectors.av_opt_free = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_opt_free(@obj); + }; + + vectors.av_opt_freep_ranges = (AVOptionRanges** @ranges) => + { + vectors.av_opt_freep_ranges = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_freep_ranges", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_opt_freep_ranges(@ranges); + }; + + vectors.av_opt_get = (void* @obj, string @name, int @search_flags, byte** @out_val) => + { + vectors.av_opt_get = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get(@obj, @name, @search_flags, @out_val); + }; + + vectors.av_opt_get_channel_layout = (void* @obj, string @name, int @search_flags, long* @ch_layout) => + { + vectors.av_opt_get_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_channel_layout(@obj, @name, @search_flags, @ch_layout); + }; + + vectors.av_opt_get_chlayout = (void* @obj, string @name, int @search_flags, AVChannelLayout* @layout) => + { + vectors.av_opt_get_chlayout = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_chlayout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_chlayout(@obj, @name, @search_flags, @layout); + }; + + vectors.av_opt_get_dict_val = (void* @obj, string @name, int @search_flags, AVDictionary** @out_val) => + { + vectors.av_opt_get_dict_val = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_dict_val", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_dict_val(@obj, @name, @search_flags, @out_val); + }; + + vectors.av_opt_get_double = (void* @obj, string @name, int @search_flags, double* @out_val) => + { + vectors.av_opt_get_double = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_double", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_double(@obj, @name, @search_flags, @out_val); + }; + + vectors.av_opt_get_image_size = (void* @obj, string @name, int @search_flags, int* @w_out, int* @h_out) => + { + vectors.av_opt_get_image_size = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_image_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_image_size(@obj, @name, @search_flags, @w_out, @h_out); + }; + + vectors.av_opt_get_int = (void* @obj, string @name, int @search_flags, long* @out_val) => + { + vectors.av_opt_get_int = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_int", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_int(@obj, @name, @search_flags, @out_val); + }; + + vectors.av_opt_get_key_value = (byte** @ropts, string @key_val_sep, string @pairs_sep, uint @flags, byte** @rkey, byte** @rval) => + { + vectors.av_opt_get_key_value = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_key_value", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_key_value(@ropts, @key_val_sep, @pairs_sep, @flags, @rkey, @rval); + }; + + vectors.av_opt_get_pixel_fmt = (void* @obj, string @name, int @search_flags, AVPixelFormat* @out_fmt) => + { + vectors.av_opt_get_pixel_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_pixel_fmt", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_pixel_fmt(@obj, @name, @search_flags, @out_fmt); + }; + + vectors.av_opt_get_q = (void* @obj, string @name, int @search_flags, AVRational* @out_val) => + { + vectors.av_opt_get_q = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_q", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_q(@obj, @name, @search_flags, @out_val); + }; + + vectors.av_opt_get_sample_fmt = (void* @obj, string @name, int @search_flags, AVSampleFormat* @out_fmt) => + { + vectors.av_opt_get_sample_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_sample_fmt", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_sample_fmt(@obj, @name, @search_flags, @out_fmt); + }; + + vectors.av_opt_get_video_rate = (void* @obj, string @name, int @search_flags, AVRational* @out_val) => + { + vectors.av_opt_get_video_rate = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_video_rate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_video_rate(@obj, @name, @search_flags, @out_val); + }; + + vectors.av_opt_is_set_to_default = (void* @obj, AVOption* @o) => + { + vectors.av_opt_is_set_to_default = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_is_set_to_default", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_is_set_to_default(@obj, @o); + }; + + vectors.av_opt_is_set_to_default_by_name = (void* @obj, string @name, int @search_flags) => + { + vectors.av_opt_is_set_to_default_by_name = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_is_set_to_default_by_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_is_set_to_default_by_name(@obj, @name, @search_flags); + }; + + vectors.av_opt_next = (void* @obj, AVOption* @prev) => + { + vectors.av_opt_next = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_next", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_next(@obj, @prev); + }; + + vectors.av_opt_ptr = (AVClass* @avclass, void* @obj, string @name) => + { + vectors.av_opt_ptr = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_ptr", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_ptr(@avclass, @obj, @name); + }; + + vectors.av_opt_query_ranges = (AVOptionRanges** @p0, void* @obj, string @key, int @flags) => + { + vectors.av_opt_query_ranges = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_query_ranges", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_query_ranges(@p0, @obj, @key, @flags); + }; + + vectors.av_opt_query_ranges_default = (AVOptionRanges** @p0, void* @obj, string @key, int @flags) => + { + vectors.av_opt_query_ranges_default = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_query_ranges_default", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_query_ranges_default(@p0, @obj, @key, @flags); + }; + + vectors.av_opt_serialize = (void* @obj, int @opt_flags, int @flags, byte** @buffer, byte @key_val_sep, byte @pairs_sep) => + { + vectors.av_opt_serialize = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_serialize", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_serialize(@obj, @opt_flags, @flags, @buffer, @key_val_sep, @pairs_sep); + }; + + vectors.av_opt_set = (void* @obj, string @name, string @val, int @search_flags) => + { + vectors.av_opt_set = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set(@obj, @name, @val, @search_flags); + }; + + vectors.av_opt_set_bin = (void* @obj, string @name, byte* @val, int @size, int @search_flags) => + { + vectors.av_opt_set_bin = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_bin", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_bin(@obj, @name, @val, @size, @search_flags); + }; + + vectors.av_opt_set_channel_layout = (void* @obj, string @name, long @ch_layout, int @search_flags) => + { + vectors.av_opt_set_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_channel_layout(@obj, @name, @ch_layout, @search_flags); + }; + + vectors.av_opt_set_chlayout = (void* @obj, string @name, AVChannelLayout* @layout, int @search_flags) => + { + vectors.av_opt_set_chlayout = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_chlayout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_chlayout(@obj, @name, @layout, @search_flags); + }; + + vectors.av_opt_set_defaults = (void* @s) => + { + vectors.av_opt_set_defaults = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_defaults", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_opt_set_defaults(@s); + }; + + vectors.av_opt_set_defaults2 = (void* @s, int @mask, int @flags) => + { + vectors.av_opt_set_defaults2 = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_defaults2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_opt_set_defaults2(@s, @mask, @flags); + }; + + vectors.av_opt_set_dict = (void* @obj, AVDictionary** @options) => + { + vectors.av_opt_set_dict = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_dict", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_dict(@obj, @options); + }; + + vectors.av_opt_set_dict_val = (void* @obj, string @name, AVDictionary* @val, int @search_flags) => + { + vectors.av_opt_set_dict_val = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_dict_val", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_dict_val(@obj, @name, @val, @search_flags); + }; + + vectors.av_opt_set_dict2 = (void* @obj, AVDictionary** @options, int @search_flags) => + { + vectors.av_opt_set_dict2 = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_dict2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_dict2(@obj, @options, @search_flags); + }; + + vectors.av_opt_set_double = (void* @obj, string @name, double @val, int @search_flags) => + { + vectors.av_opt_set_double = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_double", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_double(@obj, @name, @val, @search_flags); + }; + + vectors.av_opt_set_from_string = (void* @ctx, string @opts, byte** @shorthand, string @key_val_sep, string @pairs_sep) => + { + vectors.av_opt_set_from_string = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_from_string", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_from_string(@ctx, @opts, @shorthand, @key_val_sep, @pairs_sep); + }; + + vectors.av_opt_set_image_size = (void* @obj, string @name, int @w, int @h, int @search_flags) => + { + vectors.av_opt_set_image_size = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_image_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_image_size(@obj, @name, @w, @h, @search_flags); + }; + + vectors.av_opt_set_int = (void* @obj, string @name, long @val, int @search_flags) => + { + vectors.av_opt_set_int = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_int", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_int(@obj, @name, @val, @search_flags); + }; + + vectors.av_opt_set_pixel_fmt = (void* @obj, string @name, AVPixelFormat @fmt, int @search_flags) => + { + vectors.av_opt_set_pixel_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_pixel_fmt", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_pixel_fmt(@obj, @name, @fmt, @search_flags); + }; + + vectors.av_opt_set_q = (void* @obj, string @name, AVRational @val, int @search_flags) => + { + vectors.av_opt_set_q = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_q(@obj, @name, @val, @search_flags); + }; + + vectors.av_opt_set_sample_fmt = (void* @obj, string @name, AVSampleFormat @fmt, int @search_flags) => + { + vectors.av_opt_set_sample_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_sample_fmt", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_sample_fmt(@obj, @name, @fmt, @search_flags); + }; + + vectors.av_opt_set_video_rate = (void* @obj, string @name, AVRational @val, int @search_flags) => + { + vectors.av_opt_set_video_rate = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_video_rate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_video_rate(@obj, @name, @val, @search_flags); + }; + + vectors.av_opt_show2 = (void* @obj, void* @av_log_obj, int @req_flags, int @rej_flags) => + { + vectors.av_opt_show2 = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_show2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_show2(@obj, @av_log_obj, @req_flags, @rej_flags); + }; + + vectors.av_output_audio_device_next = (AVOutputFormat* @d) => + { + vectors.av_output_audio_device_next = FunctionResolver.GetFunctionDelegate("avdevice", "av_output_audio_device_next", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_output_audio_device_next(@d); + }; + + vectors.av_output_video_device_next = (AVOutputFormat* @d) => + { + vectors.av_output_video_device_next = FunctionResolver.GetFunctionDelegate("avdevice", "av_output_video_device_next", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_output_video_device_next(@d); + }; + + vectors.av_packet_add_side_data = (AVPacket* @pkt, AVPacketSideDataType @type, byte* @data, ulong @size) => + { + vectors.av_packet_add_side_data = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_add_side_data", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_packet_add_side_data(@pkt, @type, @data, @size); + }; + + vectors.av_packet_alloc = () => + { + vectors.av_packet_alloc = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_alloc(); + }; + + vectors.av_packet_clone = (AVPacket* @src) => + { + vectors.av_packet_clone = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_clone", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_clone(@src); + }; + + vectors.av_packet_copy_props = (AVPacket* @dst, AVPacket* @src) => + { + vectors.av_packet_copy_props = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_copy_props", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_copy_props(@dst, @src); + }; + + vectors.av_packet_free = (AVPacket** @pkt) => + { + vectors.av_packet_free = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_packet_free(@pkt); + }; + + vectors.av_packet_free_side_data = (AVPacket* @pkt) => + { + vectors.av_packet_free_side_data = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_free_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_packet_free_side_data(@pkt); + }; + + vectors.av_packet_from_data = (AVPacket* @pkt, byte* @data, int @size) => + { + vectors.av_packet_from_data = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_from_data", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_packet_from_data(@pkt, @data, @size); + }; + + vectors.av_packet_get_side_data = (AVPacket* @pkt, AVPacketSideDataType @type, ulong* @size) => + { + vectors.av_packet_get_side_data = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_get_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_get_side_data(@pkt, @type, @size); + }; + + vectors.av_packet_make_refcounted = (AVPacket* @pkt) => + { + vectors.av_packet_make_refcounted = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_make_refcounted", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_make_refcounted(@pkt); + }; + + vectors.av_packet_make_writable = (AVPacket* @pkt) => + { + vectors.av_packet_make_writable = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_make_writable", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_make_writable(@pkt); + }; + + vectors.av_packet_move_ref = (AVPacket* @dst, AVPacket* @src) => + { + vectors.av_packet_move_ref = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_move_ref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_packet_move_ref(@dst, @src); + }; + + vectors.av_packet_new_side_data = (AVPacket* @pkt, AVPacketSideDataType @type, ulong @size) => + { + vectors.av_packet_new_side_data = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_new_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_new_side_data(@pkt, @type, @size); + }; + + vectors.av_packet_pack_dictionary = (AVDictionary* @dict, ulong* @size) => + { + vectors.av_packet_pack_dictionary = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_pack_dictionary", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_packet_pack_dictionary(@dict, @size); + }; + + vectors.av_packet_ref = (AVPacket* @dst, AVPacket* @src) => + { + vectors.av_packet_ref = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_ref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_ref(@dst, @src); + }; + + vectors.av_packet_rescale_ts = (AVPacket* @pkt, AVRational @tb_src, AVRational @tb_dst) => + { + vectors.av_packet_rescale_ts = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_rescale_ts", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_packet_rescale_ts(@pkt, @tb_src, @tb_dst); + }; + + vectors.av_packet_shrink_side_data = (AVPacket* @pkt, AVPacketSideDataType @type, ulong @size) => + { + vectors.av_packet_shrink_side_data = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_shrink_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_shrink_side_data(@pkt, @type, @size); + }; + + vectors.av_packet_side_data_name = (AVPacketSideDataType @type) => + { + vectors.av_packet_side_data_name = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_side_data_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_side_data_name(@type); + }; + + vectors.av_packet_unpack_dictionary = (byte* @data, ulong @size, AVDictionary** @dict) => + { + vectors.av_packet_unpack_dictionary = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_unpack_dictionary", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_packet_unpack_dictionary(@data, @size, @dict); + }; + + vectors.av_packet_unref = (AVPacket* @pkt) => + { + vectors.av_packet_unref = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_unref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_packet_unref(@pkt); + }; + + vectors.av_parse_cpu_caps = (uint* @flags, string @s) => + { + vectors.av_parse_cpu_caps = FunctionResolver.GetFunctionDelegate("avutil", "av_parse_cpu_caps", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_parse_cpu_caps(@flags, @s); + }; + + vectors.av_parser_close = (AVCodecParserContext* @s) => + { + vectors.av_parser_close = FunctionResolver.GetFunctionDelegate("avcodec", "av_parser_close", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_parser_close(@s); + }; + + vectors.av_parser_init = (int @codec_id) => + { + vectors.av_parser_init = FunctionResolver.GetFunctionDelegate("avcodec", "av_parser_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_parser_init(@codec_id); + }; + + vectors.av_parser_iterate = (void** @opaque) => + { + vectors.av_parser_iterate = FunctionResolver.GetFunctionDelegate("avcodec", "av_parser_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_parser_iterate(@opaque); + }; + + vectors.av_parser_parse2 = (AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size, long @pts, long @dts, long @pos) => + { + vectors.av_parser_parse2 = FunctionResolver.GetFunctionDelegate("avcodec", "av_parser_parse2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_parser_parse2(@s, @avctx, @poutbuf, @poutbuf_size, @buf, @buf_size, @pts, @dts, @pos); + }; + + vectors.av_pix_fmt_count_planes = (AVPixelFormat @pix_fmt) => + { + vectors.av_pix_fmt_count_planes = FunctionResolver.GetFunctionDelegate("avutil", "av_pix_fmt_count_planes", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_pix_fmt_count_planes(@pix_fmt); + }; + + vectors.av_pix_fmt_desc_get = (AVPixelFormat @pix_fmt) => + { + vectors.av_pix_fmt_desc_get = FunctionResolver.GetFunctionDelegate("avutil", "av_pix_fmt_desc_get", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_pix_fmt_desc_get(@pix_fmt); + }; + + vectors.av_pix_fmt_desc_get_id = (AVPixFmtDescriptor* @desc) => + { + vectors.av_pix_fmt_desc_get_id = FunctionResolver.GetFunctionDelegate("avutil", "av_pix_fmt_desc_get_id", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_pix_fmt_desc_get_id(@desc); + }; + + vectors.av_pix_fmt_desc_next = (AVPixFmtDescriptor* @prev) => + { + vectors.av_pix_fmt_desc_next = FunctionResolver.GetFunctionDelegate("avutil", "av_pix_fmt_desc_next", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_pix_fmt_desc_next(@prev); + }; + + vectors.av_pix_fmt_get_chroma_sub_sample = (AVPixelFormat @pix_fmt, int* @h_shift, int* @v_shift) => + { + vectors.av_pix_fmt_get_chroma_sub_sample = FunctionResolver.GetFunctionDelegate("avutil", "av_pix_fmt_get_chroma_sub_sample", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_pix_fmt_get_chroma_sub_sample(@pix_fmt, @h_shift, @v_shift); + }; + + vectors.av_pix_fmt_swap_endianness = (AVPixelFormat @pix_fmt) => + { + vectors.av_pix_fmt_swap_endianness = FunctionResolver.GetFunctionDelegate("avutil", "av_pix_fmt_swap_endianness", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_pix_fmt_swap_endianness(@pix_fmt); + }; + + vectors.av_pkt_dump_log2 = (void* @avcl, int @level, AVPacket* @pkt, int @dump_payload, AVStream* @st) => + { + vectors.av_pkt_dump_log2 = FunctionResolver.GetFunctionDelegate("avformat", "av_pkt_dump_log2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_pkt_dump_log2(@avcl, @level, @pkt, @dump_payload, @st); + }; + + vectors.av_pkt_dump2 = (_iobuf* @f, AVPacket* @pkt, int @dump_payload, AVStream* @st) => + { + vectors.av_pkt_dump2 = FunctionResolver.GetFunctionDelegate("avformat", "av_pkt_dump2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_pkt_dump2(@f, @pkt, @dump_payload, @st); + }; + + vectors.av_probe_input_buffer = (AVIOContext* @pb, AVInputFormat** @fmt, string @url, void* @logctx, uint @offset, uint @max_probe_size) => + { + vectors.av_probe_input_buffer = FunctionResolver.GetFunctionDelegate("avformat", "av_probe_input_buffer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_probe_input_buffer(@pb, @fmt, @url, @logctx, @offset, @max_probe_size); + }; + + vectors.av_probe_input_buffer2 = (AVIOContext* @pb, AVInputFormat** @fmt, string @url, void* @logctx, uint @offset, uint @max_probe_size) => + { + vectors.av_probe_input_buffer2 = FunctionResolver.GetFunctionDelegate("avformat", "av_probe_input_buffer2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_probe_input_buffer2(@pb, @fmt, @url, @logctx, @offset, @max_probe_size); + }; + + vectors.av_probe_input_format = (AVProbeData* @pd, int @is_opened) => + { + vectors.av_probe_input_format = FunctionResolver.GetFunctionDelegate("avformat", "av_probe_input_format", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_probe_input_format(@pd, @is_opened); + }; + + vectors.av_probe_input_format2 = (AVProbeData* @pd, int @is_opened, int* @score_max) => + { + vectors.av_probe_input_format2 = FunctionResolver.GetFunctionDelegate("avformat", "av_probe_input_format2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_probe_input_format2(@pd, @is_opened, @score_max); + }; + + vectors.av_probe_input_format3 = (AVProbeData* @pd, int @is_opened, int* @score_ret) => + { + vectors.av_probe_input_format3 = FunctionResolver.GetFunctionDelegate("avformat", "av_probe_input_format3", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_probe_input_format3(@pd, @is_opened, @score_ret); + }; + + vectors.av_program_add_stream_index = (AVFormatContext* @ac, int @progid, uint @idx) => + { + vectors.av_program_add_stream_index = FunctionResolver.GetFunctionDelegate("avformat", "av_program_add_stream_index", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_program_add_stream_index(@ac, @progid, @idx); + }; + + vectors.av_q2intfloat = (AVRational @q) => + { + vectors.av_q2intfloat = FunctionResolver.GetFunctionDelegate("avutil", "av_q2intfloat", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_q2intfloat(@q); + }; + + vectors.av_read_frame = (AVFormatContext* @s, AVPacket* @pkt) => + { + vectors.av_read_frame = FunctionResolver.GetFunctionDelegate("avformat", "av_read_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_read_frame(@s, @pkt); + }; + + vectors.av_read_image_line = (ushort* @dst, in byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component) => + { + vectors.av_read_image_line = FunctionResolver.GetFunctionDelegate("avutil", "av_read_image_line", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_read_image_line(@dst, @data, @linesize, @desc, @x, @y, @c, @w, @read_pal_component); + }; + + vectors.av_read_image_line2 = (void* @dst, in byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component, int @dst_element_size) => + { + vectors.av_read_image_line2 = FunctionResolver.GetFunctionDelegate("avutil", "av_read_image_line2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_read_image_line2(@dst, @data, @linesize, @desc, @x, @y, @c, @w, @read_pal_component, @dst_element_size); + }; + + vectors.av_read_pause = (AVFormatContext* @s) => + { + vectors.av_read_pause = FunctionResolver.GetFunctionDelegate("avformat", "av_read_pause", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_read_pause(@s); + }; + + vectors.av_read_play = (AVFormatContext* @s) => + { + vectors.av_read_play = FunctionResolver.GetFunctionDelegate("avformat", "av_read_play", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_read_play(@s); + }; + + vectors.av_realloc = (void* @ptr, ulong @size) => + { + vectors.av_realloc = FunctionResolver.GetFunctionDelegate("avutil", "av_realloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_realloc(@ptr, @size); + }; + + vectors.av_realloc_array = (void* @ptr, ulong @nmemb, ulong @size) => + { + vectors.av_realloc_array = FunctionResolver.GetFunctionDelegate("avutil", "av_realloc_array", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_realloc_array(@ptr, @nmemb, @size); + }; + + vectors.av_realloc_f = (void* @ptr, ulong @nelem, ulong @elsize) => + { + vectors.av_realloc_f = FunctionResolver.GetFunctionDelegate("avutil", "av_realloc_f", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_realloc_f(@ptr, @nelem, @elsize); + }; + + vectors.av_reallocp = (void* @ptr, ulong @size) => + { + vectors.av_reallocp = FunctionResolver.GetFunctionDelegate("avutil", "av_reallocp", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_reallocp(@ptr, @size); + }; + + vectors.av_reallocp_array = (void* @ptr, ulong @nmemb, ulong @size) => + { + vectors.av_reallocp_array = FunctionResolver.GetFunctionDelegate("avutil", "av_reallocp_array", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_reallocp_array(@ptr, @nmemb, @size); + }; + + vectors.av_reduce = (int* @dst_num, int* @dst_den, long @num, long @den, long @max) => + { + vectors.av_reduce = FunctionResolver.GetFunctionDelegate("avutil", "av_reduce", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_reduce(@dst_num, @dst_den, @num, @den, @max); + }; + + vectors.av_rescale = (long @a, long @b, long @c) => + { + vectors.av_rescale = FunctionResolver.GetFunctionDelegate("avutil", "av_rescale", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_rescale(@a, @b, @c); + }; + + vectors.av_rescale_delta = (AVRational @in_tb, long @in_ts, AVRational @fs_tb, int @duration, long* @last, AVRational @out_tb) => + { + vectors.av_rescale_delta = FunctionResolver.GetFunctionDelegate("avutil", "av_rescale_delta", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_rescale_delta(@in_tb, @in_ts, @fs_tb, @duration, @last, @out_tb); + }; + + vectors.av_rescale_q = (long @a, AVRational @bq, AVRational @cq) => + { + vectors.av_rescale_q = FunctionResolver.GetFunctionDelegate("avutil", "av_rescale_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_rescale_q(@a, @bq, @cq); + }; + + vectors.av_rescale_q_rnd = (long @a, AVRational @bq, AVRational @cq, AVRounding @rnd) => + { + vectors.av_rescale_q_rnd = FunctionResolver.GetFunctionDelegate("avutil", "av_rescale_q_rnd", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_rescale_q_rnd(@a, @bq, @cq, @rnd); + }; + + vectors.av_rescale_rnd = (long @a, long @b, long @c, AVRounding @rnd) => + { + vectors.av_rescale_rnd = FunctionResolver.GetFunctionDelegate("avutil", "av_rescale_rnd", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_rescale_rnd(@a, @b, @c, @rnd); + }; + + vectors.av_sample_fmt_is_planar = (AVSampleFormat @sample_fmt) => + { + vectors.av_sample_fmt_is_planar = FunctionResolver.GetFunctionDelegate("avutil", "av_sample_fmt_is_planar", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_sample_fmt_is_planar(@sample_fmt); + }; + + vectors.av_samples_alloc = (byte** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => + { + vectors.av_samples_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_samples_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_samples_alloc(@audio_data, @linesize, @nb_channels, @nb_samples, @sample_fmt, @align); + }; + + vectors.av_samples_alloc_array_and_samples = (byte*** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => + { + vectors.av_samples_alloc_array_and_samples = FunctionResolver.GetFunctionDelegate("avutil", "av_samples_alloc_array_and_samples", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_samples_alloc_array_and_samples(@audio_data, @linesize, @nb_channels, @nb_samples, @sample_fmt, @align); + }; + + vectors.av_samples_copy = (byte** @dst, byte** @src, int @dst_offset, int @src_offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt) => + { + vectors.av_samples_copy = FunctionResolver.GetFunctionDelegate("avutil", "av_samples_copy", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_samples_copy(@dst, @src, @dst_offset, @src_offset, @nb_samples, @nb_channels, @sample_fmt); + }; + + vectors.av_samples_fill_arrays = (byte** @audio_data, int* @linesize, byte* @buf, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => + { + vectors.av_samples_fill_arrays = FunctionResolver.GetFunctionDelegate("avutil", "av_samples_fill_arrays", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_samples_fill_arrays(@audio_data, @linesize, @buf, @nb_channels, @nb_samples, @sample_fmt, @align); + }; + + vectors.av_samples_get_buffer_size = (int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => + { + vectors.av_samples_get_buffer_size = FunctionResolver.GetFunctionDelegate("avutil", "av_samples_get_buffer_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_samples_get_buffer_size(@linesize, @nb_channels, @nb_samples, @sample_fmt, @align); + }; + + vectors.av_samples_set_silence = (byte** @audio_data, int @offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt) => + { + vectors.av_samples_set_silence = FunctionResolver.GetFunctionDelegate("avutil", "av_samples_set_silence", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_samples_set_silence(@audio_data, @offset, @nb_samples, @nb_channels, @sample_fmt); + }; + + vectors.av_sdp_create = (AVFormatContext** @ac, int @n_files, byte* @buf, int @size) => + { + vectors.av_sdp_create = FunctionResolver.GetFunctionDelegate("avformat", "av_sdp_create", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_sdp_create(@ac, @n_files, @buf, @size); + }; + + vectors.av_seek_frame = (AVFormatContext* @s, int @stream_index, long @timestamp, int @flags) => + { + vectors.av_seek_frame = FunctionResolver.GetFunctionDelegate("avformat", "av_seek_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_seek_frame(@s, @stream_index, @timestamp, @flags); + }; + + vectors.av_set_options_string = (void* @ctx, string @opts, string @key_val_sep, string @pairs_sep) => + { + vectors.av_set_options_string = FunctionResolver.GetFunctionDelegate("avutil", "av_set_options_string", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_set_options_string(@ctx, @opts, @key_val_sep, @pairs_sep); + }; + + vectors.av_shrink_packet = (AVPacket* @pkt, int @size) => + { + vectors.av_shrink_packet = FunctionResolver.GetFunctionDelegate("avcodec", "av_shrink_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_shrink_packet(@pkt, @size); + }; + + vectors.av_size_mult = (ulong @a, ulong @b, ulong* @r) => + { + vectors.av_size_mult = FunctionResolver.GetFunctionDelegate("avutil", "av_size_mult", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_size_mult(@a, @b, @r); + }; + + vectors.av_strdup = (string @s) => + { + vectors.av_strdup = FunctionResolver.GetFunctionDelegate("avutil", "av_strdup", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_strdup(@s); + }; + + vectors.av_stream_add_side_data = (AVStream* @st, AVPacketSideDataType @type, byte* @data, ulong @size) => + { + vectors.av_stream_add_side_data = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_add_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_stream_add_side_data(@st, @type, @data, @size); + }; + + vectors.av_stream_get_class = () => + { + vectors.av_stream_get_class = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_stream_get_class(); + }; + + vectors.av_stream_get_codec_timebase = (AVStream* @st) => + { + vectors.av_stream_get_codec_timebase = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_get_codec_timebase", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_stream_get_codec_timebase(@st); + }; + + vectors.av_stream_get_end_pts = (AVStream* @st) => + { + vectors.av_stream_get_end_pts = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_get_end_pts", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_stream_get_end_pts(@st); + }; + + vectors.av_stream_get_parser = (AVStream* @s) => + { + vectors.av_stream_get_parser = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_get_parser", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_stream_get_parser(@s); + }; + + vectors.av_stream_get_side_data = (AVStream* @stream, AVPacketSideDataType @type, ulong* @size) => + { + vectors.av_stream_get_side_data = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_get_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_stream_get_side_data(@stream, @type, @size); + }; + + vectors.av_stream_new_side_data = (AVStream* @stream, AVPacketSideDataType @type, ulong @size) => + { + vectors.av_stream_new_side_data = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_new_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_stream_new_side_data(@stream, @type, @size); + }; + + vectors.av_strerror = (int @errnum, byte* @errbuf, ulong @errbuf_size) => + { + vectors.av_strerror = FunctionResolver.GetFunctionDelegate("avutil", "av_strerror", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_strerror(@errnum, @errbuf, @errbuf_size); + }; + + vectors.av_strndup = (string @s, ulong @len) => + { + vectors.av_strndup = FunctionResolver.GetFunctionDelegate("avutil", "av_strndup", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_strndup(@s, @len); + }; + + vectors.av_sub_q = (AVRational @b, AVRational @c) => + { + vectors.av_sub_q = FunctionResolver.GetFunctionDelegate("avutil", "av_sub_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_sub_q(@b, @c); + }; + + vectors.av_tempfile = (string @prefix, byte** @filename, int @log_offset, void* @log_ctx) => + { + vectors.av_tempfile = FunctionResolver.GetFunctionDelegate("avutil", "av_tempfile", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_tempfile(@prefix, @filename, @log_offset, @log_ctx); + }; + + vectors.av_timecode_adjust_ntsc_framenum2 = (int @framenum, int @fps) => + { + vectors.av_timecode_adjust_ntsc_framenum2 = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_adjust_ntsc_framenum2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_adjust_ntsc_framenum2(@framenum, @fps); + }; + + vectors.av_timecode_check_frame_rate = (AVRational @rate) => + { + vectors.av_timecode_check_frame_rate = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_check_frame_rate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_check_frame_rate(@rate); + }; + + vectors.av_timecode_get_smpte = (AVRational @rate, int @drop, int @hh, int @mm, int @ss, int @ff) => + { + vectors.av_timecode_get_smpte = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_get_smpte", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_get_smpte(@rate, @drop, @hh, @mm, @ss, @ff); + }; + + vectors.av_timecode_get_smpte_from_framenum = (AVTimecode* @tc, int @framenum) => + { + vectors.av_timecode_get_smpte_from_framenum = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_get_smpte_from_framenum", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_get_smpte_from_framenum(@tc, @framenum); + }; + + vectors.av_timecode_init = (AVTimecode* @tc, AVRational @rate, int @flags, int @frame_start, void* @log_ctx) => + { + vectors.av_timecode_init = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_init(@tc, @rate, @flags, @frame_start, @log_ctx); + }; + + vectors.av_timecode_init_from_components = (AVTimecode* @tc, AVRational @rate, int @flags, int @hh, int @mm, int @ss, int @ff, void* @log_ctx) => + { + vectors.av_timecode_init_from_components = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_init_from_components", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_init_from_components(@tc, @rate, @flags, @hh, @mm, @ss, @ff, @log_ctx); + }; + + vectors.av_timecode_init_from_string = (AVTimecode* @tc, AVRational @rate, string @str, void* @log_ctx) => + { + vectors.av_timecode_init_from_string = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_init_from_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_init_from_string(@tc, @rate, @str, @log_ctx); + }; + + vectors.av_timecode_make_mpeg_tc_string = (byte* @buf, uint @tc25bit) => + { + vectors.av_timecode_make_mpeg_tc_string = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_make_mpeg_tc_string", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_make_mpeg_tc_string(@buf, @tc25bit); + }; + + vectors.av_timecode_make_smpte_tc_string = (byte* @buf, uint @tcsmpte, int @prevent_df) => + { + vectors.av_timecode_make_smpte_tc_string = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_make_smpte_tc_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_make_smpte_tc_string(@buf, @tcsmpte, @prevent_df); + }; + + vectors.av_timecode_make_smpte_tc_string2 = (byte* @buf, AVRational @rate, uint @tcsmpte, int @prevent_df, int @skip_field) => + { + vectors.av_timecode_make_smpte_tc_string2 = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_make_smpte_tc_string2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_make_smpte_tc_string2(@buf, @rate, @tcsmpte, @prevent_df, @skip_field); + }; + + vectors.av_timecode_make_string = (AVTimecode* @tc, byte* @buf, int @framenum) => + { + vectors.av_timecode_make_string = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_make_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_make_string(@tc, @buf, @framenum); + }; + + vectors.av_tree_destroy = (AVTreeNode* @t) => + { + vectors.av_tree_destroy = FunctionResolver.GetFunctionDelegate("avutil", "av_tree_destroy", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_tree_destroy(@t); + }; + + vectors.av_tree_enumerate = (AVTreeNode* @t, void* @opaque, av_tree_enumerate_cmp_func @cmp, av_tree_enumerate_enu_func @enu) => + { + vectors.av_tree_enumerate = FunctionResolver.GetFunctionDelegate("avutil", "av_tree_enumerate", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_tree_enumerate(@t, @opaque, @cmp, @enu); + }; + + vectors.av_tree_find = (AVTreeNode* @root, void* @key, av_tree_find_cmp_func @cmp, ref void_ptr2 @next) => + { + vectors.av_tree_find = FunctionResolver.GetFunctionDelegate("avutil", "av_tree_find", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_tree_find(@root, @key, @cmp, ref @next); + }; + + vectors.av_tree_insert = (AVTreeNode** @rootp, void* @key, av_tree_insert_cmp_func @cmp, AVTreeNode** @next) => + { + vectors.av_tree_insert = FunctionResolver.GetFunctionDelegate("avutil", "av_tree_insert", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_tree_insert(@rootp, @key, @cmp, @next); + }; + + vectors.av_tree_node_alloc = () => + { + vectors.av_tree_node_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_tree_node_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_tree_node_alloc(); + }; + + vectors.av_url_split = (byte* @proto, int @proto_size, byte* @authorization, int @authorization_size, byte* @hostname, int @hostname_size, int* @port_ptr, byte* @path, int @path_size, string @url) => + { + vectors.av_url_split = FunctionResolver.GetFunctionDelegate("avformat", "av_url_split", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_url_split(@proto, @proto_size, @authorization, @authorization_size, @hostname, @hostname_size, @port_ptr, @path, @path_size, @url); + }; + + vectors.av_usleep = (uint @usec) => + { + vectors.av_usleep = FunctionResolver.GetFunctionDelegate("avutil", "av_usleep", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_usleep(@usec); + }; + + vectors.av_version_info = () => + { + vectors.av_version_info = FunctionResolver.GetFunctionDelegate("avutil", "av_version_info", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_version_info(); + }; + + vectors.av_vlog = (void* @avcl, int @level, string @fmt, byte* @vl) => + { + vectors.av_vlog = FunctionResolver.GetFunctionDelegate("avutil", "av_vlog", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_vlog(@avcl, @level, @fmt, @vl); + }; + + vectors.av_write_frame = (AVFormatContext* @s, AVPacket* @pkt) => + { + vectors.av_write_frame = FunctionResolver.GetFunctionDelegate("avformat", "av_write_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_write_frame(@s, @pkt); + }; + + vectors.av_write_image_line = (ushort* @src, ref byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w) => + { + vectors.av_write_image_line = FunctionResolver.GetFunctionDelegate("avutil", "av_write_image_line", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_write_image_line(@src, ref @data, @linesize, @desc, @x, @y, @c, @w); + }; + + vectors.av_write_image_line2 = (void* @src, ref byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @src_element_size) => + { + vectors.av_write_image_line2 = FunctionResolver.GetFunctionDelegate("avutil", "av_write_image_line2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_write_image_line2(@src, ref @data, @linesize, @desc, @x, @y, @c, @w, @src_element_size); + }; + + vectors.av_write_trailer = (AVFormatContext* @s) => + { + vectors.av_write_trailer = FunctionResolver.GetFunctionDelegate("avformat", "av_write_trailer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_write_trailer(@s); + }; + + vectors.av_write_uncoded_frame = (AVFormatContext* @s, int @stream_index, AVFrame* @frame) => + { + vectors.av_write_uncoded_frame = FunctionResolver.GetFunctionDelegate("avformat", "av_write_uncoded_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_write_uncoded_frame(@s, @stream_index, @frame); + }; + + vectors.av_write_uncoded_frame_query = (AVFormatContext* @s, int @stream_index) => + { + vectors.av_write_uncoded_frame_query = FunctionResolver.GetFunctionDelegate("avformat", "av_write_uncoded_frame_query", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_write_uncoded_frame_query(@s, @stream_index); + }; + + vectors.av_xiphlacing = (byte* @s, uint @v) => + { + vectors.av_xiphlacing = FunctionResolver.GetFunctionDelegate("avcodec", "av_xiphlacing", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_xiphlacing(@s, @v); + }; + + vectors.avcodec_align_dimensions = (AVCodecContext* @s, int* @width, int* @height) => + { + vectors.avcodec_align_dimensions = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_align_dimensions", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avcodec_align_dimensions(@s, @width, @height); + }; + + vectors.avcodec_align_dimensions2 = (AVCodecContext* @s, int* @width, int* @height, ref int8 @linesize_align) => + { + vectors.avcodec_align_dimensions2 = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_align_dimensions2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avcodec_align_dimensions2(@s, @width, @height, ref @linesize_align); + }; + + vectors.avcodec_alloc_context3 = (AVCodec* @codec) => + { + vectors.avcodec_alloc_context3 = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_alloc_context3", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_alloc_context3(@codec); + }; + + vectors.avcodec_chroma_pos_to_enum = (int @xpos, int @ypos) => + { + vectors.avcodec_chroma_pos_to_enum = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_chroma_pos_to_enum", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_chroma_pos_to_enum(@xpos, @ypos); + }; + + vectors.avcodec_close = (AVCodecContext* @avctx) => + { + vectors.avcodec_close = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_close", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_close(@avctx); + }; + + vectors.avcodec_configuration = () => + { + vectors.avcodec_configuration = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_configuration(); + }; + + vectors.avcodec_decode_subtitle2 = (AVCodecContext* @avctx, AVSubtitle* @sub, int* @got_sub_ptr, AVPacket* @avpkt) => + { + vectors.avcodec_decode_subtitle2 = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_decode_subtitle2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_decode_subtitle2(@avctx, @sub, @got_sub_ptr, @avpkt); + }; + + vectors.avcodec_default_execute = (AVCodecContext* @c, avcodec_default_execute_func_func @func, void* @arg, int* @ret, int @count, int @size) => + { + vectors.avcodec_default_execute = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_default_execute", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_default_execute(@c, @func, @arg, @ret, @count, @size); + }; + + vectors.avcodec_default_execute2 = (AVCodecContext* @c, avcodec_default_execute2_func_func @func, void* @arg, int* @ret, int @count) => + { + vectors.avcodec_default_execute2 = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_default_execute2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_default_execute2(@c, @func, @arg, @ret, @count); + }; + + vectors.avcodec_default_get_buffer2 = (AVCodecContext* @s, AVFrame* @frame, int @flags) => + { + vectors.avcodec_default_get_buffer2 = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_default_get_buffer2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_default_get_buffer2(@s, @frame, @flags); + }; + + vectors.avcodec_default_get_encode_buffer = (AVCodecContext* @s, AVPacket* @pkt, int @flags) => + { + vectors.avcodec_default_get_encode_buffer = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_default_get_encode_buffer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_default_get_encode_buffer(@s, @pkt, @flags); + }; + + vectors.avcodec_default_get_format = (AVCodecContext* @s, AVPixelFormat* @fmt) => + { + vectors.avcodec_default_get_format = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_default_get_format", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_default_get_format(@s, @fmt); + }; + + vectors.avcodec_descriptor_get = (AVCodecID @id) => + { + vectors.avcodec_descriptor_get = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_descriptor_get", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_descriptor_get(@id); + }; + + vectors.avcodec_descriptor_get_by_name = (string @name) => + { + vectors.avcodec_descriptor_get_by_name = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_descriptor_get_by_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_descriptor_get_by_name(@name); + }; + + vectors.avcodec_descriptor_next = (AVCodecDescriptor* @prev) => + { + vectors.avcodec_descriptor_next = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_descriptor_next", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_descriptor_next(@prev); + }; + + vectors.avcodec_encode_subtitle = (AVCodecContext* @avctx, byte* @buf, int @buf_size, AVSubtitle* @sub) => + { + vectors.avcodec_encode_subtitle = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_encode_subtitle", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_encode_subtitle(@avctx, @buf, @buf_size, @sub); + }; + + vectors.avcodec_enum_to_chroma_pos = (int* @xpos, int* @ypos, AVChromaLocation @pos) => + { + vectors.avcodec_enum_to_chroma_pos = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_enum_to_chroma_pos", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_enum_to_chroma_pos(@xpos, @ypos, @pos); + }; + + vectors.avcodec_fill_audio_frame = (AVFrame* @frame, int @nb_channels, AVSampleFormat @sample_fmt, byte* @buf, int @buf_size, int @align) => + { + vectors.avcodec_fill_audio_frame = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_fill_audio_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_fill_audio_frame(@frame, @nb_channels, @sample_fmt, @buf, @buf_size, @align); + }; + + vectors.avcodec_find_best_pix_fmt_of_list = (AVPixelFormat* @pix_fmt_list, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr) => + { + vectors.avcodec_find_best_pix_fmt_of_list = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_find_best_pix_fmt_of_list", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_find_best_pix_fmt_of_list(@pix_fmt_list, @src_pix_fmt, @has_alpha, @loss_ptr); + }; + + vectors.avcodec_find_decoder = (AVCodecID @id) => + { + vectors.avcodec_find_decoder = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_find_decoder", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_find_decoder(@id); + }; + + vectors.avcodec_find_decoder_by_name = (string @name) => + { + vectors.avcodec_find_decoder_by_name = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_find_decoder_by_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_find_decoder_by_name(@name); + }; + + vectors.avcodec_find_encoder = (AVCodecID @id) => + { + vectors.avcodec_find_encoder = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_find_encoder", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_find_encoder(@id); + }; + + vectors.avcodec_find_encoder_by_name = (string @name) => + { + vectors.avcodec_find_encoder_by_name = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_find_encoder_by_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_find_encoder_by_name(@name); + }; + + vectors.avcodec_flush_buffers = (AVCodecContext* @avctx) => + { + vectors.avcodec_flush_buffers = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_flush_buffers", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avcodec_flush_buffers(@avctx); + }; + + vectors.avcodec_free_context = (AVCodecContext** @avctx) => + { + vectors.avcodec_free_context = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_free_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avcodec_free_context(@avctx); + }; + + vectors.avcodec_get_class = () => + { + vectors.avcodec_get_class = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_class(); + }; + + vectors.avcodec_get_frame_class = () => + { + vectors.avcodec_get_frame_class = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_frame_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_frame_class(); + }; + + vectors.avcodec_get_hw_config = (AVCodec* @codec, int @index) => + { + vectors.avcodec_get_hw_config = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_hw_config", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_hw_config(@codec, @index); + }; + + vectors.avcodec_get_hw_frames_parameters = (AVCodecContext* @avctx, AVBufferRef* @device_ref, AVPixelFormat @hw_pix_fmt, AVBufferRef** @out_frames_ref) => + { + vectors.avcodec_get_hw_frames_parameters = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_hw_frames_parameters", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_hw_frames_parameters(@avctx, @device_ref, @hw_pix_fmt, @out_frames_ref); + }; + + vectors.avcodec_get_name = (AVCodecID @id) => + { + vectors.avcodec_get_name = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_name(@id); + }; + + vectors.avcodec_get_subtitle_rect_class = () => + { + vectors.avcodec_get_subtitle_rect_class = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_subtitle_rect_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_subtitle_rect_class(); + }; + + vectors.avcodec_get_type = (AVCodecID @codec_id) => + { + vectors.avcodec_get_type = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_type", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_type(@codec_id); + }; + + vectors.avcodec_is_open = (AVCodecContext* @s) => + { + vectors.avcodec_is_open = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_is_open", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_is_open(@s); + }; + + vectors.avcodec_license = () => + { + vectors.avcodec_license = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_license", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_license(); + }; + + vectors.avcodec_open2 = (AVCodecContext* @avctx, AVCodec* @codec, AVDictionary** @options) => + { + vectors.avcodec_open2 = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_open2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_open2(@avctx, @codec, @options); + }; + + vectors.avcodec_parameters_alloc = () => + { + vectors.avcodec_parameters_alloc = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_parameters_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_parameters_alloc(); + }; + + vectors.avcodec_parameters_copy = (AVCodecParameters* @dst, AVCodecParameters* @src) => + { + vectors.avcodec_parameters_copy = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_parameters_copy", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_parameters_copy(@dst, @src); + }; + + vectors.avcodec_parameters_free = (AVCodecParameters** @par) => + { + vectors.avcodec_parameters_free = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_parameters_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avcodec_parameters_free(@par); + }; + + vectors.avcodec_parameters_from_context = (AVCodecParameters* @par, AVCodecContext* @codec) => + { + vectors.avcodec_parameters_from_context = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_parameters_from_context", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_parameters_from_context(@par, @codec); + }; + + vectors.avcodec_parameters_to_context = (AVCodecContext* @codec, AVCodecParameters* @par) => + { + vectors.avcodec_parameters_to_context = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_parameters_to_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_parameters_to_context(@codec, @par); + }; + + vectors.avcodec_pix_fmt_to_codec_tag = (AVPixelFormat @pix_fmt) => + { + vectors.avcodec_pix_fmt_to_codec_tag = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_pix_fmt_to_codec_tag", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_pix_fmt_to_codec_tag(@pix_fmt); + }; + + vectors.avcodec_profile_name = (AVCodecID @codec_id, int @profile) => + { + vectors.avcodec_profile_name = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_profile_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_profile_name(@codec_id, @profile); + }; + + vectors.avcodec_receive_frame = (AVCodecContext* @avctx, AVFrame* @frame) => + { + vectors.avcodec_receive_frame = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_receive_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_receive_frame(@avctx, @frame); + }; + + vectors.avcodec_receive_packet = (AVCodecContext* @avctx, AVPacket* @avpkt) => + { + vectors.avcodec_receive_packet = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_receive_packet", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_receive_packet(@avctx, @avpkt); + }; + + vectors.avcodec_send_frame = (AVCodecContext* @avctx, AVFrame* @frame) => + { + vectors.avcodec_send_frame = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_send_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_send_frame(@avctx, @frame); + }; + + vectors.avcodec_send_packet = (AVCodecContext* @avctx, AVPacket* @avpkt) => + { + vectors.avcodec_send_packet = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_send_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_send_packet(@avctx, @avpkt); + }; + + vectors.avcodec_string = (byte* @buf, int @buf_size, AVCodecContext* @enc, int @encode) => + { + vectors.avcodec_string = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avcodec_string(@buf, @buf_size, @enc, @encode); + }; + + vectors.avcodec_version = () => + { + vectors.avcodec_version = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_version(); + }; + + vectors.avdevice_app_to_dev_control_message = (AVFormatContext* @s, AVAppToDevMessageType @type, void* @data, ulong @data_size) => + { + vectors.avdevice_app_to_dev_control_message = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_app_to_dev_control_message", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avdevice_app_to_dev_control_message(@s, @type, @data, @data_size); + }; + + vectors.avdevice_capabilities_create = (AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s, AVDictionary** @device_options) => + { + vectors.avdevice_capabilities_create = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_capabilities_create", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_capabilities_create(@caps, @s, @device_options); + }; + + vectors.avdevice_capabilities_free = (AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s) => + { + vectors.avdevice_capabilities_free = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_capabilities_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avdevice_capabilities_free(@caps, @s); + }; + + vectors.avdevice_configuration = () => + { + vectors.avdevice_configuration = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_configuration(); + }; + + vectors.avdevice_dev_to_app_control_message = (AVFormatContext* @s, AVDevToAppMessageType @type, void* @data, ulong @data_size) => + { + vectors.avdevice_dev_to_app_control_message = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_dev_to_app_control_message", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_dev_to_app_control_message(@s, @type, @data, @data_size); + }; + + vectors.avdevice_free_list_devices = (AVDeviceInfoList** @device_list) => + { + vectors.avdevice_free_list_devices = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_free_list_devices", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avdevice_free_list_devices(@device_list); + }; + + vectors.avdevice_license = () => + { + vectors.avdevice_license = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_license(); + }; + + vectors.avdevice_list_devices = (AVFormatContext* @s, AVDeviceInfoList** @device_list) => + { + vectors.avdevice_list_devices = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_list_devices", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_list_devices(@s, @device_list); + }; + + vectors.avdevice_list_input_sources = (AVInputFormat* @device, string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list) => + { + vectors.avdevice_list_input_sources = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_list_input_sources", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_list_input_sources(@device, @device_name, @device_options, @device_list); + }; + + vectors.avdevice_list_output_sinks = (AVOutputFormat* @device, string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list) => + { + vectors.avdevice_list_output_sinks = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_list_output_sinks", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_list_output_sinks(@device, @device_name, @device_options, @device_list); + }; + + vectors.avdevice_register_all = () => + { + vectors.avdevice_register_all = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_register_all", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avdevice_register_all(); + }; + + vectors.avdevice_version = () => + { + vectors.avdevice_version = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_version(); + }; + + vectors.avfilter_config_links = (AVFilterContext* @filter) => + { + vectors.avfilter_config_links = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_config_links", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_config_links(@filter); + }; + + vectors.avfilter_configuration = () => + { + vectors.avfilter_configuration = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_configuration(); + }; + + vectors.avfilter_filter_pad_count = (AVFilter* @filter, int @is_output) => + { + vectors.avfilter_filter_pad_count = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_filter_pad_count", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_filter_pad_count(@filter, @is_output); + }; + + vectors.avfilter_free = (AVFilterContext* @filter) => + { + vectors.avfilter_free = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avfilter_free(@filter); + }; + + vectors.avfilter_get_by_name = (string @name) => + { + vectors.avfilter_get_by_name = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_get_by_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avfilter_get_by_name(@name); + }; + + vectors.avfilter_get_class = () => + { + vectors.avfilter_get_class = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_get_class(); + }; + + vectors.avfilter_graph_alloc = () => + { + vectors.avfilter_graph_alloc = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_alloc(); + }; + + vectors.avfilter_graph_alloc_filter = (AVFilterGraph* @graph, AVFilter* @filter, string @name) => + { + vectors.avfilter_graph_alloc_filter = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_alloc_filter", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_alloc_filter(@graph, @filter, @name); + }; + + vectors.avfilter_graph_config = (AVFilterGraph* @graphctx, void* @log_ctx) => + { + vectors.avfilter_graph_config = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_config", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_config(@graphctx, @log_ctx); + }; + + vectors.avfilter_graph_create_filter = (AVFilterContext** @filt_ctx, AVFilter* @filt, string @name, string @args, void* @opaque, AVFilterGraph* @graph_ctx) => + { + vectors.avfilter_graph_create_filter = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_create_filter", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_create_filter(@filt_ctx, @filt, @name, @args, @opaque, @graph_ctx); + }; + + vectors.avfilter_graph_dump = (AVFilterGraph* @graph, string @options) => + { + vectors.avfilter_graph_dump = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_dump", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_dump(@graph, @options); + }; + + vectors.avfilter_graph_free = (AVFilterGraph** @graph) => + { + vectors.avfilter_graph_free = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avfilter_graph_free(@graph); + }; + + vectors.avfilter_graph_get_filter = (AVFilterGraph* @graph, string @name) => + { + vectors.avfilter_graph_get_filter = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_get_filter", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_get_filter(@graph, @name); + }; + + vectors.avfilter_graph_parse = (AVFilterGraph* @graph, string @filters, AVFilterInOut* @inputs, AVFilterInOut* @outputs, void* @log_ctx) => + { + vectors.avfilter_graph_parse = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_parse", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_parse(@graph, @filters, @inputs, @outputs, @log_ctx); + }; + + vectors.avfilter_graph_parse_ptr = (AVFilterGraph* @graph, string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs, void* @log_ctx) => + { + vectors.avfilter_graph_parse_ptr = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_parse_ptr", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_parse_ptr(@graph, @filters, @inputs, @outputs, @log_ctx); + }; + + vectors.avfilter_graph_parse2 = (AVFilterGraph* @graph, string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs) => + { + vectors.avfilter_graph_parse2 = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_parse2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_parse2(@graph, @filters, @inputs, @outputs); + }; + + vectors.avfilter_graph_queue_command = (AVFilterGraph* @graph, string @target, string @cmd, string @arg, int @flags, double @ts) => + { + vectors.avfilter_graph_queue_command = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_queue_command", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_queue_command(@graph, @target, @cmd, @arg, @flags, @ts); + }; + + vectors.avfilter_graph_request_oldest = (AVFilterGraph* @graph) => + { + vectors.avfilter_graph_request_oldest = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_request_oldest", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_request_oldest(@graph); + }; + + vectors.avfilter_graph_send_command = (AVFilterGraph* @graph, string @target, string @cmd, string @arg, byte* @res, int @res_len, int @flags) => + { + vectors.avfilter_graph_send_command = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_send_command", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_send_command(@graph, @target, @cmd, @arg, @res, @res_len, @flags); + }; + + vectors.avfilter_graph_set_auto_convert = (AVFilterGraph* @graph, uint @flags) => + { + vectors.avfilter_graph_set_auto_convert = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_set_auto_convert", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avfilter_graph_set_auto_convert(@graph, @flags); + }; + + vectors.avfilter_init_dict = (AVFilterContext* @ctx, AVDictionary** @options) => + { + vectors.avfilter_init_dict = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_init_dict", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_init_dict(@ctx, @options); + }; + + vectors.avfilter_init_str = (AVFilterContext* @ctx, string @args) => + { + vectors.avfilter_init_str = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_init_str", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_init_str(@ctx, @args); + }; + + vectors.avfilter_inout_alloc = () => + { + vectors.avfilter_inout_alloc = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_inout_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_inout_alloc(); + }; + + vectors.avfilter_inout_free = (AVFilterInOut** @inout) => + { + vectors.avfilter_inout_free = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_inout_free", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avfilter_inout_free(@inout); + }; + + vectors.avfilter_insert_filter = (AVFilterLink* @link, AVFilterContext* @filt, uint @filt_srcpad_idx, uint @filt_dstpad_idx) => + { + vectors.avfilter_insert_filter = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_insert_filter", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_insert_filter(@link, @filt, @filt_srcpad_idx, @filt_dstpad_idx); + }; + + vectors.avfilter_license = () => + { + vectors.avfilter_license = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_license(); + }; + + vectors.avfilter_link = (AVFilterContext* @src, uint @srcpad, AVFilterContext* @dst, uint @dstpad) => + { + vectors.avfilter_link = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_link", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_link(@src, @srcpad, @dst, @dstpad); + }; + + vectors.avfilter_link_free = (AVFilterLink** @link) => + { + vectors.avfilter_link_free = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_link_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avfilter_link_free(@link); + }; + + vectors.avfilter_pad_count = (AVFilterPad* @pads) => + { + vectors.avfilter_pad_count = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_pad_count", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_pad_count(@pads); + }; + + vectors.avfilter_pad_get_name = (AVFilterPad* @pads, int @pad_idx) => + { + vectors.avfilter_pad_get_name = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_pad_get_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avfilter_pad_get_name(@pads, @pad_idx); + }; + + vectors.avfilter_pad_get_type = (AVFilterPad* @pads, int @pad_idx) => + { + vectors.avfilter_pad_get_type = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_pad_get_type", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_pad_get_type(@pads, @pad_idx); + }; + + vectors.avfilter_process_command = (AVFilterContext* @filter, string @cmd, string @arg, byte* @res, int @res_len, int @flags) => + { + vectors.avfilter_process_command = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_process_command", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_process_command(@filter, @cmd, @arg, @res, @res_len, @flags); + }; + + vectors.avfilter_version = () => + { + vectors.avfilter_version = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_version(); + }; + + vectors.avformat_alloc_context = () => + { + vectors.avformat_alloc_context = FunctionResolver.GetFunctionDelegate("avformat", "avformat_alloc_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_alloc_context(); + }; + + vectors.avformat_alloc_output_context2 = (AVFormatContext** @ctx, AVOutputFormat* @oformat, string @format_name, string @filename) => + { + vectors.avformat_alloc_output_context2 = FunctionResolver.GetFunctionDelegate("avformat", "avformat_alloc_output_context2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avformat_alloc_output_context2(@ctx, @oformat, @format_name, @filename); + }; + + vectors.avformat_close_input = (AVFormatContext** @s) => + { + vectors.avformat_close_input = FunctionResolver.GetFunctionDelegate("avformat", "avformat_close_input", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avformat_close_input(@s); + }; + + vectors.avformat_configuration = () => + { + vectors.avformat_configuration = FunctionResolver.GetFunctionDelegate("avformat", "avformat_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_configuration(); + }; + + vectors.avformat_find_stream_info = (AVFormatContext* @ic, AVDictionary** @options) => + { + vectors.avformat_find_stream_info = FunctionResolver.GetFunctionDelegate("avformat", "avformat_find_stream_info", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_find_stream_info(@ic, @options); + }; + + vectors.avformat_flush = (AVFormatContext* @s) => + { + vectors.avformat_flush = FunctionResolver.GetFunctionDelegate("avformat", "avformat_flush", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_flush(@s); + }; + + vectors.avformat_free_context = (AVFormatContext* @s) => + { + vectors.avformat_free_context = FunctionResolver.GetFunctionDelegate("avformat", "avformat_free_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avformat_free_context(@s); + }; + + vectors.avformat_get_class = () => + { + vectors.avformat_get_class = FunctionResolver.GetFunctionDelegate("avformat", "avformat_get_class", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avformat_get_class(); + }; + + vectors.avformat_get_mov_audio_tags = () => + { + vectors.avformat_get_mov_audio_tags = FunctionResolver.GetFunctionDelegate("avformat", "avformat_get_mov_audio_tags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_get_mov_audio_tags(); + }; + + vectors.avformat_get_mov_video_tags = () => + { + vectors.avformat_get_mov_video_tags = FunctionResolver.GetFunctionDelegate("avformat", "avformat_get_mov_video_tags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_get_mov_video_tags(); + }; + + vectors.avformat_get_riff_audio_tags = () => + { + vectors.avformat_get_riff_audio_tags = FunctionResolver.GetFunctionDelegate("avformat", "avformat_get_riff_audio_tags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_get_riff_audio_tags(); + }; + + vectors.avformat_get_riff_video_tags = () => + { + vectors.avformat_get_riff_video_tags = FunctionResolver.GetFunctionDelegate("avformat", "avformat_get_riff_video_tags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_get_riff_video_tags(); + }; + + vectors.avformat_index_get_entries_count = (AVStream* @st) => + { + vectors.avformat_index_get_entries_count = FunctionResolver.GetFunctionDelegate("avformat", "avformat_index_get_entries_count", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_index_get_entries_count(@st); + }; + + vectors.avformat_index_get_entry = (AVStream* @st, int @idx) => + { + vectors.avformat_index_get_entry = FunctionResolver.GetFunctionDelegate("avformat", "avformat_index_get_entry", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avformat_index_get_entry(@st, @idx); + }; + + vectors.avformat_index_get_entry_from_timestamp = (AVStream* @st, long @wanted_timestamp, int @flags) => + { + vectors.avformat_index_get_entry_from_timestamp = FunctionResolver.GetFunctionDelegate("avformat", "avformat_index_get_entry_from_timestamp", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_index_get_entry_from_timestamp(@st, @wanted_timestamp, @flags); + }; + + vectors.avformat_init_output = (AVFormatContext* @s, AVDictionary** @options) => + { + vectors.avformat_init_output = FunctionResolver.GetFunctionDelegate("avformat", "avformat_init_output", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_init_output(@s, @options); + }; + + vectors.avformat_license = () => + { + vectors.avformat_license = FunctionResolver.GetFunctionDelegate("avformat", "avformat_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_license(); + }; + + vectors.avformat_match_stream_specifier = (AVFormatContext* @s, AVStream* @st, string @spec) => + { + vectors.avformat_match_stream_specifier = FunctionResolver.GetFunctionDelegate("avformat", "avformat_match_stream_specifier", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_match_stream_specifier(@s, @st, @spec); + }; + + vectors.avformat_network_deinit = () => + { + vectors.avformat_network_deinit = FunctionResolver.GetFunctionDelegate("avformat", "avformat_network_deinit", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avformat_network_deinit(); + }; + + vectors.avformat_network_init = () => + { + vectors.avformat_network_init = FunctionResolver.GetFunctionDelegate("avformat", "avformat_network_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_network_init(); + }; + + vectors.avformat_new_stream = (AVFormatContext* @s, AVCodec* @c) => + { + vectors.avformat_new_stream = FunctionResolver.GetFunctionDelegate("avformat", "avformat_new_stream", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_new_stream(@s, @c); + }; + + vectors.avformat_open_input = (AVFormatContext** @ps, string @url, AVInputFormat* @fmt, AVDictionary** @options) => + { + vectors.avformat_open_input = FunctionResolver.GetFunctionDelegate("avformat", "avformat_open_input", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_open_input(@ps, @url, @fmt, @options); + }; + + vectors.avformat_query_codec = (AVOutputFormat* @ofmt, AVCodecID @codec_id, int @std_compliance) => + { + vectors.avformat_query_codec = FunctionResolver.GetFunctionDelegate("avformat", "avformat_query_codec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_query_codec(@ofmt, @codec_id, @std_compliance); + }; + + vectors.avformat_queue_attached_pictures = (AVFormatContext* @s) => + { + vectors.avformat_queue_attached_pictures = FunctionResolver.GetFunctionDelegate("avformat", "avformat_queue_attached_pictures", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avformat_queue_attached_pictures(@s); + }; + + vectors.avformat_seek_file = (AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags) => + { + vectors.avformat_seek_file = FunctionResolver.GetFunctionDelegate("avformat", "avformat_seek_file", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_seek_file(@s, @stream_index, @min_ts, @ts, @max_ts, @flags); + }; + + vectors.avformat_transfer_internal_stream_timing_info = (AVOutputFormat* @ofmt, AVStream* @ost, AVStream* @ist, AVTimebaseSource @copy_tb) => + { + vectors.avformat_transfer_internal_stream_timing_info = FunctionResolver.GetFunctionDelegate("avformat", "avformat_transfer_internal_stream_timing_info", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_transfer_internal_stream_timing_info(@ofmt, @ost, @ist, @copy_tb); + }; + + vectors.avformat_version = () => + { + vectors.avformat_version = FunctionResolver.GetFunctionDelegate("avformat", "avformat_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_version(); + }; + + vectors.avformat_write_header = (AVFormatContext* @s, AVDictionary** @options) => + { + vectors.avformat_write_header = FunctionResolver.GetFunctionDelegate("avformat", "avformat_write_header", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_write_header(@s, @options); + }; + + vectors.avio_accept = (AVIOContext* @s, AVIOContext** @c) => + { + vectors.avio_accept = FunctionResolver.GetFunctionDelegate("avformat", "avio_accept", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_accept(@s, @c); + }; + + vectors.avio_alloc_context = (byte* @buffer, int @buffer_size, int @write_flag, void* @opaque, avio_alloc_context_read_packet_func @read_packet, avio_alloc_context_write_packet_func @write_packet, avio_alloc_context_seek_func @seek) => + { + vectors.avio_alloc_context = FunctionResolver.GetFunctionDelegate("avformat", "avio_alloc_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_alloc_context(@buffer, @buffer_size, @write_flag, @opaque, @read_packet, @write_packet, @seek); + }; + + vectors.avio_check = (string @url, int @flags) => + { + vectors.avio_check = FunctionResolver.GetFunctionDelegate("avformat", "avio_check", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_check(@url, @flags); + }; + + vectors.avio_close = (AVIOContext* @s) => + { + vectors.avio_close = FunctionResolver.GetFunctionDelegate("avformat", "avio_close", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_close(@s); + }; + + vectors.avio_close_dir = (AVIODirContext** @s) => + { + vectors.avio_close_dir = FunctionResolver.GetFunctionDelegate("avformat", "avio_close_dir", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_close_dir(@s); + }; + + vectors.avio_close_dyn_buf = (AVIOContext* @s, byte** @pbuffer) => + { + vectors.avio_close_dyn_buf = FunctionResolver.GetFunctionDelegate("avformat", "avio_close_dyn_buf", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_close_dyn_buf(@s, @pbuffer); + }; + + vectors.avio_closep = (AVIOContext** @s) => + { + vectors.avio_closep = FunctionResolver.GetFunctionDelegate("avformat", "avio_closep", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_closep(@s); + }; + + vectors.avio_context_free = (AVIOContext** @s) => + { + vectors.avio_context_free = FunctionResolver.GetFunctionDelegate("avformat", "avio_context_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_context_free(@s); + }; + + vectors.avio_enum_protocols = (void** @opaque, int @output) => + { + vectors.avio_enum_protocols = FunctionResolver.GetFunctionDelegate("avformat", "avio_enum_protocols", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_enum_protocols(@opaque, @output); + }; + + vectors.avio_feof = (AVIOContext* @s) => + { + vectors.avio_feof = FunctionResolver.GetFunctionDelegate("avformat", "avio_feof", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_feof(@s); + }; + + vectors.avio_find_protocol_name = (string @url) => + { + vectors.avio_find_protocol_name = FunctionResolver.GetFunctionDelegate("avformat", "avio_find_protocol_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_find_protocol_name(@url); + }; + + vectors.avio_flush = (AVIOContext* @s) => + { + vectors.avio_flush = FunctionResolver.GetFunctionDelegate("avformat", "avio_flush", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_flush(@s); + }; + + vectors.avio_free_directory_entry = (AVIODirEntry** @entry) => + { + vectors.avio_free_directory_entry = FunctionResolver.GetFunctionDelegate("avformat", "avio_free_directory_entry", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avio_free_directory_entry(@entry); + }; + + vectors.avio_get_dyn_buf = (AVIOContext* @s, byte** @pbuffer) => + { + vectors.avio_get_dyn_buf = FunctionResolver.GetFunctionDelegate("avformat", "avio_get_dyn_buf", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_get_dyn_buf(@s, @pbuffer); + }; + + vectors.avio_get_str = (AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => + { + vectors.avio_get_str = FunctionResolver.GetFunctionDelegate("avformat", "avio_get_str", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_get_str(@pb, @maxlen, @buf, @buflen); + }; + + vectors.avio_get_str16be = (AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => + { + vectors.avio_get_str16be = FunctionResolver.GetFunctionDelegate("avformat", "avio_get_str16be", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_get_str16be(@pb, @maxlen, @buf, @buflen); + }; + + vectors.avio_get_str16le = (AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => + { + vectors.avio_get_str16le = FunctionResolver.GetFunctionDelegate("avformat", "avio_get_str16le", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_get_str16le(@pb, @maxlen, @buf, @buflen); + }; + + vectors.avio_handshake = (AVIOContext* @c) => + { + vectors.avio_handshake = FunctionResolver.GetFunctionDelegate("avformat", "avio_handshake", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_handshake(@c); + }; + + vectors.avio_open = (AVIOContext** @s, string @url, int @flags) => + { + vectors.avio_open = FunctionResolver.GetFunctionDelegate("avformat", "avio_open", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_open(@s, @url, @flags); + }; + + vectors.avio_open_dir = (AVIODirContext** @s, string @url, AVDictionary** @options) => + { + vectors.avio_open_dir = FunctionResolver.GetFunctionDelegate("avformat", "avio_open_dir", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_open_dir(@s, @url, @options); + }; + + vectors.avio_open_dyn_buf = (AVIOContext** @s) => + { + vectors.avio_open_dyn_buf = FunctionResolver.GetFunctionDelegate("avformat", "avio_open_dyn_buf", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_open_dyn_buf(@s); + }; + + vectors.avio_open2 = (AVIOContext** @s, string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options) => + { + vectors.avio_open2 = FunctionResolver.GetFunctionDelegate("avformat", "avio_open2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_open2(@s, @url, @flags, @int_cb, @options); + }; + + vectors.avio_pause = (AVIOContext* @h, int @pause) => + { + vectors.avio_pause = FunctionResolver.GetFunctionDelegate("avformat", "avio_pause", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_pause(@h, @pause); + }; + + vectors.avio_print_string_array = (AVIOContext* @s, byte*[] @strings) => + { + vectors.avio_print_string_array = FunctionResolver.GetFunctionDelegate("avformat", "avio_print_string_array", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_print_string_array(@s, @strings); + }; + + vectors.avio_printf = (AVIOContext* @s, string @fmt) => + { + vectors.avio_printf = FunctionResolver.GetFunctionDelegate("avformat", "avio_printf", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_printf(@s, @fmt); + }; + + vectors.avio_protocol_get_class = (string @name) => + { + vectors.avio_protocol_get_class = FunctionResolver.GetFunctionDelegate("avformat", "avio_protocol_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_protocol_get_class(@name); + }; + + vectors.avio_put_str = (AVIOContext* @s, string @str) => + { + vectors.avio_put_str = FunctionResolver.GetFunctionDelegate("avformat", "avio_put_str", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_put_str(@s, @str); + }; + + vectors.avio_put_str16be = (AVIOContext* @s, string @str) => + { + vectors.avio_put_str16be = FunctionResolver.GetFunctionDelegate("avformat", "avio_put_str16be", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_put_str16be(@s, @str); + }; + + vectors.avio_put_str16le = (AVIOContext* @s, string @str) => + { + vectors.avio_put_str16le = FunctionResolver.GetFunctionDelegate("avformat", "avio_put_str16le", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_put_str16le(@s, @str); + }; + + vectors.avio_r8 = (AVIOContext* @s) => + { + vectors.avio_r8 = FunctionResolver.GetFunctionDelegate("avformat", "avio_r8", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_r8(@s); + }; + + vectors.avio_rb16 = (AVIOContext* @s) => + { + vectors.avio_rb16 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rb16", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rb16(@s); + }; + + vectors.avio_rb24 = (AVIOContext* @s) => + { + vectors.avio_rb24 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rb24", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_rb24(@s); + }; + + vectors.avio_rb32 = (AVIOContext* @s) => + { + vectors.avio_rb32 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rb32", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rb32(@s); + }; + + vectors.avio_rb64 = (AVIOContext* @s) => + { + vectors.avio_rb64 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rb64", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rb64(@s); + }; + + vectors.avio_read = (AVIOContext* @s, byte* @buf, int @size) => + { + vectors.avio_read = FunctionResolver.GetFunctionDelegate("avformat", "avio_read", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_read(@s, @buf, @size); + }; + + vectors.avio_read_dir = (AVIODirContext* @s, AVIODirEntry** @next) => + { + vectors.avio_read_dir = FunctionResolver.GetFunctionDelegate("avformat", "avio_read_dir", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_read_dir(@s, @next); + }; + + vectors.avio_read_partial = (AVIOContext* @s, byte* @buf, int @size) => + { + vectors.avio_read_partial = FunctionResolver.GetFunctionDelegate("avformat", "avio_read_partial", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_read_partial(@s, @buf, @size); + }; + + vectors.avio_read_to_bprint = (AVIOContext* @h, AVBPrint* @pb, ulong @max_size) => + { + vectors.avio_read_to_bprint = FunctionResolver.GetFunctionDelegate("avformat", "avio_read_to_bprint", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_read_to_bprint(@h, @pb, @max_size); + }; + + vectors.avio_rl16 = (AVIOContext* @s) => + { + vectors.avio_rl16 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rl16", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rl16(@s); + }; + + vectors.avio_rl24 = (AVIOContext* @s) => + { + vectors.avio_rl24 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rl24", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rl24(@s); + }; + + vectors.avio_rl32 = (AVIOContext* @s) => + { + vectors.avio_rl32 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rl32", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rl32(@s); + }; + + vectors.avio_rl64 = (AVIOContext* @s) => + { + vectors.avio_rl64 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rl64", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rl64(@s); + }; + + vectors.avio_seek = (AVIOContext* @s, long @offset, int @whence) => + { + vectors.avio_seek = FunctionResolver.GetFunctionDelegate("avformat", "avio_seek", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_seek(@s, @offset, @whence); + }; + + vectors.avio_seek_time = (AVIOContext* @h, int @stream_index, long @timestamp, int @flags) => + { + vectors.avio_seek_time = FunctionResolver.GetFunctionDelegate("avformat", "avio_seek_time", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_seek_time(@h, @stream_index, @timestamp, @flags); + }; + + vectors.avio_size = (AVIOContext* @s) => + { + vectors.avio_size = FunctionResolver.GetFunctionDelegate("avformat", "avio_size", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_size(@s); + }; + + vectors.avio_skip = (AVIOContext* @s, long @offset) => + { + vectors.avio_skip = FunctionResolver.GetFunctionDelegate("avformat", "avio_skip", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_skip(@s, @offset); + }; + + vectors.avio_vprintf = (AVIOContext* @s, string @fmt, byte* @ap) => + { + vectors.avio_vprintf = FunctionResolver.GetFunctionDelegate("avformat", "avio_vprintf", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_vprintf(@s, @fmt, @ap); + }; + + vectors.avio_w8 = (AVIOContext* @s, int @b) => + { + vectors.avio_w8 = FunctionResolver.GetFunctionDelegate("avformat", "avio_w8", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_w8(@s, @b); + }; + + vectors.avio_wb16 = (AVIOContext* @s, uint @val) => + { + vectors.avio_wb16 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wb16", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wb16(@s, @val); + }; + + vectors.avio_wb24 = (AVIOContext* @s, uint @val) => + { + vectors.avio_wb24 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wb24", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wb24(@s, @val); + }; + + vectors.avio_wb32 = (AVIOContext* @s, uint @val) => + { + vectors.avio_wb32 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wb32", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wb32(@s, @val); + }; + + vectors.avio_wb64 = (AVIOContext* @s, ulong @val) => + { + vectors.avio_wb64 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wb64", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avio_wb64(@s, @val); + }; + + vectors.avio_wl16 = (AVIOContext* @s, uint @val) => + { + vectors.avio_wl16 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wl16", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wl16(@s, @val); + }; + + vectors.avio_wl24 = (AVIOContext* @s, uint @val) => + { + vectors.avio_wl24 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wl24", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wl24(@s, @val); + }; + + vectors.avio_wl32 = (AVIOContext* @s, uint @val) => + { + vectors.avio_wl32 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wl32", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wl32(@s, @val); + }; + + vectors.avio_wl64 = (AVIOContext* @s, ulong @val) => + { + vectors.avio_wl64 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wl64", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wl64(@s, @val); + }; + + vectors.avio_write = (AVIOContext* @s, byte* @buf, int @size) => + { + vectors.avio_write = FunctionResolver.GetFunctionDelegate("avformat", "avio_write", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_write(@s, @buf, @size); + }; + + vectors.avio_write_marker = (AVIOContext* @s, long @time, AVIODataMarkerType @type) => + { + vectors.avio_write_marker = FunctionResolver.GetFunctionDelegate("avformat", "avio_write_marker", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_write_marker(@s, @time, @type); + }; + + vectors.avsubtitle_free = (AVSubtitle* @sub) => + { + vectors.avsubtitle_free = FunctionResolver.GetFunctionDelegate("avcodec", "avsubtitle_free", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avsubtitle_free(@sub); + }; + + vectors.avutil_configuration = () => + { + vectors.avutil_configuration = FunctionResolver.GetFunctionDelegate("avutil", "avutil_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avutil_configuration(); + }; + + vectors.avutil_license = () => + { + vectors.avutil_license = FunctionResolver.GetFunctionDelegate("avutil", "avutil_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avutil_license(); + }; + + vectors.avutil_version = () => + { + vectors.avutil_version = FunctionResolver.GetFunctionDelegate("avutil", "avutil_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avutil_version(); + }; + + vectors.postproc_configuration = () => + { + vectors.postproc_configuration = FunctionResolver.GetFunctionDelegate("postproc", "postproc_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.postproc_configuration(); + }; + + vectors.postproc_license = () => + { + vectors.postproc_license = FunctionResolver.GetFunctionDelegate("postproc", "postproc_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.postproc_license(); + }; + + vectors.postproc_version = () => + { + vectors.postproc_version = FunctionResolver.GetFunctionDelegate("postproc", "postproc_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.postproc_version(); + }; + + vectors.pp_free_context = (void* @ppContext) => + { + vectors.pp_free_context = FunctionResolver.GetFunctionDelegate("postproc", "pp_free_context", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.pp_free_context(@ppContext); + }; + + vectors.pp_free_mode = (void* @mode) => + { + vectors.pp_free_mode = FunctionResolver.GetFunctionDelegate("postproc", "pp_free_mode", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.pp_free_mode(@mode); + }; + + vectors.pp_get_context = (int @width, int @height, int @flags) => + { + vectors.pp_get_context = FunctionResolver.GetFunctionDelegate("postproc", "pp_get_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.pp_get_context(@width, @height, @flags); + }; + + vectors.pp_get_mode_by_name_and_quality = (string @name, int @quality) => + { + vectors.pp_get_mode_by_name_and_quality = FunctionResolver.GetFunctionDelegate("postproc", "pp_get_mode_by_name_and_quality", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.pp_get_mode_by_name_and_quality(@name, @quality); + }; + + vectors.pp_postprocess = (in byte_ptr3 @src, in int3 @srcStride, ref byte_ptr3 @dst, in int3 @dstStride, int @horizontalSize, int @verticalSize, sbyte* @QP_store, int @QP_stride, void* @mode, void* @ppContext, int @pict_type) => + { + vectors.pp_postprocess = FunctionResolver.GetFunctionDelegate("postproc", "pp_postprocess", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.pp_postprocess(@src, @srcStride, ref @dst, @dstStride, @horizontalSize, @verticalSize, @QP_store, @QP_stride, @mode, @ppContext, @pict_type); + }; + + vectors.swr_alloc = () => + { + vectors.swr_alloc = FunctionResolver.GetFunctionDelegate("swresample", "swr_alloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.swr_alloc(); + }; + + vectors.swr_alloc_set_opts = (SwrContext* @s, long @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, long @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx) => + { + vectors.swr_alloc_set_opts = FunctionResolver.GetFunctionDelegate("swresample", "swr_alloc_set_opts", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_alloc_set_opts(@s, @out_ch_layout, @out_sample_fmt, @out_sample_rate, @in_ch_layout, @in_sample_fmt, @in_sample_rate, @log_offset, @log_ctx); + }; + + vectors.swr_alloc_set_opts2 = (SwrContext** @ps, AVChannelLayout* @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, AVChannelLayout* @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx) => + { + vectors.swr_alloc_set_opts2 = FunctionResolver.GetFunctionDelegate("swresample", "swr_alloc_set_opts2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_alloc_set_opts2(@ps, @out_ch_layout, @out_sample_fmt, @out_sample_rate, @in_ch_layout, @in_sample_fmt, @in_sample_rate, @log_offset, @log_ctx); + }; + + vectors.swr_build_matrix = (ulong @in_layout, ulong @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @rematrix_maxval, double @rematrix_volume, double* @matrix, int @stride, AVMatrixEncoding @matrix_encoding, void* @log_ctx) => + { + vectors.swr_build_matrix = FunctionResolver.GetFunctionDelegate("swresample", "swr_build_matrix", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.swr_build_matrix(@in_layout, @out_layout, @center_mix_level, @surround_mix_level, @lfe_mix_level, @rematrix_maxval, @rematrix_volume, @matrix, @stride, @matrix_encoding, @log_ctx); + }; + + vectors.swr_build_matrix2 = (AVChannelLayout* @in_layout, AVChannelLayout* @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @maxval, double @rematrix_volume, double* @matrix, long @stride, AVMatrixEncoding @matrix_encoding, void* @log_context) => + { + vectors.swr_build_matrix2 = FunctionResolver.GetFunctionDelegate("swresample", "swr_build_matrix2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_build_matrix2(@in_layout, @out_layout, @center_mix_level, @surround_mix_level, @lfe_mix_level, @maxval, @rematrix_volume, @matrix, @stride, @matrix_encoding, @log_context); + }; + + vectors.swr_close = (SwrContext* @s) => + { + vectors.swr_close = FunctionResolver.GetFunctionDelegate("swresample", "swr_close", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.swr_close(@s); + }; + + vectors.swr_config_frame = (SwrContext* @swr, AVFrame* @out, AVFrame* @in) => + { + vectors.swr_config_frame = FunctionResolver.GetFunctionDelegate("swresample", "swr_config_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_config_frame(@swr, @out, @in); + }; + + vectors.swr_convert = (SwrContext* @s, byte** @out, int @out_count, byte** @in, int @in_count) => + { + vectors.swr_convert = FunctionResolver.GetFunctionDelegate("swresample", "swr_convert", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.swr_convert(@s, @out, @out_count, @in, @in_count); + }; + + vectors.swr_convert_frame = (SwrContext* @swr, AVFrame* @output, AVFrame* @input) => + { + vectors.swr_convert_frame = FunctionResolver.GetFunctionDelegate("swresample", "swr_convert_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_convert_frame(@swr, @output, @input); + }; + + vectors.swr_drop_output = (SwrContext* @s, int @count) => + { + vectors.swr_drop_output = FunctionResolver.GetFunctionDelegate("swresample", "swr_drop_output", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_drop_output(@s, @count); + }; + + vectors.swr_free = (SwrContext** @s) => + { + vectors.swr_free = FunctionResolver.GetFunctionDelegate("swresample", "swr_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.swr_free(@s); + }; + + vectors.swr_get_class = () => + { + vectors.swr_get_class = FunctionResolver.GetFunctionDelegate("swresample", "swr_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_get_class(); + }; + + vectors.swr_get_delay = (SwrContext* @s, long @base) => + { + vectors.swr_get_delay = FunctionResolver.GetFunctionDelegate("swresample", "swr_get_delay", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_get_delay(@s, @base); + }; + + vectors.swr_get_out_samples = (SwrContext* @s, int @in_samples) => + { + vectors.swr_get_out_samples = FunctionResolver.GetFunctionDelegate("swresample", "swr_get_out_samples", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.swr_get_out_samples(@s, @in_samples); + }; + + vectors.swr_init = (SwrContext* @s) => + { + vectors.swr_init = FunctionResolver.GetFunctionDelegate("swresample", "swr_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_init(@s); + }; + + vectors.swr_inject_silence = (SwrContext* @s, int @count) => + { + vectors.swr_inject_silence = FunctionResolver.GetFunctionDelegate("swresample", "swr_inject_silence", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_inject_silence(@s, @count); + }; + + vectors.swr_is_initialized = (SwrContext* @s) => + { + vectors.swr_is_initialized = FunctionResolver.GetFunctionDelegate("swresample", "swr_is_initialized", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_is_initialized(@s); + }; + + vectors.swr_next_pts = (SwrContext* @s, long @pts) => + { + vectors.swr_next_pts = FunctionResolver.GetFunctionDelegate("swresample", "swr_next_pts", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_next_pts(@s, @pts); + }; + + vectors.swr_set_channel_mapping = (SwrContext* @s, int* @channel_map) => + { + vectors.swr_set_channel_mapping = FunctionResolver.GetFunctionDelegate("swresample", "swr_set_channel_mapping", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_set_channel_mapping(@s, @channel_map); + }; + + vectors.swr_set_compensation = (SwrContext* @s, int @sample_delta, int @compensation_distance) => + { + vectors.swr_set_compensation = FunctionResolver.GetFunctionDelegate("swresample", "swr_set_compensation", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.swr_set_compensation(@s, @sample_delta, @compensation_distance); + }; + + vectors.swr_set_matrix = (SwrContext* @s, double* @matrix, int @stride) => + { + vectors.swr_set_matrix = FunctionResolver.GetFunctionDelegate("swresample", "swr_set_matrix", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_set_matrix(@s, @matrix, @stride); + }; + + vectors.swresample_configuration = () => + { + vectors.swresample_configuration = FunctionResolver.GetFunctionDelegate("swresample", "swresample_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swresample_configuration(); + }; + + vectors.swresample_license = () => + { + vectors.swresample_license = FunctionResolver.GetFunctionDelegate("swresample", "swresample_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swresample_license(); + }; + + vectors.swresample_version = () => + { + vectors.swresample_version = FunctionResolver.GetFunctionDelegate("swresample", "swresample_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swresample_version(); + }; + + vectors.sws_alloc_context = () => + { + vectors.sws_alloc_context = FunctionResolver.GetFunctionDelegate("swscale", "sws_alloc_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_alloc_context(); + }; + + vectors.sws_allocVec = (int @length) => + { + vectors.sws_allocVec = FunctionResolver.GetFunctionDelegate("swscale", "sws_allocVec", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.sws_allocVec(@length); + }; + + vectors.sws_convertPalette8ToPacked24 = (byte* @src, byte* @dst, int @num_pixels, byte* @palette) => + { + vectors.sws_convertPalette8ToPacked24 = FunctionResolver.GetFunctionDelegate("swscale", "sws_convertPalette8ToPacked24", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_convertPalette8ToPacked24(@src, @dst, @num_pixels, @palette); + }; + + vectors.sws_convertPalette8ToPacked32 = (byte* @src, byte* @dst, int @num_pixels, byte* @palette) => + { + vectors.sws_convertPalette8ToPacked32 = FunctionResolver.GetFunctionDelegate("swscale", "sws_convertPalette8ToPacked32", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_convertPalette8ToPacked32(@src, @dst, @num_pixels, @palette); + }; + + vectors.sws_frame_end = (SwsContext* @c) => + { + vectors.sws_frame_end = FunctionResolver.GetFunctionDelegate("swscale", "sws_frame_end", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_frame_end(@c); + }; + + vectors.sws_frame_start = (SwsContext* @c, AVFrame* @dst, AVFrame* @src) => + { + vectors.sws_frame_start = FunctionResolver.GetFunctionDelegate("swscale", "sws_frame_start", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_frame_start(@c, @dst, @src); + }; + + vectors.sws_freeContext = (SwsContext* @swsContext) => + { + vectors.sws_freeContext = FunctionResolver.GetFunctionDelegate("swscale", "sws_freeContext", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_freeContext(@swsContext); + }; + + vectors.sws_freeFilter = (SwsFilter* @filter) => + { + vectors.sws_freeFilter = FunctionResolver.GetFunctionDelegate("swscale", "sws_freeFilter", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.sws_freeFilter(@filter); + }; + + vectors.sws_freeVec = (SwsVector* @a) => + { + vectors.sws_freeVec = FunctionResolver.GetFunctionDelegate("swscale", "sws_freeVec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_freeVec(@a); + }; + + vectors.sws_get_class = () => + { + vectors.sws_get_class = FunctionResolver.GetFunctionDelegate("swscale", "sws_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_get_class(); + }; + + vectors.sws_getCachedContext = (SwsContext* @context, int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param) => + { + vectors.sws_getCachedContext = FunctionResolver.GetFunctionDelegate("swscale", "sws_getCachedContext", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_getCachedContext(@context, @srcW, @srcH, @srcFormat, @dstW, @dstH, @dstFormat, @flags, @srcFilter, @dstFilter, @param); + }; + + vectors.sws_getCoefficients = (int @colorspace) => + { + vectors.sws_getCoefficients = FunctionResolver.GetFunctionDelegate("swscale", "sws_getCoefficients", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_getCoefficients(@colorspace); + }; + + vectors.sws_getColorspaceDetails = (SwsContext* @c, int** @inv_table, int* @srcRange, int** @table, int* @dstRange, int* @brightness, int* @contrast, int* @saturation) => + { + vectors.sws_getColorspaceDetails = FunctionResolver.GetFunctionDelegate("swscale", "sws_getColorspaceDetails", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.sws_getColorspaceDetails(@c, @inv_table, @srcRange, @table, @dstRange, @brightness, @contrast, @saturation); + }; + + vectors.sws_getContext = (int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param) => + { + vectors.sws_getContext = FunctionResolver.GetFunctionDelegate("swscale", "sws_getContext", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_getContext(@srcW, @srcH, @srcFormat, @dstW, @dstH, @dstFormat, @flags, @srcFilter, @dstFilter, @param); + }; + + vectors.sws_getDefaultFilter = (float @lumaGBlur, float @chromaGBlur, float @lumaSharpen, float @chromaSharpen, float @chromaHShift, float @chromaVShift, int @verbose) => + { + vectors.sws_getDefaultFilter = FunctionResolver.GetFunctionDelegate("swscale", "sws_getDefaultFilter", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_getDefaultFilter(@lumaGBlur, @chromaGBlur, @lumaSharpen, @chromaSharpen, @chromaHShift, @chromaVShift, @verbose); + }; + + vectors.sws_getGaussianVec = (double @variance, double @quality) => + { + vectors.sws_getGaussianVec = FunctionResolver.GetFunctionDelegate("swscale", "sws_getGaussianVec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_getGaussianVec(@variance, @quality); + }; + + vectors.sws_init_context = (SwsContext* @sws_context, SwsFilter* @srcFilter, SwsFilter* @dstFilter) => + { + vectors.sws_init_context = FunctionResolver.GetFunctionDelegate("swscale", "sws_init_context", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.sws_init_context(@sws_context, @srcFilter, @dstFilter); + }; + + vectors.sws_isSupportedEndiannessConversion = (AVPixelFormat @pix_fmt) => + { + vectors.sws_isSupportedEndiannessConversion = FunctionResolver.GetFunctionDelegate("swscale", "sws_isSupportedEndiannessConversion", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_isSupportedEndiannessConversion(@pix_fmt); + }; + + vectors.sws_isSupportedInput = (AVPixelFormat @pix_fmt) => + { + vectors.sws_isSupportedInput = FunctionResolver.GetFunctionDelegate("swscale", "sws_isSupportedInput", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_isSupportedInput(@pix_fmt); + }; + + vectors.sws_isSupportedOutput = (AVPixelFormat @pix_fmt) => + { + vectors.sws_isSupportedOutput = FunctionResolver.GetFunctionDelegate("swscale", "sws_isSupportedOutput", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_isSupportedOutput(@pix_fmt); + }; + + vectors.sws_normalizeVec = (SwsVector* @a, double @height) => + { + vectors.sws_normalizeVec = FunctionResolver.GetFunctionDelegate("swscale", "sws_normalizeVec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_normalizeVec(@a, @height); + }; + + vectors.sws_receive_slice = (SwsContext* @c, uint @slice_start, uint @slice_height) => + { + vectors.sws_receive_slice = FunctionResolver.GetFunctionDelegate("swscale", "sws_receive_slice", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_receive_slice(@c, @slice_start, @slice_height); + }; + + vectors.sws_receive_slice_alignment = (SwsContext* @c) => + { + vectors.sws_receive_slice_alignment = FunctionResolver.GetFunctionDelegate("swscale", "sws_receive_slice_alignment", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.sws_receive_slice_alignment(@c); + }; + + vectors.sws_scale = (SwsContext* @c, byte*[] @srcSlice, int[] @srcStride, int @srcSliceY, int @srcSliceH, byte*[] @dst, int[] @dstStride) => + { + vectors.sws_scale = FunctionResolver.GetFunctionDelegate("swscale", "sws_scale", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_scale(@c, @srcSlice, @srcStride, @srcSliceY, @srcSliceH, @dst, @dstStride); + }; + + vectors.sws_scale_frame = (SwsContext* @c, AVFrame* @dst, AVFrame* @src) => + { + vectors.sws_scale_frame = FunctionResolver.GetFunctionDelegate("swscale", "sws_scale_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_scale_frame(@c, @dst, @src); + }; + + vectors.sws_scaleVec = (SwsVector* @a, double @scalar) => + { + vectors.sws_scaleVec = FunctionResolver.GetFunctionDelegate("swscale", "sws_scaleVec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_scaleVec(@a, @scalar); + }; + + vectors.sws_send_slice = (SwsContext* @c, uint @slice_start, uint @slice_height) => + { + vectors.sws_send_slice = FunctionResolver.GetFunctionDelegate("swscale", "sws_send_slice", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_send_slice(@c, @slice_start, @slice_height); + }; + + vectors.sws_setColorspaceDetails = (SwsContext* @c, in int4 @inv_table, int @srcRange, in int4 @table, int @dstRange, int @brightness, int @contrast, int @saturation) => + { + vectors.sws_setColorspaceDetails = FunctionResolver.GetFunctionDelegate("swscale", "sws_setColorspaceDetails", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.sws_setColorspaceDetails(@c, @inv_table, @srcRange, @table, @dstRange, @brightness, @contrast, @saturation); + }; + + vectors.swscale_configuration = () => + { + vectors.swscale_configuration = FunctionResolver.GetFunctionDelegate("swscale", "swscale_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swscale_configuration(); + }; + + vectors.swscale_license = () => + { + vectors.swscale_license = FunctionResolver.GetFunctionDelegate("swscale", "swscale_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swscale_license(); + }; + + vectors.swscale_version = () => + { + vectors.swscale_version = FunctionResolver.GetFunctionDelegate("swscale", "swscale_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swscale_version(); + }; + + } +} diff --git a/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/generated/DynamicallyLoadedBindings.libraries.g.cs b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/generated/DynamicallyLoadedBindings.libraries.g.cs new file mode 100644 index 00000000..797013de --- /dev/null +++ b/FFmpeg.AutoGen.Bindings.DynamicallyLoaded/generated/DynamicallyLoadedBindings.libraries.g.cs @@ -0,0 +1,18 @@ +using System.Collections.Generic; + +namespace FFmpeg.AutoGen.Bindings.DynamicallyLoaded; + +public static unsafe partial class DynamicallyLoadedBindings +{ + public static Dictionary LibraryVersionMap = new Dictionary + { + {"avcodec", 59}, + {"avdevice", 59}, + {"avfilter", 8}, + {"avformat", 59}, + {"avutil", 57}, + {"postproc", 56}, + {"swresample", 4}, + {"swscale", 6}, + }; +} diff --git a/FFmpeg.AutoGen.Bindings.StaticallyLinked/FFmpeg.AutoGen.Bindings.StaticallyLinked.csproj b/FFmpeg.AutoGen.Bindings.StaticallyLinked/FFmpeg.AutoGen.Bindings.StaticallyLinked.csproj new file mode 100644 index 00000000..dbcc545b --- 
/dev/null +++ b/FFmpeg.AutoGen.Bindings.StaticallyLinked/FFmpeg.AutoGen.Bindings.StaticallyLinked.csproj @@ -0,0 +1,34 @@ + + + + netstandard2.1;netstandard2.0;net45 + FFmpeg auto generated unsafe bindings for C#/.NET and Mono. Abstractions todo + true + + + + True + 108;169;612;618;1573;1591;1701;1702;1705 + false + + bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml + + + + + true + snupkg + + + + + + + + + + + + + + diff --git a/FFmpeg.AutoGen.Bindings.StaticallyLinked/generated/StaticallyLinkedBindings.g.cs b/FFmpeg.AutoGen.Bindings.StaticallyLinked/generated/StaticallyLinkedBindings.g.cs new file mode 100644 index 00000000..b05840cb --- /dev/null +++ b/FFmpeg.AutoGen.Bindings.StaticallyLinked/generated/StaticallyLinkedBindings.g.cs @@ -0,0 +1,5774 @@ +using System; +using System.Runtime.InteropServices; +using FFmpeg.AutoGen.Abstractions; + +namespace FFmpeg.AutoGen.Bindings.StaticallyLinked; + +public static unsafe partial class StaticallyLinkedBindings +{ + /// Create an AVABufferSinkParams structure. + [Obsolete()] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVABufferSinkParams* av_abuffersink_params_alloc(); + + /// Add an index entry into a sorted list. Update the entry if the list already contains it. + /// timestamp in the time base of the given stream + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_add_index_entry(AVStream* @st, long @pos, long @timestamp, int @size, int @distance, int @flags); + + /// Add two rationals. + /// First rational + /// Second rational + /// b+c + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_add_q(AVRational @b, AVRational @c); + + /// Add a value to a timestamp. 
+ /// Input timestamp time base + /// Input timestamp + /// Time base of `inc` + /// Value to be added + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_add_stable(AVRational @ts_tb, long @ts, AVRational @inc_tb, long @inc); + + /// Read data and append it to the current content of the AVPacket. If pkt->size is 0 this is identical to av_get_packet. Note that this uses av_grow_packet and thus involves a realloc which is inefficient. Thus this function should only be used when there is no reasonable way to know (an upper bound of) the final size. + /// associated IO context + /// packet + /// amount of data to read + /// >0 (read size) if OK, AVERROR_xxx otherwise, previous data will not be lost even if an error occurs. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_append_packet(AVIOContext* @s, AVPacket* @pkt, int @size); + + /// Allocate an AVAudioFifo. + /// sample format + /// number of channels + /// initial allocation size, in samples + /// newly allocated AVAudioFifo, or NULL on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVAudioFifo* av_audio_fifo_alloc(AVSampleFormat @sample_fmt, int @channels, int @nb_samples); + + /// Drain data from an AVAudioFifo. + /// AVAudioFifo to drain + /// number of samples to drain + /// 0 if OK, or negative AVERROR code on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_drain(AVAudioFifo* @af, int @nb_samples); + + /// Free an AVAudioFifo. + /// AVAudioFifo to free + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_audio_fifo_free(AVAudioFifo* @af); + + /// Peek data from an AVAudioFifo. 
+ /// AVAudioFifo to read from + /// audio data plane pointers + /// number of samples to peek + /// number of samples actually peek, or negative AVERROR code on failure. The number of samples actually peek will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_peek(AVAudioFifo* @af, void** @data, int @nb_samples); + + /// Peek data from an AVAudioFifo. + /// AVAudioFifo to read from + /// audio data plane pointers + /// number of samples to peek + /// offset from current read position + /// number of samples actually peek, or negative AVERROR code on failure. The number of samples actually peek will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_peek_at(AVAudioFifo* @af, void** @data, int @nb_samples, int @offset); + + /// Read data from an AVAudioFifo. + /// AVAudioFifo to read from + /// audio data plane pointers + /// number of samples to read + /// number of samples actually read, or negative AVERROR code on failure. The number of samples actually read will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_read(AVAudioFifo* @af, void** @data, int @nb_samples); + + /// Reallocate an AVAudioFifo. + /// AVAudioFifo to reallocate + /// new allocation size, in samples + /// 0 if OK, or negative AVERROR code on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_realloc(AVAudioFifo* @af, int @nb_samples); + + /// Reset the AVAudioFifo buffer. 
+ /// AVAudioFifo to reset + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_audio_fifo_reset(AVAudioFifo* @af); + + /// Get the current number of samples in the AVAudioFifo available for reading. + /// the AVAudioFifo to query + /// number of samples available for reading + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_size(AVAudioFifo* @af); + + /// Get the current number of samples in the AVAudioFifo available for writing. + /// the AVAudioFifo to query + /// number of samples available for writing + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_space(AVAudioFifo* @af); + + /// Write data to an AVAudioFifo. + /// AVAudioFifo to write to + /// audio data plane pointers + /// number of samples to write + /// number of samples actually written, or negative AVERROR code on failure. If successful, the number of samples actually written will always be nb_samples. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_audio_fifo_write(AVAudioFifo* @af, void** @data, int @nb_samples); + + /// Append a description of a channel layout to a bprint buffer. + [Obsolete("use av_channel_layout_describe()")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_bprint_channel_layout(AVBPrint* @bp, int @nb_channels, ulong @channel_layout); + + /// Allocate a context for a given bitstream filter. The caller must fill in the context parameters as described in the documentation and then call av_bsf_init() before sending any data to the filter. + /// the filter for which to allocate an instance. + /// a pointer into which the pointer to the newly-allocated context will be written. It must be freed with av_bsf_free() after the filtering is done. 
+ /// 0 on success, a negative AVERROR code on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_alloc(AVBitStreamFilter* @filter, AVBSFContext** @ctx); + + /// Reset the internal bitstream filter state. Should be called e.g. when seeking. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_bsf_flush(AVBSFContext* @ctx); + + /// Free a bitstream filter context and everything associated with it; write NULL into the supplied pointer. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_bsf_free(AVBSFContext** @ctx); + + /// Returns a bitstream filter with the specified name or NULL if no such bitstream filter exists. + /// a bitstream filter with the specified name or NULL if no such bitstream filter exists. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBitStreamFilter* av_bsf_get_by_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Get the AVClass for AVBSFContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* av_bsf_get_class(); + + /// Get null/pass-through bitstream filter. + /// Pointer to be set to new instance of pass-through bitstream filter + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_get_null_filter(AVBSFContext** @bsf); + + /// Prepare the filter for use, after all the parameters and options have been set. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_init(AVBSFContext* @ctx); + + /// Iterate over all registered bitstream filters. 
+ /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. + /// the next registered bitstream filter or NULL when the iteration is finished + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBitStreamFilter* av_bsf_iterate(void** @opaque); + + /// Allocate empty list of bitstream filters. The list must be later freed by av_bsf_list_free() or finalized by av_bsf_list_finalize(). + /// Pointer to on success, NULL in case of failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBSFList* av_bsf_list_alloc(); + + /// Append bitstream filter to the list of bitstream filters. + /// List to append to + /// Filter context to be appended + /// >=0 on success, negative AVERROR in case of failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_list_append(AVBSFList* @lst, AVBSFContext* @bsf); + + /// Construct new bitstream filter context given it's name and options and append it to the list of bitstream filters. + /// List to append to + /// Name of the bitstream filter + /// Options for the bitstream filter, can be set to NULL + /// >=0 on success, negative AVERROR in case of failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_list_append2(AVBSFList* @lst, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @bsf_name, AVDictionary** @options); + + /// Finalize list of bitstream filters. 
+ /// Filter list structure to be transformed + /// Pointer to be set to newly created structure representing the chain of bitstream filters + /// >=0 on success, negative AVERROR in case of failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_list_finalize(AVBSFList** @lst, AVBSFContext** @bsf); + + /// Free list of bitstream filters. + /// Pointer to pointer returned by av_bsf_list_alloc() + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_bsf_list_free(AVBSFList** @lst); + + /// Parse string describing list of bitstream filters and create single AVBSFContext describing the whole chain of bitstream filters. Resulting AVBSFContext can be treated as any other AVBSFContext freshly allocated by av_bsf_alloc(). + /// String describing chain of bitstream filters in format `bsf1[=opt1=val1:opt2=val2][,bsf2]` + /// Pointer to be set to newly created structure representing the chain of bitstream filters + /// >=0 on success, negative AVERROR in case of failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_list_parse_str( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str, AVBSFContext** @bsf); + + /// Retrieve a filtered packet. + /// this struct will be filled with the contents of the filtered packet. It is owned by the caller and must be freed using av_packet_unref() when it is no longer needed. This parameter should be "clean" (i.e. freshly allocated with av_packet_alloc() or unreffed with av_packet_unref()) when this function is called. If this function returns successfully, the contents of pkt will be completely overwritten by the returned data. On failure, pkt is not touched. + /// - 0 on success. 
- AVERROR(EAGAIN) if more packets need to be sent to the filter (using av_bsf_send_packet()) to get more output. - AVERROR_EOF if there will be no further output from the filter. - Another negative AVERROR value if an error occurs. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_receive_packet(AVBSFContext* @ctx, AVPacket* @pkt); + + /// Submit a packet for filtering. + /// the packet to filter. The bitstream filter will take ownership of the packet and reset the contents of pkt. pkt is not touched if an error occurs. If pkt is empty (i.e. NULL, or pkt->data is NULL and pkt->side_data_elems zero), it signals the end of the stream (i.e. no more non-empty packets will be sent; sending more empty packets does nothing) and will cause the filter to output any packets it may have buffered internally. + /// - 0 on success. - AVERROR(EAGAIN) if packets need to be retrieved from the filter (using av_bsf_receive_packet()) before new input can be consumed. - Another negative AVERROR value if an error occurs. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_bsf_send_packet(AVBSFContext* @ctx, AVPacket* @pkt); + + /// Allocate an AVBuffer of the given size using av_malloc(). + /// an AVBufferRef of given size or NULL when out of memory + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_buffer_alloc(ulong @size); + + /// Same as av_buffer_alloc(), except the returned buffer will be initialized to zero. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_buffer_allocz(ulong @size); + + /// Create an AVBuffer from an existing array. 
+ /// data array + /// size of data in bytes + /// a callback for freeing this buffer's data + /// parameter to be got for processing or passed to free + /// a combination of AV_BUFFER_FLAG_* + /// an AVBufferRef referring to data on success, NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_buffer_create(byte* @data, ulong @size, av_buffer_create_free_func @free, void* @opaque, int @flags); + + /// Default free callback, which calls av_free() on the buffer data. This function is meant to be passed to av_buffer_create(), not called directly. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_buffer_default_free(void* @opaque, byte* @data); + + /// Returns the opaque parameter set by av_buffer_create. + /// the opaque parameter set by av_buffer_create. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_buffer_get_opaque(AVBufferRef* @buf); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffer_get_ref_count(AVBufferRef* @buf); + + /// Returns 1 if the caller may write to the data referred to by buf (which is true if and only if buf is the only reference to the underlying AVBuffer). Return 0 otherwise. A positive answer is valid until av_buffer_ref() is called on buf. + /// 1 if the caller may write to the data referred to by buf (which is true if and only if buf is the only reference to the underlying AVBuffer). Return 0 otherwise. A positive answer is valid until av_buffer_ref() is called on buf. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffer_is_writable(AVBufferRef* @buf); + + /// Create a writable reference from a given buffer reference, avoiding data copy if possible. + /// buffer reference to make writable. 
On success, buf is either left untouched, or it is unreferenced and a new writable AVBufferRef is written in its place. On failure, buf is left untouched. + /// 0 on success, a negative AVERROR on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffer_make_writable(AVBufferRef** @buf); + + /// Query the original opaque parameter of an allocated buffer in the pool. + /// a buffer reference to a buffer returned by av_buffer_pool_get. + /// the opaque parameter set by the buffer allocator function of the buffer pool. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_buffer_pool_buffer_get_opaque(AVBufferRef* @ref); + + /// Allocate a new AVBuffer, reusing an old buffer from the pool when available. This function may be called simultaneously from multiple threads. + /// a reference to the new buffer on success, NULL on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_buffer_pool_get(AVBufferPool* @pool); + + /// Allocate and initialize a buffer pool. + /// size of each buffer in this pool + /// a function that will be used to allocate new buffers when the pool is empty. May be NULL, then the default allocator will be used (av_buffer_alloc()). + /// newly created buffer pool on success, NULL on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferPool* av_buffer_pool_init(ulong @size, av_buffer_pool_init_alloc_func @alloc); + + /// Allocate and initialize a buffer pool with a more complex allocator. + /// size of each buffer in this pool + /// arbitrary user data used by the allocator + /// a function that will be used to allocate new buffers when the pool is empty. May be NULL, then the default allocator will be used (av_buffer_alloc()). + /// a function that will be called immediately before the pool is freed. I.e. 
after av_buffer_pool_uninit() is called by the caller and all the frames are returned to the pool and freed. It is intended to uninitialize the user opaque data. May be NULL. + /// newly created buffer pool on success, NULL on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferPool* av_buffer_pool_init2(ulong @size, void* @opaque, av_buffer_pool_init2_alloc_func @alloc, av_buffer_pool_init2_pool_free_func @pool_free); + + /// Mark the pool as being available for freeing. It will actually be freed only once all the allocated buffers associated with the pool are released. Thus it is safe to call this function while some of the allocated buffers are still in use. + /// pointer to the pool to be freed. It will be set to NULL. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_buffer_pool_uninit(AVBufferPool** @pool); + + /// Reallocate a given buffer. + /// a buffer reference to reallocate. On success, buf will be unreferenced and a new reference with the required size will be written in its place. On failure buf will be left untouched. *buf may be NULL, then a new buffer is allocated. + /// required new buffer size. + /// 0 on success, a negative AVERROR on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffer_realloc(AVBufferRef** @buf, ulong @size); + + /// Create a new reference to an AVBuffer. + /// a new AVBufferRef referring to the same AVBuffer as buf or NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_buffer_ref(AVBufferRef* @buf); + + /// Ensure dst refers to the same data as src. + /// Pointer to either a valid buffer reference or NULL. On success, this will point to a buffer reference equivalent to src. On failure, dst will be left untouched. + /// A buffer reference to replace dst with. 
May be NULL, then this function is equivalent to av_buffer_unref(dst). + /// 0 on success AVERROR(ENOMEM) on memory allocation failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffer_replace(AVBufferRef** @dst, AVBufferRef* @src); + + /// Free a given reference and automatically free the buffer if there are no more references to it. + /// the reference to be freed. The pointer is set to NULL on return. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_buffer_unref(AVBufferRef** @buf); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_ch_layout(AVFilterContext* @ctx, AVChannelLayout* @ch_layout); + + [Obsolete()] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern ulong av_buffersink_get_channel_layout(AVFilterContext* @ctx); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_channels(AVFilterContext* @ctx); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_format(AVFilterContext* @ctx); + + /// Get a frame with filtered data from sink and put it in frame. + /// pointer to a context of a buffersink or abuffersink AVFilter. + /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() + /// - >= 0 if a frame was successfully returned. - AVERROR(EAGAIN) if no frames are available at this point; more input frames must be added to the filtergraph to get more output. - AVERROR_EOF if there will be no more output frames on this sink. - A different negative AVERROR code in other failure cases. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_frame(AVFilterContext* @ctx, AVFrame* @frame); + + /// Get a frame with filtered data from sink and put it in frame. + /// pointer to a buffersink or abuffersink filter context. + /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() + /// a combination of AV_BUFFERSINK_FLAG_* flags + /// >= 0 for success, a negative AVERROR code for failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_frame_flags(AVFilterContext* @ctx, AVFrame* @frame, int @flags); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_buffersink_get_frame_rate(AVFilterContext* @ctx); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_h(AVFilterContext* @ctx); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_buffersink_get_hw_frames_ctx(AVFilterContext* @ctx); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_buffersink_get_sample_aspect_ratio(AVFilterContext* @ctx); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_sample_rate(AVFilterContext* @ctx); + + /// Same as av_buffersink_get_frame(), but with the ability to specify the number of samples read. This function is less efficient than av_buffersink_get_frame(), because it copies the data around. + /// pointer to a context of the abuffersink AVFilter. + /// pointer to an allocated frame that will be filled with data. 
The data must be freed using av_frame_unref() / av_frame_free() frame will contain exactly nb_samples audio samples, except at the end of stream, when it can contain less than nb_samples. + /// The return codes have the same meaning as for av_buffersink_get_frame(). + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_samples(AVFilterContext* @ctx, AVFrame* @frame, int @nb_samples); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_buffersink_get_time_base(AVFilterContext* @ctx); + + /// Get the properties of the stream @{ + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVMediaType av_buffersink_get_type(AVFilterContext* @ctx); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersink_get_w(AVFilterContext* @ctx); + + /// Create an AVBufferSinkParams structure. + [Obsolete()] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferSinkParams* av_buffersink_params_alloc(); + + /// Set the frame size for an audio buffer sink. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_buffersink_set_frame_size(AVFilterContext* @ctx, uint @frame_size); + + /// Add a frame to the buffer source. + /// an instance of the buffersrc filter + /// frame to be added. If the frame is reference counted, this function will take ownership of the reference(s) and reset the frame. Otherwise the frame data will be copied. If this function returns an error, the input frame is not touched. + /// 0 on success, a negative AVERROR on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersrc_add_frame(AVFilterContext* @ctx, AVFrame* @frame); + + /// Add a frame to the buffer source. 
+ /// pointer to a buffer source context + /// a frame, or NULL to mark EOF + /// a combination of AV_BUFFERSRC_FLAG_* + /// >= 0 in case of success, a negative AVERROR code in case of failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersrc_add_frame_flags(AVFilterContext* @buffer_src, AVFrame* @frame, int @flags); + + /// Close the buffer source after EOF. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersrc_close(AVFilterContext* @ctx, long @pts, uint @flags); + + /// Get the number of failed requests. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_buffersrc_get_nb_failed_requests(AVFilterContext* @buffer_src); + + /// Allocate a new AVBufferSrcParameters instance. It should be freed by the caller with av_free(). + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferSrcParameters* av_buffersrc_parameters_alloc(); + + /// Initialize the buffersrc or abuffersrc filter with the provided parameters. This function may be called multiple times, the later calls override the previous ones. Some of the parameters may also be set through AVOptions, then whatever method is used last takes precedence. + /// an instance of the buffersrc or abuffersrc filter + /// the stream parameters. The frames later passed to this filter must conform to those parameters. All the allocated fields in param remain owned by the caller, libavfilter will make internal copies or references when necessary. + /// 0 on success, a negative AVERROR code on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersrc_parameters_set(AVFilterContext* @ctx, AVBufferSrcParameters* @param); + + /// Add a frame to the buffer source. + /// an instance of the buffersrc filter + /// frame to be added. 
If the frame is reference counted, this function will make a new reference to it. Otherwise the frame data will be copied. + /// 0 on success, a negative AVERROR on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_buffersrc_write_frame(AVFilterContext* @ctx, AVFrame* @frame); + + /// Allocate a memory block for an array with av_mallocz(). + /// Number of elements + /// Size of the single element + /// Pointer to the allocated block, or `NULL` if the block cannot be allocated + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_calloc(ulong @nmemb, ulong @size); + + /// Get a human readable string describing a given channel. + /// pre-allocated buffer where to put the generated string + /// size in bytes of the buffer. + /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_description(byte* @buf, ulong @buf_size, AVChannel @channel); + + /// bprint variant of av_channel_description(). + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_channel_description_bprint(AVBPrint* @bp, AVChannel @channel_id); + + /// This is the inverse function of av_channel_name(). + /// the channel with the given name AV_CHAN_NONE when name does not identify a known channel + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVChannel av_channel_from_string( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Get the channel with the given index in a channel layout. 
+ /// input channel layout + /// channel with the index idx in channel_layout on success or AV_CHAN_NONE on failure (if idx is not valid or the channel order is unspecified) + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVChannel av_channel_layout_channel_from_index(AVChannelLayout* @channel_layout, uint @idx); + + /// Get a channel described by the given string. + /// input channel layout + /// a channel described by the given string in channel_layout on success or AV_CHAN_NONE on failure (if the string is not valid or the channel order is unspecified) + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVChannel av_channel_layout_channel_from_string(AVChannelLayout* @channel_layout, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Check whether a channel layout is valid, i.e. can possibly describe audio data. + /// input channel layout + /// 1 if channel_layout is valid, 0 otherwise. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_check(AVChannelLayout* @channel_layout); + + /// Check whether two channel layouts are semantically the same, i.e. the same channels are present on the same positions in both. + /// input channel layout + /// input channel layout + /// 0 if chl and chl1 are equal, 1 if they are not equal. A negative AVERROR code if one or both are invalid. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_compare(AVChannelLayout* @chl, AVChannelLayout* @chl1); + + /// Make a copy of a channel layout. This differs from just assigning src to dst in that it allocates and copies the map for AV_CHANNEL_ORDER_CUSTOM. 
+ /// destination channel layout + /// source channel layout + /// 0 on success, a negative AVERROR on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_copy(AVChannelLayout* @dst, AVChannelLayout* @src); + + /// Get the default channel layout for a given number of channels. + /// number of channels + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_channel_layout_default(AVChannelLayout* @ch_layout, int @nb_channels); + + /// Get a human-readable string describing the channel layout properties. The string will be in the same format that is accepted by av_channel_layout_from_string(), allowing to rebuild the same channel layout, except for opaque pointers. + /// channel layout to be described + /// pre-allocated buffer where to put the generated string + /// size in bytes of the buffer. + /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_describe(AVChannelLayout* @channel_layout, byte* @buf, ulong @buf_size); + + /// bprint variant of av_channel_layout_describe(). + /// 0 on success, or a negative AVERROR value on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_describe_bprint(AVChannelLayout* @channel_layout, AVBPrint* @bp); + + /// Get the channel with the given index in channel_layout. + [Obsolete("use av_channel_layout_channel_from_index()")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern ulong av_channel_layout_extract_channel(ulong @channel_layout, int @index); + + /// Initialize a native channel layout from a bitmask indicating which channels are present. 
+ /// the layout structure to be initialized + /// bitmask describing the channel layout + /// 0 on success AVERROR(EINVAL) for invalid mask values + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_from_mask(AVChannelLayout* @channel_layout, ulong @mask); + + /// Initialize a channel layout from a given string description. The input string can be represented by: - the formal channel layout name (returned by av_channel_layout_describe()) - single or multiple channel names (returned by av_channel_name(), eg. "FL", or concatenated with "+", each optionally containing a custom name after a "", eg. "FL+FR+LFE") - a decimal or hexadecimal value of a native channel layout (eg. "4" or "0x4") - the number of channels with default layout (eg. "4c") - the number of unordered channels (eg. "4C" or "4 channels") - the ambisonic order followed by optional non-diegetic channels (eg. "ambisonic 2+stereo") + /// input channel layout + /// string describing the channel layout + /// 0 channel layout was detected, AVERROR_INVALIDDATA otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_from_string(AVChannelLayout* @channel_layout, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + + /// Get the index of a given channel in a channel layout. In case multiple channels are found, only the first match will be returned. + /// input channel layout + /// index of channel in channel_layout on success or a negative number if channel is not present in channel_layout. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_index_from_channel(AVChannelLayout* @channel_layout, AVChannel @channel); + + /// Get the index in a channel layout of a channel described by the given string. In case multiple channels are found, only the first match will be returned. + /// input channel layout + /// a channel index described by the given string, or a negative AVERROR value. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_layout_index_from_string(AVChannelLayout* @channel_layout, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Iterate over all standard channel layouts. + /// a pointer where libavutil will store the iteration state. Must point to NULL to start the iteration. + /// the standard channel layout or NULL when the iteration is finished + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVChannelLayout* av_channel_layout_standard(void** @opaque); + + /// Find out what channels from a given set are present in a channel layout, without regard for their positions. + /// input channel layout + /// a combination of AV_CH_* representing a set of channels + /// a bitfield representing all the channels from mask that are present in channel_layout + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern ulong av_channel_layout_subset(AVChannelLayout* @channel_layout, ulong @mask); + + /// Free any allocated data in the channel layout and reset the channel count to 0. 
+ /// the layout structure to be uninitialized + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_channel_layout_uninit(AVChannelLayout* @channel_layout); + + /// Get a human readable string in an abbreviated form describing a given channel. This is the inverse function of av_channel_from_string(). + /// pre-allocated buffer where to put the generated string + /// size in bytes of the buffer. + /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_channel_name(byte* @buf, ulong @buf_size, AVChannel @channel); + + /// bprint variant of av_channel_name(). + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_channel_name_bprint(AVBPrint* @bp, AVChannel @channel_id); + + /// Returns the AVChromaLocation value for name or an AVError if not found. + /// the AVChromaLocation value for name or an AVError if not found. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_chroma_location_from_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Returns the name for provided chroma location or NULL if unknown. + /// the name for provided chroma location or NULL if unknown. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_chroma_location_name(AVChromaLocation @location); + + /// Get the AVCodecID for the given codec tag tag. If no codec id is found returns AV_CODEC_ID_NONE. 
+ /// list of supported codec_id-codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + /// codec tag to match to a codec ID + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecID av_codec_get_id(AVCodecTag** @tags, uint @tag); + + /// Get the codec tag for the given codec id id. If no codec tag is found returns 0. + /// list of supported codec_id-codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + /// codec ID to match to a codec tag + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_codec_get_tag(AVCodecTag** @tags, AVCodecID @id); + + /// Get the codec tag for the given codec id. + /// list of supported codec_id - codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + /// codec id that should be searched for in the list + /// A pointer to the found tag + /// 0 if id was not found in tags, > 0 if it was found + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_codec_get_tag2(AVCodecTag** @tags, AVCodecID @id, uint* @tag); + + /// Returns a non-zero number if codec is a decoder, zero otherwise + /// a non-zero number if codec is a decoder, zero otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_codec_is_decoder(AVCodec* @codec); + + /// Returns a non-zero number if codec is an encoder, zero otherwise + /// a non-zero number if codec is an encoder, zero otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_codec_is_encoder(AVCodec* @codec); + + /// Iterate over all registered codecs. + /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. 
+ /// the next registered codec or NULL when the iteration is finished + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodec* av_codec_iterate(void** @opaque); + + /// Returns the AVColorPrimaries value for name or an AVError if not found. + /// the AVColorPrimaries value for name or an AVError if not found. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_color_primaries_from_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Returns the name for provided color primaries or NULL if unknown. + /// the name for provided color primaries or NULL if unknown. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_color_primaries_name(AVColorPrimaries @primaries); + + /// Returns the AVColorRange value for name or an AVError if not found. + /// the AVColorRange value for name or an AVError if not found. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_color_range_from_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Returns the name for provided color range or NULL if unknown. + /// the name for provided color range or NULL if unknown. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_color_range_name(AVColorRange @range); + + /// Returns the AVColorSpace value for name or an AVError if not found. 
+ /// the AVColorSpace value for name or an AVError if not found. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_color_space_from_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Returns the name for provided color space or NULL if unknown. + /// the name for provided color space or NULL if unknown. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_color_space_name(AVColorSpace @space); + + /// Returns the AVColorTransferCharacteristic value for name or an AVError if not found. + /// the AVColorTransferCharacteristic value for name or an AVError if not found. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_color_transfer_from_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Returns the name for provided color transfer or NULL if unknown. + /// the name for provided color transfer or NULL if unknown. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_color_transfer_name(AVColorTransferCharacteristic @transfer); + + /// Compare the remainders of two integer operands divided by a common divisor. 
+ /// Divisor; must be a power of 2 + /// - a negative value if `a % mod < b % mod` - a positive value if `a % mod > b % mod` - zero if `a % mod == b % mod` + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_compare_mod(ulong @a, ulong @b, ulong @mod); + + /// Compare two timestamps each in its own time base. + /// One of the following values: - -1 if `ts_a` is before `ts_b` - 1 if `ts_a` is after `ts_b` - 0 if they represent the same position + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_compare_ts(long @ts_a, AVRational @tb_a, long @ts_b, AVRational @tb_b); + + /// Allocate an AVContentLightMetadata structure and set its fields to default values. The resulting struct can be freed using av_freep(). + /// An AVContentLightMetadata filled with default values or NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVContentLightMetadata* av_content_light_metadata_alloc(ulong* @size); + + /// Allocate a complete AVContentLightMetadata and add it to the frame. + /// The frame which side data is added to. + /// The AVContentLightMetadata structure to be filled by caller. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVContentLightMetadata* av_content_light_metadata_create_side_data(AVFrame* @frame); + + /// Allocate a CPB properties structure and initialize its fields to default values. + /// if non-NULL, the size of the allocated struct will be written here. This is useful for embedding it in side data. + /// the newly allocated struct or NULL on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCPBProperties* av_cpb_properties_alloc(ulong* @size); + + /// Returns the number of logical CPU cores present. + /// the number of logical CPU cores present. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_cpu_count(); + + /// Overrides cpu count detection and forces the specified count. Count < 1 disables forcing of specific count. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_cpu_force_count(int @count); + + /// Get the maximum data alignment that may be required by FFmpeg. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern ulong av_cpu_max_align(); + + /// Convert a double precision floating point number to a rational. + /// `double` to convert + /// Maximum allowed numerator and denominator + /// `d` in AVRational form + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_d2q(double @d, int @max); + + /// Allocate an AVD3D11VAContext. + /// Newly-allocated AVD3D11VAContext or NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVD3D11VAContext* av_d3d11va_alloc_context(); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClassCategory av_default_get_category(void* @ptr); + + /// Return the context name + /// The AVClass context + /// The AVClass class_name + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_default_item_name(void* @ctx); + + /// Iterate over all registered demuxers. + /// a pointer where libavformat will store the iteration state. Must point to NULL to start the iteration. 
+ /// the next registered demuxer or NULL when the iteration is finished + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVInputFormat* av_demuxer_iterate(void** @opaque); + + /// Copy entries from one AVDictionary struct into another. + /// pointer to a pointer to a AVDictionary struct. If *dst is NULL, this function will allocate a struct for you and put it in *dst + /// pointer to source AVDictionary struct + /// flags to use when setting entries in *dst + /// 0 on success, negative AVERROR code on failure. If dst was allocated by this function, callers should free the associated memory. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_dict_copy(AVDictionary** @dst, AVDictionary* @src, int @flags); + + /// Get number of entries in dictionary. + /// dictionary + /// number of entries in dictionary + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_dict_count(AVDictionary* @m); + + /// Free all the memory allocated for an AVDictionary struct and all keys and values. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_dict_free(AVDictionary** @m); + + /// Get a dictionary entry with matching key. + /// matching key + /// Set to the previous matching element to find the next. If set to NULL the first matching element is returned. 
+ /// a collection of AV_DICT_* flags controlling how the entry is retrieved
+ /// found entry or NULL in case no matching entry was found in the dictionary
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
+ public static extern AVDictionaryEntry* av_dict_get(AVDictionary* @m,
+ #if NETSTANDARD2_1_OR_GREATER
+ [MarshalAs(UnmanagedType.LPUTF8Str)]
+ #else
+ [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
+ #endif
+ string @key, AVDictionaryEntry* @prev, int @flags);
+
+ /// Get dictionary entries as a string.
+ /// dictionary
+ /// Pointer to buffer that will be allocated with string containing entries. Buffer must be freed by the caller when it is no longer needed.
+ /// character used to separate key from value
+ /// character used to separate two pairs from each other
+ /// >= 0 on success, negative on error
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
+ public static extern int av_dict_get_string(AVDictionary* @m, byte** @buffer, byte @key_val_sep, byte @pairs_sep);
+
+ /// Parse the key/value pairs list and add the parsed entries to a dictionary.
+ /// a 0-terminated list of characters used to separate key from value
+ /// a 0-terminated list of characters used to separate two pairs from each other
+ /// flags to use when adding to dictionary. AV_DICT_DONT_STRDUP_KEY and AV_DICT_DONT_STRDUP_VAL are ignored since the key/value tokens will always be duplicated.
+ /// 0 on success, negative AVERROR code on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_dict_parse_string(AVDictionary** @pm, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @pairs_sep, int @flags); + + /// Set the given entry in *pm, overwriting an existing entry. + /// pointer to a pointer to a dictionary struct. If *pm is NULL a dictionary struct is allocated and put in *pm. + /// entry key to add to *pm (will either be av_strduped or added as a new key depending on flags) + /// entry value to add to *pm (will be av_strduped or added as a new key depending on flags). Passing a NULL value will cause an existing entry to be deleted. + /// >= 0 on success otherwise an error code < 0 + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_dict_set(AVDictionary** @pm, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @value, int @flags); + + /// Convenience wrapper for av_dict_set that converts the value to a string and stores it. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_dict_set_int(AVDictionary** @pm, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, long @value, int @flags); + + /// Returns The AV_DISPOSITION_* flag corresponding to disp or a negative error code if disp does not correspond to a known stream disposition. + /// The AV_DISPOSITION_* flag corresponding to disp or a negative error code if disp does not correspond to a known stream disposition. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_disposition_from_string( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @disp); + + /// Returns The string description corresponding to the lowest set bit in disposition. NULL when the lowest set bit does not correspond to a known disposition or when disposition is 0. + /// a combination of AV_DISPOSITION_* values + /// The string description corresponding to the lowest set bit in disposition. NULL when the lowest set bit does not correspond to a known disposition or when disposition is 0. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_disposition_to_string(int @disposition); + + /// Divide one rational by another. 
+ /// First rational + /// Second rational + /// b/c + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_div_q(AVRational @b, AVRational @c); + + /// Print detailed information about the input or output format, such as duration, bitrate, streams, container, programs, metadata, side data, codec and time base. + /// the context to analyze + /// index of the stream to dump information about + /// the URL to print, such as source or destination file + /// Select whether the specified context is an input(0) or output(1) + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_dump_format(AVFormatContext* @ic, int @index, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @is_output); + + /// Allocate an AVDynamicHDRPlus structure and set its fields to default values. The resulting struct can be freed using av_freep(). + /// An AVDynamicHDRPlus filled with default values or NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVDynamicHDRPlus* av_dynamic_hdr_plus_alloc(ulong* @size); + + /// Allocate a complete AVDynamicHDRPlus and add it to the frame. + /// The frame which side data is added to. + /// The AVDynamicHDRPlus structure to be filled by caller or NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVDynamicHDRPlus* av_dynamic_hdr_plus_create_side_data(AVFrame* @frame); + + /// Add the pointer to an element to a dynamic array. 
+ /// Pointer to the array to grow + /// Pointer to the number of elements in the array + /// Element to add + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_dynarray_add(void* @tab_ptr, int* @nb_ptr, void* @elem); + + /// Add an element to a dynamic array. + /// >=0 on success, negative otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_dynarray_add_nofree(void* @tab_ptr, int* @nb_ptr, void* @elem); + + /// Add an element of size `elem_size` to a dynamic array. + /// Pointer to the array to grow + /// Pointer to the number of elements in the array + /// Size in bytes of an element in the array + /// Pointer to the data of the element to add. If `NULL`, the space of the newly added element is allocated but left uninitialized. + /// Pointer to the data of the element to copy in the newly allocated space + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_dynarray2_add(void** @tab_ptr, int* @nb_ptr, ulong @elem_size, byte* @elem_data); + + /// Allocate a buffer, reusing the given one if large enough. + /// Pointer to pointer to an already allocated buffer. `*ptr` will be overwritten with pointer to new buffer on success or `NULL` on failure + /// Pointer to the size of buffer `*ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure. + /// Desired minimal size of buffer `*ptr` + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_fast_malloc(void* @ptr, uint* @size, ulong @min_size); + + /// Allocate and clear a buffer, reusing the given one if large enough. + /// Pointer to pointer to an already allocated buffer. `*ptr` will be overwritten with pointer to new buffer on success or `NULL` on failure + /// Pointer to the size of buffer `*ptr`. 
`*size` is updated to the new allocated size, in particular 0 in case of failure. + /// Desired minimal size of buffer `*ptr` + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_fast_mallocz(void* @ptr, uint* @size, ulong @min_size); + + /// Same behaviour av_fast_malloc but the buffer has additional AV_INPUT_BUFFER_PADDING_SIZE at the end which will always be 0. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_fast_padded_malloc(void* @ptr, uint* @size, ulong @min_size); + + /// Same behaviour av_fast_padded_malloc except that buffer will always be 0-initialized after call. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_fast_padded_mallocz(void* @ptr, uint* @size, ulong @min_size); + + /// Reallocate the given buffer if it is not large enough, otherwise do nothing. + /// Already allocated buffer, or `NULL` + /// Pointer to the size of buffer `ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure. + /// Desired minimal size of buffer `ptr` + /// `ptr` if the buffer is large enough, a pointer to newly reallocated buffer if the buffer was not large enough, or `NULL` in case of error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_fast_realloc(void* @ptr, uint* @size, ulong @min_size); + + /// Read the file with name filename, and put its content in a newly allocated buffer or map it with mmap() when available. In case of success set *bufptr to the read or mmapped buffer, and *size to the size in bytes of the buffer in *bufptr. Unlike mmap this function succeeds with zero sized files, in this case *bufptr will be set to NULL and *size will be set to 0. The returned buffer must be released with av_file_unmap(). 
+ /// loglevel offset used for logging + /// context used for logging + /// a non negative number in case of success, a negative value corresponding to an AVERROR error code in case of failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_file_map( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename, byte** @bufptr, ulong* @size, int @log_offset, void* @log_ctx); + + /// Unmap or free the buffer bufptr created by av_file_map(). + /// size in bytes of bufptr, must be the same as returned by av_file_map() + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_file_unmap(byte* @bufptr, ulong @size); + + /// Check whether filename actually is a numbered sequence generator. + /// possible numbered sequence string + /// 1 if a valid numbered sequence string, 0 otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_filename_number_test( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename); + + /// Iterate over all registered filters. + /// a pointer where libavfilter will store the iteration state. Must point to NULL to start the iteration. + /// the next registered filter or NULL when the iteration is finished + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFilter* av_filter_iterate(void** @opaque); + + /// Compute what kind of losses will occur when converting from one specific pixel format to another. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. 
Similarly, other losses occur when converting from some formats to other formats. These losses can involve loss of chroma, but also loss of resolution, loss of color depth, loss due to the color space conversion, loss of the alpha bits or loss due to color quantization. av_get_pix_fmt_loss() informs you about the various types of losses which will occur when converting from one pixel format to another.
+ /// source pixel format
+ /// Whether the source pixel format alpha channel is used.
+ /// Combination of flags informing you what kind of losses will occur (maximum loss for an invalid dst_pix_fmt).
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
+ public static extern AVPixelFormat av_find_best_pix_fmt_of_2(AVPixelFormat @dst_pix_fmt1, AVPixelFormat @dst_pix_fmt2, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr);
+
+ /// Find the "best" stream in the file. The best stream is determined according to various heuristics as the most likely to be what the user expects. If the decoder parameter is non-NULL, av_find_best_stream will find the default decoder for the stream's codec; streams for which no decoder can be found are ignored.
+ /// media file handle
+ /// stream type: video, audio, subtitles, etc.
+ /// user-requested stream number, or -1 for automatic selection
+ /// try to find a stream related (eg.
in the same program) to this one, or -1 if none + /// if non-NULL, returns the decoder for the selected stream + /// flags; none are currently defined + /// the non-negative stream number in case of success, AVERROR_STREAM_NOT_FOUND if no stream with the requested type could be found, AVERROR_DECODER_NOT_FOUND if streams were found but no decoder + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_find_best_stream(AVFormatContext* @ic, AVMediaType @type, int @wanted_stream_nb, int @related_stream, AVCodec** @decoder_ret, int @flags); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_find_default_stream_index(AVFormatContext* @s); + + /// Find AVInputFormat based on the short name of the input format. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVInputFormat* av_find_input_format( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @short_name); + + /// Find the value in a list of rationals nearest a given reference rational. + /// Reference rational + /// Array of rationals terminated by `{0, 0}` + /// Index of the nearest value found in the array + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_find_nearest_q_idx(AVRational @q, AVRational* @q_list); + + /// Find the programs which belong to a given stream. + /// media file handle + /// the last found program, the search will start after this program, or from the beginning if it is NULL + /// stream index + /// the next program which belongs to s, NULL if no program is found or the last program is not among the programs of ic. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVProgram* av_find_program_from_stream(AVFormatContext* @ic, AVProgram* @last, int @s); + + /// Returns the method used to set ctx->duration. + /// AVFMT_DURATION_FROM_PTS, AVFMT_DURATION_FROM_STREAM, or AVFMT_DURATION_FROM_BITRATE. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVDurationEstimationMethod av_fmt_ctx_get_duration_estimation_method(AVFormatContext* @ctx); + + /// Open a file using a UTF-8 filename. The API of this function matches POSIX fopen(), errors are returned through errno. + [Obsolete("Avoid using it, as on Windows, the FILE* allocated by this function may be allocated with a different CRT than the caller who uses the FILE*. No replacement provided in public API.")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern _iobuf* av_fopen_utf8( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @path, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @mode); + + /// Disables cpu detection and forces the specified flags. -1 is a special case that disables forcing of specific flags. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_force_cpu_flags(int @flags); + + /// This function will cause global side data to be injected in the next packet of each stream as well as after any subsequent seek. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_format_inject_global_side_data(AVFormatContext* @s); + + /// Fill the provided buffer with a string containing a FourCC (four-character code) representation. 
+ /// a buffer with size in bytes of at least AV_FOURCC_MAX_STRING_SIZE + /// the fourcc to represent + /// the buffer in input + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_fourcc_make_string(byte* @buf, uint @fourcc); + + /// Allocate an AVFrame and set its fields to default values. The resulting struct must be freed using av_frame_free(). + /// An AVFrame filled with default values or NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFrame* av_frame_alloc(); + + /// Crop the given video AVFrame according to its crop_left/crop_top/crop_right/ crop_bottom fields. If cropping is successful, the function will adjust the data pointers and the width/height fields, and set the crop fields to 0. + /// the frame which should be cropped + /// Some combination of AV_FRAME_CROP_* flags, or 0. + /// >= 0 on success, a negative AVERROR on error. If the cropping fields were invalid, AVERROR(ERANGE) is returned, and nothing is changed. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_frame_apply_cropping(AVFrame* @frame, int @flags); + + /// Create a new frame that references the same data as src. + /// newly created AVFrame on success, NULL on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFrame* av_frame_clone(AVFrame* @src); + + /// Copy the frame data from src to dst. + /// >= 0 on success, a negative AVERROR on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_frame_copy(AVFrame* @dst, AVFrame* @src); + + /// Copy only "metadata" fields from src to dst. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_frame_copy_props(AVFrame* @dst, AVFrame* @src); + + /// Free the frame and any dynamically allocated objects in it, e.g. 
extended_data. If the frame is reference counted, it will be unreferenced first. + /// frame to be freed. The pointer will be set to NULL. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_frame_free(AVFrame** @frame); + + /// Allocate new buffer(s) for audio or video data. + /// frame in which to store the new buffers. + /// Required buffer size alignment. If equal to 0, alignment will be chosen automatically for the current CPU. It is highly recommended to pass 0 here unless you know what you are doing. + /// 0 on success, a negative AVERROR on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_frame_get_buffer(AVFrame* @frame, int @align); + + /// Get the buffer reference a given data plane is stored in. + /// index of the data plane of interest in frame->extended_data. + /// the buffer reference that contains the plane or NULL if the input frame is not valid. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_frame_get_plane_buffer(AVFrame* @frame, int @plane); + + /// Returns a pointer to the side data of a given type on success, NULL if there is no side data with such type in this frame. + /// a pointer to the side data of a given type on success, NULL if there is no side data with such type in this frame. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFrameSideData* av_frame_get_side_data(AVFrame* @frame, AVFrameSideDataType @type); + + /// Check if the frame data is writable. + /// A positive value if the frame data is writable (which is true if and only if each of the underlying buffers has only one reference, namely the one stored in this frame). Return 0 otherwise. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_frame_is_writable(AVFrame* @frame); + + /// Ensure that the frame data is writable, avoiding data copy if possible. + /// 0 on success, a negative AVERROR on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_frame_make_writable(AVFrame* @frame); + + /// Move everything contained in src to dst and reset src. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_frame_move_ref(AVFrame* @dst, AVFrame* @src); + + /// Add a new side data to a frame. + /// a frame to which the side data should be added + /// type of the added side data + /// size of the side data + /// newly added side data on success, NULL on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFrameSideData* av_frame_new_side_data(AVFrame* @frame, AVFrameSideDataType @type, ulong @size); + + /// Add a new side data to a frame from an existing AVBufferRef + /// a frame to which the side data should be added + /// the type of the added side data + /// an AVBufferRef to add as side data. The ownership of the reference is transferred to the frame. + /// newly added side data on success, NULL on error. On failure the frame is unchanged and the AVBufferRef remains owned by the caller. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFrameSideData* av_frame_new_side_data_from_buf(AVFrame* @frame, AVFrameSideDataType @type, AVBufferRef* @buf); + + /// Set up a new reference to the data described by the source frame. + /// 0 on success, a negative AVERROR on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_frame_ref(AVFrame* @dst, AVFrame* @src); + + /// Remove and free all side data instances of the given type. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_frame_remove_side_data(AVFrame* @frame, AVFrameSideDataType @type); + + /// Returns a string identifying the side data type + /// a string identifying the side data type + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_frame_side_data_name(AVFrameSideDataType @type); + + /// Unreference all the buffers referenced by frame and reset the frame fields. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_frame_unref(AVFrame* @frame); + + /// Free a memory block which has been allocated with a function of av_malloc() or av_realloc() family. + /// Pointer to the memory block which should be freed. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_free(void* @ptr); + + /// Free a memory block which has been allocated with a function of av_malloc() or av_realloc() family, and set the pointer pointing to it to `NULL`. + /// Pointer to the pointer to the memory block which should be freed + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_freep(void* @ptr); + + /// Compute the greatest common divisor of two integer operands. + /// GCD of a and b up to sign; if a >= 0 and b >= 0, return value is >= 0; if a == 0 and b == 0, returns 0. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_gcd(long @a, long @b); + + /// Return the best rational so that a and b are multiple of it. If the resulting denominator is larger than max_den, return def. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_gcd_q(AVRational @a, AVRational @b, int @max_den, AVRational @def); + + /// Return the planar<->packed alternative form of the given sample format, or AV_SAMPLE_FMT_NONE on error. If the passed sample_fmt is already in the requested planar/packed format, the format returned is the same as the input. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVSampleFormat av_get_alt_sample_fmt(AVSampleFormat @sample_fmt, int @planar); + + /// Return audio frame duration. + /// codec context + /// size of the frame, or 0 if unknown + /// frame duration, in samples, if known. 0 if not able to determine. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_audio_frame_duration(AVCodecContext* @avctx, int @frame_bytes); + + /// This function is the same as av_get_audio_frame_duration(), except it works with AVCodecParameters instead of an AVCodecContext. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_audio_frame_duration2(AVCodecParameters* @par, int @frame_bytes); + + /// Return the number of bits per pixel used by the pixel format described by pixdesc. Note that this is not the same as the number of bits per sample. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_bits_per_pixel(AVPixFmtDescriptor* @pixdesc); + + /// Return codec bits per sample. + /// the codec + /// Number of bits per sample or zero if unknown for the given codec. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_bits_per_sample(AVCodecID @codec_id); + + /// Return number of bytes per sample. 
+ /// the sample format + /// number of bytes per sample or zero if unknown for the given sample format + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_bytes_per_sample(AVSampleFormat @sample_fmt); + + /// Get the description of a given channel. + /// a channel layout with a single channel + /// channel description on success, NULL on error + [Obsolete("use av_channel_description()")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_get_channel_description(ulong @channel); + + /// Return a channel layout id that matches name, or 0 if no match is found. + [Obsolete("use av_channel_layout_from_string()")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern ulong av_get_channel_layout( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Get the index of a channel in channel_layout. + /// a channel layout describing exactly one channel which must be present in channel_layout. + /// index of channel in channel_layout on success, a negative AVERROR on error. + [Obsolete("use av_channel_layout_index_from_channel()")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_channel_layout_channel_index(ulong @channel_layout, ulong @channel); + + /// Return the number of channels in the channel layout. + [Obsolete("use AVChannelLayout.nb_channels")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_channel_layout_nb_channels(ulong @channel_layout); + + /// Return a description of a channel layout. If nb_channels is <= 0, it is guessed from the channel_layout. 
+ /// put here the string containing the channel layout + /// size in bytes of the buffer + [Obsolete("use av_channel_layout_describe()")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_get_channel_layout_string(byte* @buf, int @buf_size, int @nb_channels, ulong @channel_layout); + + /// Get the name of a given channel. + /// channel name on success, NULL on error. + [Obsolete("use av_channel_name()")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_get_channel_name(ulong @channel); + + /// Get the name of a colorspace. + /// a static string identifying the colorspace; can be NULL. + [Obsolete("use av_color_space_name()")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_get_colorspace_name(AVColorSpace @val); + + /// Return the flags which specify extensions supported by the CPU. The returned value is affected by av_force_cpu_flags() if that was used before. So av_get_cpu_flags() can easily be used in an application to detect the enabled cpu flags. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_cpu_flags(); + + /// Return default channel layout for a given number of channels. + [Obsolete("use av_channel_layout_default()")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_get_default_channel_layout(int @nb_channels); + + /// Return codec bits per sample. Only return non-zero if the bits per sample is exactly correct, not an approximation. + /// the codec + /// Number of bits per sample or zero if unknown for the given codec. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_exact_bits_per_sample(AVCodecID @codec_id); + + /// Return a channel layout and the number of channels based on the specified name. + /// channel layout specification string + /// parsed channel layout (0 if unknown) + /// number of channels + /// 0 on success, AVERROR(EINVAL) if the parsing fails. + [Obsolete("use av_channel_layout_from_string()")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_extended_channel_layout( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, ulong* @channel_layout, int* @nb_channels); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_frame_filename(byte* @buf, int @buf_size, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @path, int @number); + + /// Return in 'buf' the path with '%d' replaced by a number. + /// destination buffer + /// destination buffer size + /// numbered sequence string + /// frame number + /// AV_FRAME_FILENAME_FLAGS_* + /// 0 if OK, -1 on format error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_frame_filename2(byte* @buf, int @buf_size, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @path, int @number, int @flags); + + /// Return a string describing the media_type enum, NULL if media_type is unknown. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_get_media_type_string(AVMediaType @media_type); + + /// Get timing information for the data currently output. The exact meaning of "currently output" depends on the format. It is mostly relevant for devices that have an internal buffer and/or work in real time. + /// media file handle + /// stream in the media file + /// DTS of the last packet output for the stream, in stream time_base units + /// absolute time when that packet whas output, in microsecond + /// 0 if OK, AVERROR(ENOSYS) if the format does not support it Note: some formats or devices may not allow to measure dts and wall atomically. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_output_timestamp(AVFormatContext* @s, int @stream, long* @dts, long* @wall); + + /// Get the packed alternative form of the given sample format. + /// the packed alternative form of the given sample format or AV_SAMPLE_FMT_NONE on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVSampleFormat av_get_packed_sample_fmt(AVSampleFormat @sample_fmt); + + /// Allocate and read the payload of a packet and initialize its fields with default values. + /// associated IO context + /// packet + /// desired payload size + /// >0 (read size) if OK, AVERROR_xxx otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_packet(AVIOContext* @s, AVPacket* @pkt, int @size); + + /// Return the number of bits per pixel for the pixel format described by pixdesc, including any padding or unused bits. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_padded_bits_per_pixel(AVPixFmtDescriptor* @pixdesc); + + /// Return the PCM codec associated with a sample format. + /// endianness, 0 for little, 1 for big, -1 (or anything else) for native + /// AV_CODEC_ID_PCM_* or AV_CODEC_ID_NONE + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecID av_get_pcm_codec(AVSampleFormat @fmt, int @be); + + /// Return a single letter to describe the given picture type pict_type. + /// the picture type + /// a single character representing the picture type, '?' if pict_type is unknown + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte av_get_picture_type_char(AVPictureType @pict_type); + + /// Return the pixel format corresponding to name. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVPixelFormat av_get_pix_fmt( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Compute what kind of losses will occur when converting from one specific pixel format to another. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. These losses can involve loss of chroma, but also loss of resolution, loss of color depth, loss due to the color space conversion, loss of the alpha bits or loss due to color quantization. av_get_fix_fmt_loss() informs you about the various types of losses which will occur when converting from one pixel format to another. + /// destination pixel format + /// source pixel format + /// Whether the source pixel format alpha channel is used. 
+ /// Combination of flags informing you what kind of losses will occur (maximum loss for an invalid dst_pix_fmt). + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_pix_fmt_loss(AVPixelFormat @dst_pix_fmt, AVPixelFormat @src_pix_fmt, int @has_alpha); + + /// Return the short name for a pixel format, NULL in case pix_fmt is unknown. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_get_pix_fmt_name(AVPixelFormat @pix_fmt); + + /// Print in buf the string corresponding to the pixel format with number pix_fmt, or a header if pix_fmt is negative. + /// the buffer where to write the string + /// the size of buf + /// the number of the pixel format to print the corresponding info string, or a negative value to print the corresponding header. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_get_pix_fmt_string(byte* @buf, int @buf_size, AVPixelFormat @pix_fmt); + + /// Get the planar alternative form of the given sample format. + /// the planar alternative form of the given sample format or AV_SAMPLE_FMT_NONE on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVSampleFormat av_get_planar_sample_fmt(AVSampleFormat @sample_fmt); + + /// Return a name for the specified profile, if available. + /// the codec that is searched for the given profile + /// the profile value for which a name is requested + /// A name for the profile if found, NULL otherwise. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_get_profile_name(AVCodec* @codec, int @profile); + + /// Return a sample format corresponding to name, or AV_SAMPLE_FMT_NONE on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVSampleFormat av_get_sample_fmt( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Return the name of sample_fmt, or NULL if sample_fmt is not recognized. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_get_sample_fmt_name(AVSampleFormat @sample_fmt); + + /// Generate a string corresponding to the sample format with sample_fmt, or a header if sample_fmt is negative. + /// the buffer where to write the string + /// the size of buf + /// the number of the sample format to print the corresponding info string, or a negative value to print the corresponding header. + /// the pointer to the filled buffer or NULL if sample_fmt is unknown or in case of other errors + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_get_sample_fmt_string(byte* @buf, int @buf_size, AVSampleFormat @sample_fmt); + + /// Get the value and name of a standard channel layout. 
+ /// index in an internal list, starting at 0 + /// channel layout mask + /// name of the layout + /// 0 if the layout exists, < 0 if index is beyond the limits + [Obsolete("use av_channel_layout_standard()")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_get_standard_channel_layout(uint @index, ulong* @layout, byte** @name); + + /// Return the fractional representation of the internal time base. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_get_time_base_q(); + + /// Get the current time in microseconds. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_gettime(); + + /// Get the current time in microseconds since some unspecified starting point. On platforms that support it, the time comes from a monotonic clock This property makes this time source ideal for measuring relative time. The returned values may not be monotonic on platforms where a monotonic clock is not available. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_gettime_relative(); + + /// Indicates with a boolean result if the av_gettime_relative() time source is monotonic. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_gettime_relative_is_monotonic(); + + /// Increase packet size, correctly zeroing padding + /// packet + /// number of bytes by which to increase the size of the packet + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_grow_packet(AVPacket* @pkt, int @grow_by); + + /// Guess the codec ID based upon muxer and filename. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecID av_guess_codec(AVOutputFormat* @fmt, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @short_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @mime_type, AVMediaType @type); + + /// Return the output format in the list of registered output formats which best matches the provided parameters, or return NULL if there is no match. + /// if non-NULL checks if short_name matches with the names of the registered formats + /// if non-NULL checks if filename terminates with the extensions of the registered formats + /// if non-NULL checks if mime_type matches with the MIME type of the registered formats + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVOutputFormat* av_guess_format( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @short_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @mime_type); + + /// Guess the frame rate, based on both the container and codec information. 
+ /// the format context which the stream is part of + /// the stream which the frame is part of + /// the frame for which the frame rate should be determined, may be NULL + /// the guessed (valid) frame rate, 0/1 if no idea + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_guess_frame_rate(AVFormatContext* @ctx, AVStream* @stream, AVFrame* @frame); + + /// Guess the sample aspect ratio of a frame, based on both the stream and the frame aspect ratio. + /// the format context which the stream is part of + /// the stream which the frame is part of + /// the frame with the aspect ratio to be determined + /// the guessed (valid) sample_aspect_ratio, 0/1 if no idea + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_guess_sample_aspect_ratio(AVFormatContext* @format, AVStream* @stream, AVFrame* @frame); + + /// Send a nice hexadecimal dump of a buffer to the specified file stream. + /// The file stream pointer where the dump should be sent to. + /// buffer + /// buffer size + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_hex_dump(_iobuf* @f, byte* @buf, int @size); + + /// Send a nice hexadecimal dump of a buffer to the log. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message, lower values signifying higher importance. + /// buffer + /// buffer size + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_hex_dump_log(void* @avcl, int @level, byte* @buf, int @size); + + /// Allocate an AVHWDeviceContext for a given hardware type. + /// the type of the hardware device to allocate. + /// a reference to the newly created AVHWDeviceContext on success or NULL on failure. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_hwdevice_ctx_alloc(AVHWDeviceType @type); + + /// Open a device of the specified type and create an AVHWDeviceContext for it. + /// On success, a reference to the newly-created device context will be written here. The reference is owned by the caller and must be released with av_buffer_unref() when no longer needed. On failure, NULL will be written to this pointer. + /// The type of the device to create. + /// A type-specific string identifying the device to open. + /// A dictionary of additional (type-specific) options to use in opening the device. The dictionary remains owned by the caller. + /// currently unused + /// 0 on success, a negative AVERROR code on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwdevice_ctx_create(AVBufferRef** @device_ctx, AVHWDeviceType @type, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @device, AVDictionary* @opts, int @flags); + + /// Create a new device of the specified type from an existing device. + /// On success, a reference to the newly-created AVHWDeviceContext. + /// The type of the new device to create. + /// A reference to an existing AVHWDeviceContext which will be used to create the new device. + /// Currently unused; should be set to zero. + /// Zero on success, a negative AVERROR code on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwdevice_ctx_create_derived(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, int @flags); + + /// Create a new device of the specified type from an existing device. + /// On success, a reference to the newly-created AVHWDeviceContext. + /// The type of the new device to create. 
+ /// A reference to an existing AVHWDeviceContext which will be used to create the new device. + /// Options for the new device to create, same format as in av_hwdevice_ctx_create. + /// Currently unused; should be set to zero. + /// Zero on success, a negative AVERROR code on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwdevice_ctx_create_derived_opts(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, AVDictionary* @options, int @flags); + + /// Finalize the device context before use. This function must be called after the context is filled with all the required information and before it is used in any way. + /// a reference to the AVHWDeviceContext + /// 0 on success, a negative AVERROR code on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwdevice_ctx_init(AVBufferRef* @ref); + + /// Look up an AVHWDeviceType by name. + /// String name of the device type (case-insensitive). + /// The type from enum AVHWDeviceType, or AV_HWDEVICE_TYPE_NONE if not found. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVHWDeviceType av_hwdevice_find_type_by_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Get the constraints on HW frames given a device and the HW-specific configuration to be used with that device. If no HW-specific configuration is provided, returns the maximum possible capabilities of the device. + /// a reference to the associated AVHWDeviceContext. + /// a filled HW-specific configuration structure, or NULL to return the maximum possible capabilities of the device. + /// AVHWFramesConstraints structure describing the constraints on the device, or NULL if not available. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVHWFramesConstraints* av_hwdevice_get_hwframe_constraints(AVBufferRef* @ref, void* @hwconfig); + + /// Get the string name of an AVHWDeviceType. + /// Type from enum AVHWDeviceType. + /// Pointer to a static string containing the name, or NULL if the type is not valid. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_hwdevice_get_type_name(AVHWDeviceType @type); + + /// Allocate a HW-specific configuration structure for a given HW device. After use, the user must free all members as required by the specific hardware structure being used, then free the structure itself with av_free(). + /// a reference to the associated AVHWDeviceContext. + /// The newly created HW-specific configuration structure on success or NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_hwdevice_hwconfig_alloc(AVBufferRef* @device_ctx); + + /// Iterate over supported device types. + /// The next usable device type from enum AVHWDeviceType, or AV_HWDEVICE_TYPE_NONE if there are no more. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVHWDeviceType av_hwdevice_iterate_types(AVHWDeviceType @prev); + + /// Free an AVHWFrameConstraints structure. + /// The (filled or unfilled) AVHWFrameConstraints structure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_hwframe_constraints_free(AVHWFramesConstraints** @constraints); + + /// Allocate an AVHWFramesContext tied to a given device context. + /// a reference to a AVHWDeviceContext. This function will make a new reference for internal use, the one passed to the function remains owned by the caller. 
+ /// a reference to the newly created AVHWFramesContext on success or NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVBufferRef* av_hwframe_ctx_alloc(AVBufferRef* @device_ctx); + + /// Create and initialise an AVHWFramesContext as a mapping of another existing AVHWFramesContext on a different device. + /// On success, a reference to the newly created AVHWFramesContext. + /// A reference to the device to create the new AVHWFramesContext on. + /// A reference to an existing AVHWFramesContext which will be mapped to the derived context. + /// Some combination of AV_HWFRAME_MAP_* flags, defining the mapping parameters to apply to frames which are allocated in the derived device. + /// Zero on success, negative AVERROR code on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwframe_ctx_create_derived(AVBufferRef** @derived_frame_ctx, AVPixelFormat @format, AVBufferRef* @derived_device_ctx, AVBufferRef* @source_frame_ctx, int @flags); + + /// Finalize the context before use. This function must be called after the context is filled with all the required information and before it is attached to any frames. + /// a reference to the AVHWFramesContext + /// 0 on success, a negative AVERROR code on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwframe_ctx_init(AVBufferRef* @ref); + + /// Allocate a new frame attached to the given AVHWFramesContext. + /// a reference to an AVHWFramesContext + /// an empty (freshly allocated or unreffed) frame to be filled with newly allocated buffers. 
+ /// currently unused, should be set to zero + /// 0 on success, a negative AVERROR code on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwframe_get_buffer(AVBufferRef* @hwframe_ctx, AVFrame* @frame, int @flags); + + /// Map a hardware frame. + /// Destination frame, to contain the mapping. + /// Source frame, to be mapped. + /// Some combination of AV_HWFRAME_MAP_* flags. + /// Zero on success, negative AVERROR code on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwframe_map(AVFrame* @dst, AVFrame* @src, int @flags); + + /// Copy data to or from a hw surface. At least one of dst/src must have an AVHWFramesContext attached. + /// the destination frame. dst is not touched on failure. + /// the source frame. + /// currently unused, should be set to zero + /// 0 on success, a negative AVERROR error code on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwframe_transfer_data(AVFrame* @dst, AVFrame* @src, int @flags); + + /// Get a list of possible source or target formats usable in av_hwframe_transfer_data(). + /// the frame context to obtain the information for + /// the direction of the transfer + /// the pointer to the output format list will be written here. The list is terminated with AV_PIX_FMT_NONE and must be freed by the caller when no longer needed using av_free(). If this function returns successfully, the format list will have at least one item (not counting the terminator). On failure, the contents of this pointer are unspecified. + /// currently unused, should be set to zero + /// 0 on success, a negative AVERROR code on failure. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_hwframe_transfer_get_formats(AVBufferRef* @hwframe_ctx, AVHWFrameTransferDirection @dir, AVPixelFormat** @formats, int @flags); + + /// Allocate an image with size w and h and pixel format pix_fmt, and fill pointers and linesizes accordingly. The allocated image buffer has to be freed by using av_freep(&pointers[0]). + /// the value to use for buffer size alignment + /// the size in bytes required for the image buffer, a negative error code in case of failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_alloc(ref byte_ptr4 @pointers, ref int4 @linesizes, int @w, int @h, AVPixelFormat @pix_fmt, int @align); + + /// Check if the given sample aspect ratio of an image is valid. + /// width of the image + /// height of the image + /// sample aspect ratio of the image + /// 0 if valid, a negative AVERROR code otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_check_sar(uint @w, uint @h, AVRational @sar); + + /// Check if the given dimension of an image is valid, meaning that all bytes of the image can be addressed with a signed int. + /// the width of the picture + /// the height of the picture + /// the offset to sum to the log level for logging with log_ctx + /// the parent logging context, it may be NULL + /// >= 0 if valid, a negative error code otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_check_size(uint @w, uint @h, int @log_offset, void* @log_ctx); + + /// Check if the given dimension of an image is valid, meaning that all bytes of a plane of an image with the specified pix_fmt can be addressed with a signed int. 
+ /// the width of the picture + /// the height of the picture + /// the maximum number of pixels the user wants to accept + /// the pixel format, can be AV_PIX_FMT_NONE if unknown. + /// the offset to sum to the log level for logging with log_ctx + /// the parent logging context, it may be NULL + /// >= 0 if valid, a negative error code otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_check_size2(uint @w, uint @h, long @max_pixels, AVPixelFormat @pix_fmt, int @log_offset, void* @log_ctx); + + /// Copy image in src_data to dst_data. + /// linesizes for the image in dst_data + /// linesizes for the image in src_data + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_image_copy(ref byte_ptr4 @dst_data, ref int4 @dst_linesizes, in byte_ptr4 @src_data, in int4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height); + + /// Copy image plane from src to dst. That is, copy "height" number of lines of "bytewidth" bytes each. The first byte of each successive line is separated by *_linesize bytes. + /// linesize for the image plane in dst + /// linesize for the image plane in src + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_image_copy_plane(byte* @dst, int @dst_linesize, byte* @src, int @src_linesize, int @bytewidth, int @height); + + /// Copy image data located in uncacheable (e.g. GPU mapped) memory. Where available, this function will use special functionality for reading from such memory, which may result in greatly improved performance compared to plain av_image_copy_plane(). + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_image_copy_plane_uc_from(byte* @dst, long @dst_linesize, byte* @src, long @src_linesize, long @bytewidth, int @height); + + /// Copy image data from an image into a buffer. 
+ /// a buffer into which picture data will be copied + /// the size in bytes of dst + /// pointers containing the source image data + /// linesizes for the image in src_data + /// the pixel format of the source image + /// the width of the source image in pixels + /// the height of the source image in pixels + /// the assumed linesize alignment for dst + /// the number of bytes written to dst, or a negative value (error code) on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_copy_to_buffer(byte* @dst, int @dst_size, in byte_ptr4 @src_data, in int4 @src_linesize, AVPixelFormat @pix_fmt, int @width, int @height, int @align); + + /// Copy image data located in uncacheable (e.g. GPU mapped) memory. Where available, this function will use special functionality for reading from such memory, which may result in greatly improved performance compared to plain av_image_copy(). + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_image_copy_uc_from(ref byte_ptr4 @dst_data, in long4 @dst_linesizes, in byte_ptr4 @src_data, in long4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height); + + /// Setup the data pointers and linesizes based on the specified image parameters and the provided array. 
+ /// data pointers to be filled in + /// linesizes for the image in dst_data to be filled in + /// buffer which will contain or contains the actual image data, can be NULL + /// the pixel format of the image + /// the width of the image in pixels + /// the height of the image in pixels + /// the value used in src for linesize alignment + /// the size in bytes required for src, a negative error code in case of failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_fill_arrays(ref byte_ptr4 @dst_data, ref int4 @dst_linesize, byte* @src, AVPixelFormat @pix_fmt, int @width, int @height, int @align); + + /// Overwrite the image data with black. This is suitable for filling a sub-rectangle of an image, meaning the padding between the right most pixel and the left most pixel on the next line will not be overwritten. For some formats, the image size might be rounded up due to inherent alignment. + /// data pointers to destination image + /// linesizes for the destination image + /// the pixel format of the image + /// the color range of the image (important for colorspaces such as YUV) + /// the width of the image in pixels + /// the height of the image in pixels + /// 0 if the image data was cleared, a negative AVERROR code otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_fill_black(ref byte_ptr4 @dst_data, in long4 @dst_linesize, AVPixelFormat @pix_fmt, AVColorRange @range, int @width, int @height); + + /// Fill plane linesizes for an image with pixel format pix_fmt and width width. 
+ /// array to be filled with the linesize for each plane + /// >= 0 in case of success, a negative error code otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_fill_linesizes(ref int4 @linesizes, AVPixelFormat @pix_fmt, int @width); + + /// Compute the max pixel step for each plane of an image with a format described by pixdesc. + /// an array which is filled with the max pixel step for each plane. Since a plane may contain different pixel components, the computed max_pixsteps[plane] is relative to the component in the plane with the max pixel step. + /// an array which is filled with the component for each plane which has the max pixel step. May be NULL. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_image_fill_max_pixsteps(ref int4 @max_pixsteps, ref int4 @max_pixstep_comps, AVPixFmtDescriptor* @pixdesc); + + /// Fill plane sizes for an image with pixel format pix_fmt and height height. + /// the array to be filled with the size of each image plane + /// the array containing the linesize for each plane, should be filled by av_image_fill_linesizes() + /// >= 0 in case of success, a negative error code otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_fill_plane_sizes(ref ulong4 @size, AVPixelFormat @pix_fmt, int @height, in long4 @linesizes); + + /// Fill plane data pointers for an image with pixel format pix_fmt and height height. 
+ /// pointers array to be filled with the pointer for each image plane + /// the pointer to a buffer which will contain the image + /// the array containing the linesize for each plane, should be filled by av_image_fill_linesizes() + /// the size in bytes required for the image buffer, a negative error code in case of failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_fill_pointers(ref byte_ptr4 @data, AVPixelFormat @pix_fmt, int @height, byte* @ptr, in int4 @linesizes); + + /// Return the size in bytes of the amount of data required to store an image with the given parameters. + /// the pixel format of the image + /// the width of the image in pixels + /// the height of the image in pixels + /// the assumed linesize alignment + /// the buffer size in bytes, a negative error code in case of failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_get_buffer_size(AVPixelFormat @pix_fmt, int @width, int @height, int @align); + + /// Compute the size of an image line with format pix_fmt and width width for the plane plane. + /// the computed size in bytes + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_image_get_linesize(AVPixelFormat @pix_fmt, int @width, int @plane); + + /// Get the index for a specific timestamp. 
+ /// stream that the timestamp belongs to + /// timestamp to retrieve the index for + /// if AVSEEK_FLAG_BACKWARD then the returned index will correspond to the timestamp which is < = the requested one, if backward is 0, then it will be >= if AVSEEK_FLAG_ANY seek to any frame, only keyframes otherwise + /// < 0 if no such timestamp could be found + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_index_search_timestamp(AVStream* @st, long @timestamp, int @flags); + + /// Initialize optional fields of a packet with default values. + /// packet + [Obsolete("This function is deprecated. Once it's removed, sizeof(AVPacket) will not be a part of the ABI anymore.")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_init_packet(AVPacket* @pkt); + + /// Audio input devices iterator. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVInputFormat* av_input_audio_device_next(AVInputFormat* @d); + + /// Video input devices iterator. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVInputFormat* av_input_video_device_next(AVInputFormat* @d); + + /// Compute the length of an integer list. + /// size in bytes of each list element (only 1, 2, 4 or 8) + /// pointer to the list + /// list terminator (usually 0 or -1) + /// length of the list, in elements, not counting the terminator + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_int_list_length_for_size(uint @elsize, void* @list, ulong @term); + + /// Write a packet to an output media file ensuring correct interleaving. + /// media file handle + /// The packet containing the data to be written. If the packet is reference-counted, this function will take ownership of this reference and unreference it later when it sees fit. 
If the packet is not reference-counted, libavformat will make a copy. The returned packet will be blank (as if returned from av_packet_alloc()), even on error. This parameter can be NULL (at any time, not just at the end), to flush the interleaving queues. Packet's "stream_index" field must be set to the index of the corresponding stream in "s->streams". The timestamps ( "pts", "dts") must be set to correct values in the stream's timebase (unless the output format is flagged with the AVFMT_NOTIMESTAMPS flag, then they can be set to AV_NOPTS_VALUE). The dts for subsequent packets in one stream must be strictly increasing (unless the output format is flagged with the AVFMT_TS_NONSTRICT, then they merely have to be nondecreasing). "duration" should also be set if known. + /// 0 on success, a negative AVERROR on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_interleaved_write_frame(AVFormatContext* @s, AVPacket* @pkt); + + /// Write an uncoded frame to an output media file. + /// >=0 for success, a negative code on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_interleaved_write_uncoded_frame(AVFormatContext* @s, int @stream_index, AVFrame* @frame); + + /// Send the specified message to the log if the level is less than or equal to the current av_log_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct or NULL if general log. + /// The importance level of the message expressed using a "Logging Constant". + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log(void* @avcl, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt); + + /// Default logging callback + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message expressed using a "Logging Constant". + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. + /// The arguments referenced by the format string. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log_default_callback(void* @avcl, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl); + + /// Format a line of log the same way as the default callback. + /// buffer to receive the formatted line + /// size of the buffer + /// used to store whether the prefix must be printed; must point to a persistent integer initially set to 1 + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log_format_line(void* @ptr, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix); + + /// Format a line of log the same way as the default callback. 
+ /// buffer to receive the formatted line; may be NULL if line_size is 0 + /// size of the buffer; at most line_size-1 characters will be written to the buffer, plus one null terminator + /// used to store whether the prefix must be printed; must point to a persistent integer initially set to 1 + /// Returns a negative value if an error occurred, otherwise returns the number of characters that would have been written for a sufficiently large buffer, not including the terminating null character. If the return value is not less than line_size, it means that the log message was truncated to fit the buffer. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_log_format_line2(void* @ptr, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_log_get_flags(); + + /// Get the current log level + /// Current log level + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_log_get_level(); + + /// Send the specified message to the log once with the initial_level and then with the subsequent_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct or NULL if general log. + /// importance level of the message expressed using a "Logging Constant" for the first occurance. + /// importance level of the message expressed using a "Logging Constant" after the first occurance. + /// a variable to keep trak of if a message has already been printed this must be initialized to 0 before the first use. 
The same state must not be accessed by 2 Threads simultaneously. + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log_once(void* @avcl, int @initial_level, int @subsequent_level, int* @state, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt); + + /// Set the logging callback + /// A logging function with a compatible signature. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log_set_callback(av_log_set_callback_callback_func @callback); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log_set_flags(int @arg); + + /// Set the log level + /// Logging level + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_log_set_level(int @level); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_log2(uint @v); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_log2_16bit(uint @v); + + /// Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU). + /// Size in bytes for the memory block to be allocated + /// Pointer to the allocated block, or `NULL` if the block cannot be allocated + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_malloc(ulong @size); + + /// Allocate a memory block for an array with av_malloc(). 
+ /// Number of element + /// Size of a single element + /// Pointer to the allocated block, or `NULL` if the block cannot be allocated + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_malloc_array(ulong @nmemb, ulong @size); + + /// Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU) and zero all the bytes of the block. + /// Size in bytes for the memory block to be allocated + /// Pointer to the allocated block, or `NULL` if it cannot be allocated + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_mallocz(ulong @size); + + [Obsolete("use av_calloc()")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_mallocz_array(ulong @nmemb, ulong @size); + + /// Allocate an AVMasteringDisplayMetadata structure and set its fields to default values. The resulting struct can be freed using av_freep(). + /// An AVMasteringDisplayMetadata filled with default values or NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVMasteringDisplayMetadata* av_mastering_display_metadata_alloc(); + + /// Allocate a complete AVMasteringDisplayMetadata and add it to the frame. + /// The frame which side data is added to. + /// The AVMasteringDisplayMetadata structure to be filled by caller. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVMasteringDisplayMetadata* av_mastering_display_metadata_create_side_data(AVFrame* @frame); + + /// Return a positive value if the given filename has one of the given extensions, 0 otherwise. 
+ /// file name to check against the given extensions + /// a comma-separated list of filename extensions + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_match_ext( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @extensions); + + /// Set the maximum size that may be allocated in one block. + /// Value to be set as the new maximum size + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_max_alloc(ulong @max); + + /// Overlapping memcpy() implementation. + /// Destination buffer + /// Number of bytes back to start copying (i.e. the initial size of the overlapping window); must be > 0 + /// Number of bytes to copy; must be >= 0 + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_memcpy_backptr(byte* @dst, int @back, int @cnt); + + /// Duplicate a buffer with av_malloc(). + /// Buffer to be duplicated + /// Size in bytes of the buffer copied + /// Pointer to a newly allocated buffer containing a copy of `p` or `NULL` if the buffer cannot be allocated + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_memdup(void* @p, ulong @size); + + /// Multiply two rationals. + /// First rational + /// Second rational + /// b*c + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_mul_q(AVRational @b, AVRational @c); + + /// Iterate over all registered muxers. + /// a pointer where libavformat will store the iteration state. Must point to NULL to start the iteration. 
+ /// the next registered muxer or NULL when the iteration is finished + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVOutputFormat* av_muxer_iterate(void** @opaque); + + /// Find which of the two rationals is closer to another rational. + /// Rational to be compared against + /// One of the following values: - 1 if `q1` is nearer to `q` than `q2` - -1 if `q2` is nearer to `q` than `q1` - 0 if they have the same distance + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_nearer_q(AVRational @q, AVRational @q1, AVRational @q2); + + /// Allocate the payload of a packet and initialize its fields with default values. + /// packet + /// wanted payload size + /// 0 if OK, AVERROR_xxx otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_new_packet(AVPacket* @pkt, int @size); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVProgram* av_new_program(AVFormatContext* @s, int @id); + + /// Iterate over potential AVOptions-enabled children of parent. + /// a pointer where iteration state is stored. + /// AVClass corresponding to next potential child or NULL + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* av_opt_child_class_iterate(AVClass* @parent, void** @iter); + + /// Iterate over AVOptions-enabled children of obj. + /// result of a previous call to this function or NULL + /// next AVOptions-enabled child or NULL + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_opt_child_next(void* @obj, void* @prev); + + /// Copy options from src object into dest object. 
+ /// Object to copy from + /// Object to copy into + /// 0 on success, negative on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_copy(void* @dest, void* @src); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_eval_double(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, double* @double_out); + + /// @{ This group of functions can be used to evaluate option strings and get numbers out of them. They do the same thing as av_opt_set(), except the result is written into the caller-supplied pointer. + /// a struct whose first element is a pointer to AVClass. + /// an option for which the string is to be evaluated. + /// string to be evaluated. + /// 0 on success, a negative number on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_eval_flags(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, int* @flags_out); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_eval_float(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, float* @float_out); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_eval_int(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] 
+ #endif + string @val, int* @int_out); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_eval_int64(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, long* @int64_out); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_eval_q(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, AVRational* @q_out); + + /// Look for an option in an object. Consider only options which have all the specified flags set. + /// A pointer to a struct whose first element is a pointer to an AVClass. Alternatively a double pointer to an AVClass, if AV_OPT_SEARCH_FAKE_OBJ search flag is set. + /// The name of the option to look for. + /// When searching for named constants, name of the unit it belongs to. + /// Find only options with all the specified flags set (AV_OPT_FLAG). + /// A combination of AV_OPT_SEARCH_*. + /// A pointer to the option found, or NULL if no option was found. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVOption* av_opt_find(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @unit, int @opt_flags, int @search_flags); + + /// Look for an option in an object. Consider only options which have all the specified flags set. 
+ /// A pointer to a struct whose first element is a pointer to an AVClass. Alternatively a double pointer to an AVClass, if AV_OPT_SEARCH_FAKE_OBJ search flag is set. + /// The name of the option to look for. + /// When searching for named constants, name of the unit it belongs to. + /// Find only options with all the specified flags set (AV_OPT_FLAG). + /// A combination of AV_OPT_SEARCH_*. + /// if non-NULL, an object to which the option belongs will be written here. It may be different from obj if AV_OPT_SEARCH_CHILDREN is present in search_flags. This parameter is ignored if search_flags contain AV_OPT_SEARCH_FAKE_OBJ. + /// A pointer to the option found, or NULL if no option was found. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVOption* av_opt_find2(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @unit, int @opt_flags, int @search_flags, void** @target_obj); + + /// Check whether a particular flag is set in a flags field. + /// the name of the flag field option + /// the name of the flag to check + /// non-zero if the flag is set, zero if the flag isn't set, isn't of the right type, or the flags field doesn't exist. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_opt_flag_is_set(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @field_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @flag_name); + + /// Free all allocated objects in obj. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_opt_free(void* @obj); + + /// Free an AVOptionRanges struct and set it to NULL. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_opt_freep_ranges(AVOptionRanges** @ranges); + + /// @{ Those functions get a value of the option with the given name from an object. + /// a struct whose first element is a pointer to an AVClass. + /// name of the option to get. + /// flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN is passed here, then the option may be found in a child of obj. 
    /// <param name="out_val">value of the option will be written here</param>
    /// <returns>&gt;=0 on success, a negative error code otherwise</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_get(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @search_flags, byte** @out_val);

    [Obsolete()]
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_get_channel_layout(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @search_flags, long* @ch_layout);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_get_chlayout(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @search_flags, AVChannelLayout* @layout);

    /// <summary>The returned dictionary is a copy of the actual value and must be freed with av_dict_free() by the caller</summary>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_get_dict_val(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @search_flags, AVDictionary** @out_val);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_get_double(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @search_flags, double* @out_val);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_get_image_size(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @search_flags, int* @w_out, int* @h_out);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_get_int(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @search_flags, long* @out_val);

    /// <summary>Extract a key-value pair from the beginning of a string.</summary>
    /// <param name="ropts">pointer to the options string, will be updated to point to the rest of the string (one of the pairs_sep or the final NUL)</param>
    /// <param name="key_val_sep">a 0-terminated list of characters used to separate key from value, for example '='</param>
    /// <param name="pairs_sep">a 0-terminated list of characters used to separate two pairs from each other, for example ':' or ','</param>
    /// <param name="flags">flags; see the AV_OPT_FLAG_* values below</param>
    /// <param name="rkey">parsed key; must be freed using av_free()</param>
    /// <param name="rval">parsed value; must be freed using av_free()</param>
    /// <returns>&gt;=0 for success, or a negative value corresponding to an AVERROR code in case of error; in particular: AVERROR(EINVAL) if no key is present</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_get_key_value(byte** @ropts,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @key_val_sep,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @pairs_sep, uint @flags, byte** @rkey, byte** @rval);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_get_pixel_fmt(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @search_flags, AVPixelFormat* @out_fmt);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_get_q(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @search_flags, AVRational* @out_val);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_get_sample_fmt(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @search_flags, AVSampleFormat* @out_fmt);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_get_video_rate(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @search_flags, AVRational* @out_val);

    /// <summary>Check if given option is set to its default value.</summary>
    /// <param name="obj">AVClass object to check option on</param>
    /// <param name="o">option to be checked</param>
    /// <returns>&gt;0 when option is set to its default, 0 when option is not set its default, &lt;0 on error</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_is_set_to_default(void* @obj, AVOption* @o);

    /// <summary>Check if given option is set to its default value.</summary>
    /// <param name="obj">AVClass object to check option on</param>
    /// <param name="name">option name</param>
    /// <param name="search_flags">combination of AV_OPT_SEARCH_*</param>
    /// <returns>&gt;0 when option is set to its default, 0 when option is not set its default, &lt;0 on error</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_is_set_to_default_by_name(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @search_flags);

    /// <summary>Iterate over all AVOptions belonging to obj.</summary>
    /// <param name="obj">an AVOptions-enabled struct or a double pointer to an AVClass describing it.</param>
    /// <param name="prev">result of the previous call to av_opt_next() on this object or NULL</param>
    /// <returns>next AVOption or NULL</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVOption* av_opt_next(void* @obj, AVOption* @prev);

    /// <summary>@}</summary>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void* av_opt_ptr(AVClass* @avclass, void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name);

    /// <summary>Get a list of allowed ranges for the given option.</summary>
    /// <param name="flags">is a bitmask of flags, undefined flags should not be set and should be ignored. AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to a AVClass instead of a full instance. AV_OPT_MULTI_COMPONENT_RANGE indicates that function may return more than one component.</param>
    /// <returns>number of components returned on success, a negative error code otherwise</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_query_ranges(AVOptionRanges** @p0, void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @key, int @flags);

    /// <summary>Get a default list of allowed ranges for the given option.</summary>
    /// <param name="flags">is a bitmask of flags, undefined flags should not be set and should be ignored. AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to a AVClass instead of a full instance. AV_OPT_MULTI_COMPONENT_RANGE indicates that function may return more than one component.</param>
    /// <returns>number of components returned on success, a negative error code otherwise</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_query_ranges_default(AVOptionRanges** @p0, void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @key, int @flags);

    /// <summary>Serialize object's options.</summary>
    /// <param name="obj">AVClass object to serialize</param>
    /// <param name="opt_flags">serialize options with all the specified flags set (AV_OPT_FLAG)</param>
    /// <param name="flags">combination of AV_OPT_SERIALIZE_* flags</param>
    /// <param name="buffer">Pointer to buffer that will be allocated with string containing serialized options. Buffer must be freed by the caller when is no longer needed.</param>
    /// <param name="key_val_sep">character used to separate key from value</param>
    /// <param name="pairs_sep">character used to separate two pairs from each other</param>
    /// <returns>&gt;= 0 on success, negative on error</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_serialize(void* @obj, int @opt_flags, int @flags, byte** @buffer, byte @key_val_sep, byte @pairs_sep);

    /// <summary>@{ Those functions set the field of obj with the given name to value.</summary>
    /// <param name="obj">A struct whose first element is a pointer to an AVClass.</param>
    /// <param name="name">the name of the field to set</param>
    /// <param name="val">The value to set. In case of av_opt_set() if the field is not of a string type, then the given string is parsed. SI postfixes and some named scalars are supported. Flags fields take a '+'/'-' prefixed sequence of named flags; dictionary fields take a ':'-separated list of key=value pairs with ':' escaped.</param>
    /// <param name="search_flags">flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN is passed here, then the option may be set on a child of obj.</param>
    /// <returns>0 if the value has been set, or an AVERROR code in case of error: AVERROR_OPTION_NOT_FOUND if no matching option exists, AVERROR(ERANGE) if the value is out of range, AVERROR(EINVAL) if the value is not valid</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @val, int @search_flags);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_bin(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, byte* @val, int @size, int @search_flags);

    [Obsolete()]
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_channel_layout(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, long @ch_layout, int @search_flags);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_chlayout(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, AVChannelLayout* @layout, int @search_flags);

    /// <summary>Set the values of all AVOption fields to their default values.</summary>
    /// <param name="s">an AVOption-enabled struct (its first member must be a pointer to AVClass)</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_opt_set_defaults(void* @s);

    /// <summary>Set the values of all AVOption fields to their default values. Only these AVOption fields for which (opt-&gt;flags &amp; mask) == flags will have their default applied to s.</summary>
    /// <param name="s">an AVOption-enabled struct (its first member must be a pointer to AVClass)</param>
    /// <param name="mask">combination of AV_OPT_FLAG_*</param>
    /// <param name="flags">combination of AV_OPT_FLAG_*</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_opt_set_defaults2(void* @s, int @mask, int @flags);

    /// <summary>Set all the options from a given dictionary on an object.</summary>
    /// <param name="obj">a struct whose first element is a pointer to AVClass</param>
    /// <param name="options">options to process. This dictionary will be freed and replaced by a new one containing all options not found in obj. Of course this new dictionary needs to be freed by caller with av_dict_free().</param>
    /// <returns>0 on success, a negative AVERROR if some option was found in obj, but could not be set.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_dict(void* @obj, AVDictionary** @options);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_dict_val(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, AVDictionary* @val, int @search_flags);

    /// <summary>Set all the options from a given dictionary on an object.</summary>
    /// <param name="obj">a struct whose first element is a pointer to AVClass</param>
    /// <param name="options">options to process. This dictionary will be freed and replaced by a new one containing all options not found in obj. Of course this new dictionary needs to be freed by caller with av_dict_free().</param>
    /// <param name="search_flags">A combination of AV_OPT_SEARCH_*.</param>
    /// <returns>0 on success, a negative AVERROR if some option was found in obj, but could not be set.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_dict2(void* @obj, AVDictionary** @options, int @search_flags);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_double(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, double @val, int @search_flags);

    /// <summary>Parse the key-value pairs list in opts. For each key=value pair found, set the value of the corresponding option in ctx.</summary>
    /// <param name="ctx">the AVClass object to set options on</param>
    /// <param name="opts">the options string, key-value pairs separated by a delimiter</param>
    /// <param name="shorthand">a NULL-terminated array of options names for shorthand notation: if the first field in opts has no key part, the key is taken from the first element of shorthand; then again for the second, etc., until either opts is finished, shorthand is finished or a named option is found; after that, all options must be named</param>
    /// <param name="key_val_sep">a 0-terminated list of characters used to separate key from value, for example '='</param>
    /// <param name="pairs_sep">a 0-terminated list of characters used to separate two pairs from each other, for example ':' or ','</param>
    /// <returns>the number of successfully set key=value pairs, or a negative value corresponding to an AVERROR code in case of error: AVERROR(EINVAL) if opts cannot be parsed, the error code issued by av_set_string3() if a key/value pair cannot be set</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_from_string(void* @ctx,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @opts, byte** @shorthand,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @key_val_sep,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @pairs_sep);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_image_size(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, int @w, int @h, int @search_flags);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_int(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, long @val, int @search_flags);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_pixel_fmt(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, AVPixelFormat @fmt, int @search_flags);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_q(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, AVRational @val, int @search_flags);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_sample_fmt(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, AVSampleFormat @fmt, int @search_flags);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_set_video_rate(void* @obj,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @name, AVRational @val, int @search_flags);

    /// <summary>Show the obj options.</summary>
    /// <param name="av_log_obj">log context to use for showing the options</param>
    /// <param name="req_flags">requested flags for the options to show. Show only the options for which it is opt-&gt;flags &amp; req_flags.</param>
    /// <param name="rej_flags">rejected flags for the options to show. Show only the options for which it is !(opt-&gt;flags &amp; req_flags).</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_opt_show2(void* @obj, void* @av_log_obj, int @req_flags, int @rej_flags);

    /// <summary>Audio output devices iterator.</summary>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVOutputFormat* av_output_audio_device_next(AVOutputFormat* @d);

    /// <summary>Video output devices iterator.</summary>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVOutputFormat* av_output_video_device_next(AVOutputFormat* @d);

    /// <summary>Wrap an existing array as a packet side data.</summary>
    /// <param name="pkt">packet</param>
    /// <param name="type">side information type</param>
    /// <param name="data">the side data array. It must be allocated with the av_malloc() family of functions. The ownership of the data is transferred to pkt.</param>
    /// <param name="size">side information size</param>
    /// <returns>a non-negative number on success, a negative AVERROR code on failure. On failure, the packet is unchanged and the data remains owned by the caller.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_packet_add_side_data(AVPacket* @pkt, AVPacketSideDataType @type, byte* @data, ulong @size);

    /// <summary>Allocate an AVPacket and set its fields to default values. The resulting struct must be freed using av_packet_free().</summary>
    /// <returns>An AVPacket filled with default values or NULL on failure.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVPacket* av_packet_alloc();

    /// <summary>Create a new packet that references the same data as src.</summary>
    /// <returns>newly created AVPacket on success, NULL on error.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVPacket* av_packet_clone(AVPacket* @src);

    /// <summary>Copy only "properties" fields from src to dst.</summary>
    /// <param name="dst">Destination packet</param>
    /// <param name="src">Source packet</param>
    /// <returns>0 on success AVERROR on failure.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_packet_copy_props(AVPacket* @dst, AVPacket* @src);

    /// <summary>Free the packet, if the packet is reference counted, it will be unreferenced first.</summary>
    /// <param name="pkt">packet to be freed. The pointer will be set to NULL.</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_packet_free(AVPacket** @pkt);

    /// <summary>Convenience function to free all the side data stored. All the other fields stay untouched.</summary>
    /// <param name="pkt">packet</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_packet_free_side_data(AVPacket* @pkt);

    /// <summary>Initialize a reference-counted packet from av_malloc()ed data.</summary>
    /// <param name="pkt">packet to be initialized. This function will set the data, size, and buf fields, all others are left untouched.</param>
    /// <param name="data">Data allocated by av_malloc() to be used as packet data. If this function returns successfully, the data is owned by the underlying AVBuffer. The caller may not access the data through other means.</param>
    /// <param name="size">size of data in bytes, without the padding. I.e. the full buffer size is assumed to be size + AV_INPUT_BUFFER_PADDING_SIZE.</param>
    /// <returns>0 on success, a negative AVERROR on error</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_packet_from_data(AVPacket* @pkt, byte* @data, int @size);

    /// <summary>Get side information from packet.</summary>
    /// <param name="pkt">packet</param>
    /// <param name="type">desired side information type</param>
    /// <param name="size">If supplied, *size will be set to the size of the side data or to zero if the desired side data is not present.</param>
    /// <returns>pointer to data if present or NULL otherwise</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern byte* av_packet_get_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong* @size);

    /// <summary>Ensure the data described by a given packet is reference counted.</summary>
    /// <param name="pkt">packet whose data should be made reference counted.</param>
    /// <returns>0 on success, a negative AVERROR on error. On failure, the packet is unchanged.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_packet_make_refcounted(AVPacket* @pkt);

    /// <summary>Create a writable reference for the data described by a given packet, avoiding data copy if possible.</summary>
    /// <param name="pkt">Packet whose data should be made writable.</param>
    /// <returns>0 on success, a negative AVERROR on failure. On failure, the packet is unchanged.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_packet_make_writable(AVPacket* @pkt);

    /// <summary>Move every field in src to dst and reset src.</summary>
    /// <param name="dst">Destination packet</param>
    /// <param name="src">Source packet, will be reset</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_packet_move_ref(AVPacket* @dst, AVPacket* @src);

    /// <summary>Allocate new information of a packet.</summary>
    /// <param name="pkt">packet</param>
    /// <param name="type">side information type</param>
    /// <param name="size">side information size</param>
    /// <returns>pointer to fresh allocated data or NULL otherwise</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern byte* av_packet_new_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size);

    /// <summary>Pack a dictionary for use in side_data.</summary>
    /// <param name="dict">The dictionary to pack.</param>
    /// <param name="size">pointer to store the size of the returned data</param>
    /// <returns>pointer to data if successful, NULL otherwise</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern byte* av_packet_pack_dictionary(AVDictionary* @dict, ulong* @size);

    /// <summary>Setup a new reference to the data described by a given packet</summary>
    /// <param name="dst">Destination packet. Will be completely overwritten.</param>
    /// <param name="src">Source packet</param>
    /// <returns>0 on success, a negative AVERROR on error. On error, dst will be blank (as if returned by av_packet_alloc()).</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_packet_ref(AVPacket* @dst, AVPacket* @src);

    /// <summary>Convert valid timing fields (timestamps / durations) in a packet from one timebase to another. Timestamps with unknown values (AV_NOPTS_VALUE) will be ignored.</summary>
    /// <param name="pkt">packet on which the conversion will be performed</param>
    /// <param name="tb_src">source timebase, in which the timing fields in pkt are expressed</param>
    /// <param name="tb_dst">destination timebase, to which the timing fields will be converted</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_packet_rescale_ts(AVPacket* @pkt, AVRational @tb_src, AVRational @tb_dst);

    /// <summary>Shrink the already allocated side data buffer</summary>
    /// <param name="pkt">packet</param>
    /// <param name="type">side information type</param>
    /// <param name="size">new side information size</param>
    /// <returns>0 on success, &lt;0 on failure</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_packet_shrink_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
    public static extern string av_packet_side_data_name(AVPacketSideDataType @type);

    /// <summary>Unpack a dictionary from side_data.</summary>
    /// <param name="data">data from side_data</param>
    /// <param name="size">size of the data</param>
    /// <param name="dict">the metadata storage dictionary</param>
    /// <returns>0 on success, &lt;0 on failure</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_packet_unpack_dictionary(byte* @data, ulong @size, AVDictionary** @dict);

    /// <summary>Wipe the packet.</summary>
    /// <param name="pkt">The packet to be unreferenced.</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_packet_unref(AVPacket* @pkt);

    /// <summary>Parse CPU caps from a string and update the given AV_CPU_* flags based on that.</summary>
    /// <returns>negative on error.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_parse_cpu_caps(uint* @flags,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @s);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_parser_close(AVCodecParserContext* @s);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVCodecParserContext* av_parser_init(int @codec_id);

    /// <summary>Iterate over all registered codec parsers.</summary>
    /// <param name="opaque">a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration.</param>
    /// <returns>the next registered codec parser or NULL when the iteration is finished</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVCodecParser* av_parser_iterate(void** @opaque);

    /// <summary>Parse a packet.</summary>
    /// <param name="s">parser context.</param>
    /// <param name="avctx">codec context.</param>
    /// <param name="poutbuf">set to pointer to parsed buffer or NULL if not yet finished.</param>
    /// <param name="poutbuf_size">set to size of parsed buffer or zero if not yet finished.</param>
    /// <param name="buf">input buffer.</param>
    /// <param name="buf_size">buffer size in bytes without the padding. I.e. the full buffer size is assumed to be buf_size + AV_INPUT_BUFFER_PADDING_SIZE. To signal EOF, this should be 0 (so that the last frame can be output).</param>
    /// <param name="pts">input presentation timestamp.</param>
    /// <param name="dts">input decoding timestamp.</param>
    /// <param name="pos">input byte position in stream.</param>
    /// <returns>the number of bytes of the input bitstream used.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_parser_parse2(AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size, long @pts, long @dts, long @pos);

    /// <summary>Returns number of planes in pix_fmt, a negative AVERROR if pix_fmt is not a valid pixel format.</summary>
    /// <returns>number of planes in pix_fmt, a negative AVERROR if pix_fmt is not a valid pixel format.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_pix_fmt_count_planes(AVPixelFormat @pix_fmt);

    /// <summary>Returns a pixel format descriptor for provided pixel format or NULL if this pixel format is unknown.</summary>
    /// <returns>a pixel format descriptor for provided pixel format or NULL if this pixel format is unknown.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVPixFmtDescriptor* av_pix_fmt_desc_get(AVPixelFormat @pix_fmt);

    /// <summary>Returns an AVPixelFormat id described by desc, or AV_PIX_FMT_NONE if desc is not a valid pointer to a pixel format descriptor.</summary>
    /// <returns>an AVPixelFormat id described by desc, or AV_PIX_FMT_NONE if desc is not a valid pointer to a pixel format descriptor.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVPixelFormat av_pix_fmt_desc_get_id(AVPixFmtDescriptor* @desc);

    /// <summary>Iterate over all pixel format descriptors known to libavutil.</summary>
    /// <param name="prev">previous descriptor. NULL to get the first descriptor.</param>
    /// <returns>next descriptor or NULL after the last descriptor</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVPixFmtDescriptor* av_pix_fmt_desc_next(AVPixFmtDescriptor* @prev);

    /// <summary>Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.</summary>
    /// <param name="pix_fmt">the pixel format</param>
    /// <param name="h_shift">store log2_chroma_w (horizontal/width shift)</param>
    /// <param name="v_shift">store log2_chroma_h (vertical/height shift)</param>
    /// <returns>0 on success, AVERROR(ENOSYS) on invalid or unknown pixel format</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_pix_fmt_get_chroma_sub_sample(AVPixelFormat @pix_fmt, int* @h_shift, int* @v_shift);

    /// <summary>Utility function to swap the endianness of a pixel format.</summary>
    /// <param name="pix_fmt">the pixel format</param>
    /// <returns>pixel format with swapped endianness if it exists, otherwise AV_PIX_FMT_NONE</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVPixelFormat av_pix_fmt_swap_endianness(AVPixelFormat @pix_fmt);

    /// <summary>Send a nice dump of a packet to the log.</summary>
    /// <param name="avcl">A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct.</param>
    /// <param name="level">The importance level of the message, lower values signifying higher importance.</param>
    /// <param name="pkt">packet to dump</param>
    /// <param name="dump_payload">True if the payload must be displayed, too.</param>
    /// <param name="st">AVStream that the packet belongs to</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_pkt_dump_log2(void* @avcl, int @level, AVPacket* @pkt, int @dump_payload, AVStream* @st);

    /// <summary>Send a nice dump of a packet to the specified file stream.</summary>
    /// <param name="f">The file stream pointer where the dump should be sent to.</param>
    /// <param name="pkt">packet to dump</param>
    /// <param name="dump_payload">True if the payload must be displayed, too.</param>
    /// <param name="st">AVStream that the packet belongs to</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_pkt_dump2(_iobuf* @f, AVPacket* @pkt, int @dump_payload, AVStream* @st);

    /// <summary>Like av_probe_input_buffer2() but returns 0 on success</summary>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_probe_input_buffer(AVIOContext* @pb, AVInputFormat** @fmt,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @url, void* @logctx, uint @offset, uint @max_probe_size);

    /// <summary>Probe a bytestream to determine the input format. Each time a probe returns with a score that is too low, the probe buffer size is increased and another attempt is made. When the maximum probe size is reached, the input format with the highest score is returned.</summary>
    /// <param name="pb">the bytestream to probe</param>
    /// <param name="fmt">the input format is put here</param>
    /// <param name="url">the url of the stream</param>
    /// <param name="logctx">the log context</param>
    /// <param name="offset">the offset within the bytestream to probe from</param>
    /// <param name="max_probe_size">the maximum probe buffer size (zero for default)</param>
    /// <returns>the score in case of success, a negative value corresponding to an the maximal score is AVPROBE_SCORE_MAX AVERROR code otherwise</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_probe_input_buffer2(AVIOContext* @pb, AVInputFormat** @fmt,
    #if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
    #else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
    #endif
    string @url, void* @logctx, uint @offset, uint @max_probe_size);

    /// <summary>Guess the file format.</summary>
    /// <param name="pd">data to be probed</param>
    /// <param name="is_opened">Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed.</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVInputFormat* av_probe_input_format(AVProbeData* @pd, int @is_opened);

    /// <summary>Guess the file format.</summary>
    /// <param name="pd">data to be probed</param>
    /// <param name="is_opened">Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed.</param>
    /// <param name="score_max">A probe score larger that this is required to accept a detection, the variable is set to the actual detection score afterwards. If the score is &lt;= AVPROBE_SCORE_MAX / 4 it is recommended to retry with a larger probe buffer.</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVInputFormat* av_probe_input_format2(AVProbeData* @pd, int @is_opened, int* @score_max);

    /// <summary>Guess the file format.</summary>
    /// <param name="is_opened">Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed.</param>
    /// <param name="score_ret">The score of the best detection.</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern AVInputFormat* av_probe_input_format3(AVProbeData* @pd, int @is_opened, int* @score_ret);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_program_add_stream_index(AVFormatContext* @ac, int @progid, uint @idx);

    /// <summary>Convert an AVRational to a IEEE 32-bit `float` expressed in fixed-point format.</summary>
    /// <param name="q">Rational to be converted</param>
    /// <returns>Equivalent floating-point value, expressed as an unsigned 32-bit integer.</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern uint av_q2intfloat(AVRational @q);

    /// <summary>Return the next frame of a stream. This function returns what is stored in the file, and does not validate that what is there are valid frames for the decoder. It will split what is stored in the file into frames and return one for each call. It will not omit invalid data between valid frames so as to give the decoder the maximum information possible for decoding.</summary>
    /// <returns>0 if OK, &lt;0 on error or end of file. On error, pkt will be blank (as if it came from av_packet_alloc()).</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_read_frame(AVFormatContext* @s, AVPacket* @pkt);

    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_read_image_line(ushort* @dst, in byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component);

    /// <summary>Read a line from an image, and write the values of the pixel format component c to dst.</summary>
    /// <param name="data">the array containing the pointers to the planes of the image</param>
    /// <param name="linesize">the array containing the linesizes of the image</param>
    /// <param name="desc">the pixel format descriptor for the image</param>
    /// <param name="x">the horizontal coordinate of the first pixel to read</param>
    /// <param name="y">the vertical coordinate of the first pixel to read</param>
    /// <param name="w">the width of the line to read, that is the number of values to write to dst</param>
    /// <param name="read_pal_component">if not zero and the format is a paletted format writes the values corresponding to the palette component c in data[1] to dst, rather than the palette indexes in data[0]. The behavior is undefined if the format is not paletted.</param>
    /// <param name="dst_element_size">size of elements in dst array (2 or 4 byte)</param>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void av_read_image_line2(void* @dst, in byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component, int @dst_element_size);

    /// <summary>Pause a network-based stream (e.g. RTSP stream).</summary>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_read_pause(AVFormatContext* @s);

    /// <summary>Start playing a network-based stream (e.g. RTSP stream) at the current position.</summary>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_read_play(AVFormatContext* @s);

    /// <summary>Allocate, reallocate, or free a block of memory.</summary>
    /// <param name="ptr">Pointer to a memory block already allocated with av_realloc() or `NULL`</param>
    /// <param name="size">Size in bytes of the memory block to be allocated or reallocated</param>
    /// <returns>Pointer to a newly-reallocated block or `NULL` if the block cannot be reallocated</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void* av_realloc(void* @ptr, ulong @size);

    /// <summary>Allocate, reallocate, or free an array.</summary>
    /// <param name="ptr">Pointer to a memory block already allocated with av_realloc() or `NULL`</param>
    /// <param name="nmemb">Number of elements in the array</param>
    /// <param name="size">Size of the single element of the array</param>
    /// <returns>Pointer to a newly-reallocated block or NULL if the block cannot be reallocated</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void* av_realloc_array(void* @ptr, ulong @nmemb, ulong @size);

    /// <summary>Allocate, reallocate, or free a block of memory.</summary>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern void* av_realloc_f(void* @ptr, ulong @nelem, ulong @elsize);

    /// <summary>Allocate, reallocate, or free a block of memory through a pointer to a pointer.</summary>
    /// <param name="ptr">Pointer to a pointer to a memory block already allocated with av_realloc(), or a pointer to `NULL`. The pointer is updated on success, or freed on failure.</param>
    /// <param name="size">Size in bytes for the memory block to be allocated or reallocated</param>
    /// <returns>Zero on success, an AVERROR error code on failure</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_reallocp(void* @ptr, ulong @size);

    /// <summary>Allocate, reallocate an array through a pointer to a pointer.</summary>
    /// <param name="ptr">Pointer to a pointer to a memory block already allocated with av_realloc(), or a pointer to `NULL`. The pointer is updated on success, or freed on failure.</param>
    /// <param name="nmemb">Number of elements</param>
    /// <param name="size">Size of the single element</param>
    /// <returns>Zero on success, an AVERROR error code on failure</returns>
    [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
    public static extern int av_reallocp_array(void* @ptr, ulong @nmemb, ulong @size);

    /// <summary>Reduce a fraction.</summary>
+ /// Destination numerator + /// Destination denominator + /// Source numerator + /// Source denominator + /// Maximum allowed values for `dst_num` & `dst_den` + /// 1 if the operation is exact, 0 otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_reduce(int* @dst_num, int* @dst_den, long @num, long @den, long @max); + + /// Rescale a 64-bit integer with rounding to nearest. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_rescale(long @a, long @b, long @c); + + /// Rescale a timestamp while preserving known durations. + /// Input time base + /// Input timestamp + /// Duration time base; typically this is finer-grained (greater) than `in_tb` and `out_tb` + /// Duration till the next call to this function (i.e. duration of the current packet/frame) + /// Pointer to a timestamp expressed in terms of `fs_tb`, acting as a state variable + /// Output timebase + /// Timestamp expressed in terms of `out_tb` + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_rescale_delta(AVRational @in_tb, long @in_ts, AVRational @fs_tb, int @duration, long* @last, AVRational @out_tb); + + /// Rescale a 64-bit integer by 2 rational numbers. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_rescale_q(long @a, AVRational @bq, AVRational @cq); + + /// Rescale a 64-bit integer by 2 rational numbers with specified rounding. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_rescale_q_rnd(long @a, AVRational @bq, AVRational @cq, AVRounding @rnd); + + /// Rescale a 64-bit integer with specified rounding. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_rescale_rnd(long @a, long @b, long @c, AVRounding @rnd); + + /// Check if the sample format is planar. 
+ /// the sample format to inspect + /// 1 if the sample format is planar, 0 if it is interleaved + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_sample_fmt_is_planar(AVSampleFormat @sample_fmt); + + /// Allocate a samples buffer for nb_samples samples, and fill data pointers and linesize accordingly. The allocated samples buffer can be freed by using av_freep(&audio_data[0]) Allocated data will be initialized to silence. + /// array to be filled with the pointer for each channel + /// aligned size for audio buffer(s), may be NULL + /// number of audio channels + /// number of samples per channel + /// buffer size alignment (0 = default, 1 = no alignment) + /// >=0 on success or a negative error code on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_samples_alloc(byte** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + + /// Allocate a data pointers array, samples buffer for nb_samples samples, and fill data pointers and linesize accordingly. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_samples_alloc_array_and_samples(byte*** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + + /// Copy samples from src to dst. 
+ /// destination array of pointers to data planes + /// source array of pointers to data planes + /// offset in samples at which the data will be written to dst + /// offset in samples at which the data will be read from src + /// number of samples to be copied + /// number of audio channels + /// audio sample format + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_samples_copy(byte** @dst, byte** @src, int @dst_offset, int @src_offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt); + + /// Fill plane data pointers and linesize for samples with sample format sample_fmt. + /// array to be filled with the pointer for each channel + /// calculated linesize, may be NULL + /// the pointer to a buffer containing the samples + /// the number of channels + /// the number of samples in a single channel + /// the sample format + /// buffer size alignment (0 = default, 1 = no alignment) + /// minimum size in bytes required for the buffer on success, or a negative error code on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_samples_fill_arrays(byte** @audio_data, int* @linesize, byte* @buf, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + + /// Get the required buffer size for the given audio parameters. + /// calculated linesize, may be NULL + /// the number of channels + /// the number of samples in a single channel + /// the sample format + /// buffer size alignment (0 = default, 1 = no alignment) + /// required buffer size, or negative error code on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_samples_get_buffer_size(int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + + /// Fill an audio buffer with silence. 
+ /// array of pointers to data planes + /// offset in samples at which to start filling + /// number of samples to fill + /// number of audio channels + /// audio sample format + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_samples_set_silence(byte** @audio_data, int @offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt); + + /// Generate an SDP for an RTP session. + /// array of AVFormatContexts describing the RTP streams. If the array is composed by only one context, such context can contain multiple AVStreams (one AVStream per RTP stream). Otherwise, all the contexts in the array (an AVCodecContext per RTP stream) must contain only one AVStream. + /// number of AVCodecContexts contained in ac + /// buffer where the SDP will be stored (must be allocated by the caller) + /// the size of the buffer + /// 0 if OK, AVERROR_xxx on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_sdp_create(AVFormatContext** @ac, int @n_files, byte* @buf, int @size); + + /// Seek to the keyframe at timestamp. 'timestamp' in 'stream_index'. + /// media file handle + /// If stream_index is (-1), a default stream is selected, and timestamp is automatically converted from AV_TIME_BASE units to the stream specific time_base. + /// Timestamp in AVStream.time_base units or, if no stream is specified, in AV_TIME_BASE units. + /// flags which select direction and seeking mode + /// >= 0 on success + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_seek_frame(AVFormatContext* @s, int @stream_index, long @timestamp, int @flags); + + /// Parse the key/value pairs list in opts. For each key/value pair found, stores the value in the field in ctx that is named like the key. ctx must be an AVClass context, storing is done using AVOptions. 
+ /// options string to parse, may be NULL + /// a 0-terminated list of characters used to separate key from value + /// a 0-terminated list of characters used to separate two pairs from each other + /// the number of successfully set key/value pairs, or a negative value corresponding to an AVERROR code in case of error: AVERROR(EINVAL) if opts cannot be parsed, the error code issued by av_opt_set() if a key/value pair cannot be set + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_set_options_string(void* @ctx, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @opts, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @pairs_sep); + + /// Reduce packet size, correctly zeroing padding + /// packet + /// new size + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_shrink_packet(AVPacket* @pkt, int @size); + + /// Multiply two `size_t` values checking for overflow. + /// Pointer to the result of the operation + /// 0 on success, AVERROR(EINVAL) on overflow + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_size_mult(ulong @a, ulong @b, ulong* @r); + + /// Duplicate a string. 
+ /// String to be duplicated + /// Pointer to a newly-allocated string containing a copy of `s` or `NULL` if the string cannot be allocated + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_strdup( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @s); + + /// Wrap an existing array as stream side data. + /// stream + /// side information type + /// the side data array. It must be allocated with the av_malloc() family of functions. The ownership of the data is transferred to st. + /// side information size + /// zero on success, a negative AVERROR code on failure. On failure, the stream is unchanged and the data remains owned by the caller. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_stream_add_side_data(AVStream* @st, AVPacketSideDataType @type, byte* @data, ulong @size); + + /// Get the AVClass for AVStream. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* av_stream_get_class(); + + /// Get the internal codec timebase from a stream. + /// input stream to extract the timebase from + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_stream_get_codec_timebase(AVStream* @st); + + /// Returns the pts of the last muxed packet + its duration + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern long av_stream_get_end_pts(AVStream* @st); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecParserContext* av_stream_get_parser(AVStream* @s); + + /// Get side information from stream. 
+ /// stream + /// desired side information type + /// If supplied, *size will be set to the size of the side data or to zero if the desired side data is not present. + /// pointer to data if present or NULL otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_stream_get_side_data(AVStream* @stream, AVPacketSideDataType @type, ulong* @size); + + /// Allocate new information from stream. + /// stream + /// desired side information type + /// side information size + /// pointer to fresh allocated data or NULL otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_stream_new_side_data(AVStream* @stream, AVPacketSideDataType @type, ulong @size); + + /// Put a description of the AVERROR code errnum in errbuf. In case of failure the global variable errno is set to indicate the error. Even in case of failure av_strerror() will print a generic error message indicating the errnum provided to errbuf. + /// error code to describe + /// buffer to which description is written + /// the size in bytes of errbuf + /// 0 on success, a negative value if a description for errnum cannot be found + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_strerror(int @errnum, byte* @errbuf, ulong @errbuf_size); + + /// Duplicate a substring of a string. 
+ /// String to be duplicated + /// Maximum length of the resulting string (not counting the terminating byte) + /// Pointer to a newly-allocated string containing a substring of `s` or `NULL` if the string cannot be allocated + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_strndup( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @s, ulong @len); + + /// Subtract one rational from another. + /// First rational + /// Second rational + /// b-c + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVRational av_sub_q(AVRational @b, AVRational @c); + + /// Wrapper to work around the lack of mkstemp() on mingw. Also, tries to create file in /tmp first, if possible. *prefix can be a character constant; *filename will be allocated internally. + /// file descriptor of opened file (or negative value corresponding to an AVERROR code on error) and opened file name in **filename. + [Obsolete("as fd numbers cannot be passed saftely between libs on some platforms")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_tempfile( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @prefix, byte** @filename, int @log_offset, void* @log_ctx); + + /// Adjust frame number for NTSC drop frame time code. 
+ /// frame number to adjust + /// frame per second, multiples of 30 + /// adjusted frame number + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_timecode_adjust_ntsc_framenum2(int @framenum, int @fps); + + /// Check if the timecode feature is available for the given frame rate + /// 0 if supported, < 0 otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_timecode_check_frame_rate(AVRational @rate); + + /// Convert sei info to SMPTE 12M binary representation. + /// frame rate in rational form + /// drop flag + /// hour + /// minute + /// second + /// frame number + /// the SMPTE binary representation + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_timecode_get_smpte(AVRational @rate, int @drop, int @hh, int @mm, int @ss, int @ff); + + /// Convert frame number to SMPTE 12M binary representation. + /// timecode data correctly initialized + /// frame number + /// the SMPTE binary representation + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_timecode_get_smpte_from_framenum(AVTimecode* @tc, int @framenum); + + /// Init a timecode struct with the passed parameters. + /// pointer to an allocated AVTimecode + /// frame rate in rational form + /// miscellaneous flags such as drop frame, +24 hours, ... (see AVTimecodeFlag) + /// the first frame number + /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log) + /// 0 on success, AVERROR otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_timecode_init(AVTimecode* @tc, AVRational @rate, int @flags, int @frame_start, void* @log_ctx); + + /// Init a timecode struct from the passed timecode components. 
+ /// pointer to an allocated AVTimecode + /// frame rate in rational form + /// miscellaneous flags such as drop frame, +24 hours, ... (see AVTimecodeFlag) + /// hours + /// minutes + /// seconds + /// frames + /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log) + /// 0 on success, AVERROR otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_timecode_init_from_components(AVTimecode* @tc, AVRational @rate, int @flags, int @hh, int @mm, int @ss, int @ff, void* @log_ctx); + + /// Parse timecode representation (hh:mm:ss[:;.]ff). + /// pointer to an allocated AVTimecode + /// frame rate in rational form + /// timecode string which will determine the frame start + /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log). + /// 0 on success, AVERROR otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_timecode_init_from_string(AVTimecode* @tc, AVRational @rate, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str, void* @log_ctx); + + /// Get the timecode string from the 25-bit timecode format (MPEG GOP format). + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// the 25-bits timecode + /// the buf parameter + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_timecode_make_mpeg_tc_string(byte* @buf, uint @tc25bit); + + /// Get the timecode string from the SMPTE timecode format. 
+ /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// the 32-bit SMPTE timecode + /// prevent the use of a drop flag when it is known the DF bit is arbitrary + /// the buf parameter + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_timecode_make_smpte_tc_string(byte* @buf, uint @tcsmpte, int @prevent_df); + + /// Get the timecode string from the SMPTE timecode format. + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// frame rate of the timecode + /// the 32-bit SMPTE timecode + /// prevent the use of a drop flag when it is known the DF bit is arbitrary + /// prevent the use of a field flag when it is known the field bit is arbitrary (e.g. because it is used as PC flag) + /// the buf parameter + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_timecode_make_smpte_tc_string2(byte* @buf, AVRational @rate, uint @tcsmpte, int @prevent_df, int @skip_field); + + /// Load timecode string in buf. + /// timecode data correctly initialized + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// frame number + /// the buf parameter + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* av_timecode_make_string(AVTimecode* @tc, byte* @buf, int @framenum); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_tree_destroy(AVTreeNode* @t); + + /// Apply enu(opaque, &elem) to all the elements in the tree in a given range. + /// a comparison function that returns < 0 for an element below the range, > 0 for an element above the range and == 0 for an element inside the range + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_tree_enumerate(AVTreeNode* @t, void* @opaque, av_tree_enumerate_cmp_func @cmp, av_tree_enumerate_enu_func @enu); + + /// Find an element. 
+ /// a pointer to the root node of the tree + /// compare function used to compare elements in the tree, API identical to that of Standard C's qsort It is guaranteed that the first and only the first argument to cmp() will be the key parameter to av_tree_find(), thus it could if the user wants, be a different type (like an opaque context). + /// If next is not NULL, then next[0] will contain the previous element and next[1] the next element. If either does not exist, then the corresponding entry in next is unchanged. + /// An element with cmp(key, elem) == 0 or NULL if no such element exists in the tree. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_tree_find(AVTreeNode* @root, void* @key, av_tree_find_cmp_func @cmp, ref void_ptr2 @next); + + /// Insert or remove an element. + /// A pointer to a pointer to the root node of the tree; note that the root node can change during insertions, this is required to keep the tree balanced. + /// pointer to the element key to insert in the tree + /// compare function used to compare elements in the tree, API identical to that of Standard C's qsort + /// Used to allocate and free AVTreeNodes. For insertion the user must set it to an allocated and zeroed object of at least av_tree_node_size bytes size. av_tree_insert() will set it to NULL if it has been consumed. For deleting elements *next is set to NULL by the user and av_tree_insert() will set it to the AVTreeNode which was used for the removed element. This allows the use of flat arrays, which have lower overhead compared to many malloced elements. You might want to define a function like: + /// If no insertion happened, the found element; if an insertion or removal happened, then either key or NULL will be returned. Which one it is depends on the tree state and the implementation. You should make no assumptions that it's one or the other in the code. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void* av_tree_insert(AVTreeNode** @rootp, void* @key, av_tree_insert_cmp_func @cmp, AVTreeNode** @next); + + /// Allocate an AVTreeNode. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVTreeNode* av_tree_node_alloc(); + + /// Split a URL string into components. + /// the buffer for the protocol + /// the size of the proto buffer + /// the buffer for the authorization + /// the size of the authorization buffer + /// the buffer for the host name + /// the size of the hostname buffer + /// a pointer to store the port number in + /// the buffer for the path + /// the size of the path buffer + /// the URL to split + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_url_split(byte* @proto, int @proto_size, byte* @authorization, int @authorization_size, byte* @hostname, int @hostname_size, int* @port_ptr, byte* @path, int @path_size, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url); + + /// Sleep for a period of time. Although the duration is expressed in microseconds, the actual delay may be rounded to the precision of the system timer. + /// Number of microseconds to sleep. + /// zero on success or (negative) error code. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_usleep(uint @usec); + + /// Return an informative version string. This usually is the actual release version number or a git commit description. This string has no fixed format and can change any time. It should never be parsed by code. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string av_version_info(); + + /// Send the specified message to the log if the level is less than or equal to the current av_log_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message expressed using a "Logging Constant". + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. + /// The arguments referenced by the format string. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_vlog(void* @avcl, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl); + + /// Write a packet to an output media file. + /// media file handle + /// The packet containing the data to be written. Note that unlike av_interleaved_write_frame(), this function does not take ownership of the packet passed to it (though some muxers may make an internal reference to the input packet). This parameter can be NULL (at any time, not just at the end), in order to immediately flush data buffered within the muxer, for muxers that buffer up data internally before writing it to the output. Packet's "stream_index" field must be set to the index of the corresponding stream in "s->streams". The timestamps ( "pts", "dts") must be set to correct values in the stream's timebase (unless the output format is flagged with the AVFMT_NOTIMESTAMPS flag, then they can be set to AV_NOPTS_VALUE). 
The dts for subsequent packets passed to this function must be strictly increasing when compared in their respective timebases (unless the output format is flagged with the AVFMT_TS_NONSTRICT, then they merely have to be nondecreasing). "duration") should also be set if known. + /// < 0 on error, = 0 if OK, 1 if flushed and there is no more data to flush + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_write_frame(AVFormatContext* @s, AVPacket* @pkt); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_write_image_line(ushort* @src, ref byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w); + + /// Write the values from src to the pixel format component c of an image line. + /// array containing the values to write + /// the array containing the pointers to the planes of the image to write into. It is supposed to be zeroed. + /// the array containing the linesizes of the image + /// the pixel format descriptor for the image + /// the horizontal coordinate of the first pixel to write + /// the vertical coordinate of the first pixel to write + /// the width of the line to write, that is the number of values to write to the image line + /// size of elements in src array (2 or 4 byte) + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void av_write_image_line2(void* @src, ref byte_ptr4 @data, in int4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @src_element_size); + + /// Write the stream trailer to an output media file and free the file private data. + /// media file handle + /// 0 if OK, AVERROR_xxx on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_write_trailer(AVFormatContext* @s); + + /// Write an uncoded frame to an output media file. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_write_uncoded_frame(AVFormatContext* @s, int @stream_index, AVFrame* @frame); + + /// Test whether a muxer supports uncoded frame. + /// >=0 if an uncoded frame can be written to that muxer and stream, < 0 if not + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int av_write_uncoded_frame_query(AVFormatContext* @s, int @stream_index); + + /// Encode extradata length to a buffer. Used by xiph codecs. + /// buffer to write to; must be at least (v/255+1) bytes long + /// size of extradata in bytes + /// number of bytes written to the buffer. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint av_xiphlacing(byte* @s, uint @v); + + /// Modify width and height values so that they will result in a memory buffer that is acceptable for the codec if you do not use any horizontal padding. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avcodec_align_dimensions(AVCodecContext* @s, int* @width, int* @height); + + /// Modify width and height values so that they will result in a memory buffer that is acceptable for the codec if you also ensure that all line sizes are a multiple of the respective linesize_align[i]. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avcodec_align_dimensions2(AVCodecContext* @s, int* @width, int* @height, ref int8 @linesize_align); + + /// Allocate an AVCodecContext and set its fields to default values. The resulting struct should be freed with avcodec_free_context(). + /// if non-NULL, allocate private data and initialize defaults for the given codec. It is illegal to then call avcodec_open2() with a different codec. 
If NULL, then the codec-specific defaults won't be initialized, which may result in suboptimal default settings (this is important mainly for encoders, e.g. libx264). + /// An AVCodecContext filled with default values or NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecContext* avcodec_alloc_context3(AVCodec* @codec); + + /// Converts swscale x/y chroma position to AVChromaLocation. + /// horizontal chroma sample position + /// vertical chroma sample position + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVChromaLocation avcodec_chroma_pos_to_enum(int @xpos, int @ypos); + + /// Close a given AVCodecContext and free all the data associated with it (but not the AVCodecContext itself). + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_close(AVCodecContext* @avctx); + + /// Return the libavcodec build-time configuration. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avcodec_configuration(); + + /// Decode a subtitle message. Return a negative value on error, otherwise return the number of bytes used. If no subtitle could be decompressed, got_sub_ptr is zero. Otherwise, the subtitle is stored in *sub. Note that AV_CODEC_CAP_DR1 is not available for subtitle codecs. This is for simplicity, because the performance difference is expected to be negligible and reusing a get_buffer written for video codecs would probably perform badly due to a potentially very different allocation pattern. + /// the codec context + /// The preallocated AVSubtitle in which the decoded subtitle will be stored, must be freed with avsubtitle_free if *got_sub_ptr is set. + /// Zero if no subtitle could be decompressed, otherwise, it is nonzero. 
+ /// The input AVPacket containing the input buffer. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_decode_subtitle2(AVCodecContext* @avctx, AVSubtitle* @sub, int* @got_sub_ptr, AVPacket* @avpkt); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_default_execute(AVCodecContext* @c, avcodec_default_execute_func_func @func, void* @arg, int* @ret, int @count, int @size); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_default_execute2(AVCodecContext* @c, avcodec_default_execute2_func_func @func, void* @arg, int* @ret, int @count); + + /// The default callback for AVCodecContext.get_buffer2(). It is made public so it can be called by custom get_buffer2() implementations for decoders without AV_CODEC_CAP_DR1 set. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_default_get_buffer2(AVCodecContext* @s, AVFrame* @frame, int @flags); + + /// The default callback for AVCodecContext.get_encode_buffer(). It is made public so it can be called by custom get_encode_buffer() implementations for encoders without AV_CODEC_CAP_DR1 set. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_default_get_encode_buffer(AVCodecContext* @s, AVPacket* @pkt, int @flags); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVPixelFormat avcodec_default_get_format(AVCodecContext* @s, AVPixelFormat* @fmt); + + /// Returns descriptor for given codec ID or NULL if no descriptor exists. + /// descriptor for given codec ID or NULL if no descriptor exists. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecDescriptor* avcodec_descriptor_get(AVCodecID @id); + + /// Returns codec descriptor with the given name or NULL if no such descriptor exists. + /// codec descriptor with the given name or NULL if no such descriptor exists. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecDescriptor* avcodec_descriptor_get_by_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Iterate over all codec descriptors known to libavcodec. + /// previous descriptor. NULL to get the first descriptor. + /// next descriptor or NULL after the last descriptor + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecDescriptor* avcodec_descriptor_next(AVCodecDescriptor* @prev); + + /// @{ + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_encode_subtitle(AVCodecContext* @avctx, byte* @buf, int @buf_size, AVSubtitle* @sub); + + /// Converts AVChromaLocation to swscale x/y chroma position. + /// horizontal chroma sample position + /// vertical chroma sample position + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_enum_to_chroma_pos(int* @xpos, int* @ypos, AVChromaLocation @pos); + + /// Fill AVFrame audio data and linesize pointers. + /// the AVFrame frame->nb_samples must be set prior to calling the function. This function fills in frame->data, frame->extended_data, frame->linesize[0]. 
+ /// channel count + /// sample format + /// buffer to use for frame data + /// size of buffer + /// plane size sample alignment (0 = default) + /// >=0 on success, negative error code on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_fill_audio_frame(AVFrame* @frame, int @nb_channels, AVSampleFormat @sample_fmt, byte* @buf, int @buf_size, int @align); + + /// Find the best pixel format to convert to given a certain source pixel format. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. avcodec_find_best_pix_fmt_of_2() searches which of the given pixel formats should be used to suffer the least amount of loss. The pixel formats from which it chooses one, are determined by the pix_fmt_list parameter. + /// AV_PIX_FMT_NONE terminated array of pixel formats to choose from + /// source pixel format + /// Whether the source pixel format alpha channel is used. + /// Combination of flags informing you what kind of losses will occur. + /// The best pixel format to convert to or -1 if none was found. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVPixelFormat avcodec_find_best_pix_fmt_of_list(AVPixelFormat* @pix_fmt_list, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr); + + /// Find a registered decoder with a matching codec ID. + /// AVCodecID of the requested decoder + /// A decoder if one was found, NULL otherwise. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodec* avcodec_find_decoder(AVCodecID @id); + + /// Find a registered decoder with the specified name. + /// name of the requested decoder + /// A decoder if one was found, NULL otherwise. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodec* avcodec_find_decoder_by_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Find a registered encoder with a matching codec ID. + /// AVCodecID of the requested encoder + /// An encoder if one was found, NULL otherwise. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodec* avcodec_find_encoder(AVCodecID @id); + + /// Find a registered encoder with the specified name. + /// name of the requested encoder + /// An encoder if one was found, NULL otherwise. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodec* avcodec_find_encoder_by_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Reset the internal codec state / flush internal buffers. Should be called e.g. when seeking or when switching to a different stream. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avcodec_flush_buffers(AVCodecContext* @avctx); + + /// Free the codec context and everything associated with it and write NULL to the provided pointer. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avcodec_free_context(AVCodecContext** @avctx); + + /// Get the AVClass for AVCodecContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* avcodec_get_class(); + + [Obsolete("This function should not be used.")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* avcodec_get_frame_class(); + + /// Retrieve supported hardware configurations for a codec. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecHWConfig* avcodec_get_hw_config(AVCodec* @codec, int @index); + + /// Create and return a AVHWFramesContext with values adequate for hardware decoding. This is meant to get called from the get_format callback, and is a helper for preparing a AVHWFramesContext for AVCodecContext.hw_frames_ctx. This API is for decoding with certain hardware acceleration modes/APIs only. + /// The context which is currently calling get_format, and which implicitly contains all state needed for filling the returned AVHWFramesContext properly. + /// A reference to the AVHWDeviceContext describing the device which will be used by the hardware decoder. + /// The hwaccel format you are going to return from get_format. + /// On success, set to a reference to an _uninitialized_ AVHWFramesContext, created from the given device_ref. Fields will be set to values required for decoding. Not changed if an error is returned. + /// zero on success, a negative value on error. The following error codes have special semantics: AVERROR(ENOENT): the decoder does not support this functionality. Setup is always manual, or it is a decoder which does not support setting AVCodecContext.hw_frames_ctx at all, or it is a software format. AVERROR(EINVAL): it is known that hardware decoding is not supported for this configuration, or the device_ref is not supported for the hwaccel referenced by hw_pix_fmt. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_get_hw_frames_parameters(AVCodecContext* @avctx, AVBufferRef* @device_ref, AVPixelFormat @hw_pix_fmt, AVBufferRef** @out_frames_ref); + + /// Get the name of a codec. + /// a static string identifying the codec; never NULL + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avcodec_get_name(AVCodecID @id); + + /// Get the AVClass for AVSubtitleRect. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* avcodec_get_subtitle_rect_class(); + + /// Get the type of the given codec. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVMediaType avcodec_get_type(AVCodecID @codec_id); + + /// Returns a positive value if s is open (i.e. avcodec_open2() was called on it with no corresponding avcodec_close()), 0 otherwise. + /// a positive value if s is open (i.e. avcodec_open2() was called on it with no corresponding avcodec_close()), 0 otherwise. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_is_open(AVCodecContext* @s); + + /// Return the libavcodec license. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avcodec_license(); + + /// Initialize the AVCodecContext to use the given AVCodec. Prior to using this function the context has to be allocated with avcodec_alloc_context3(). + /// The context to initialize. + /// The codec to open this context for. 
If a non-NULL codec has been previously passed to avcodec_alloc_context3() or for this context, then this parameter MUST be either NULL or equal to the previously passed codec. + /// A dictionary filled with AVCodecContext and codec-private options. On return this object will be filled with options that were not found. + /// zero on success, a negative value on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_open2(AVCodecContext* @avctx, AVCodec* @codec, AVDictionary** @options); + + /// Allocate a new AVCodecParameters and set its fields to default values (unknown/invalid/0). The returned struct must be freed with avcodec_parameters_free(). + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecParameters* avcodec_parameters_alloc(); + + /// Copy the contents of src to dst. Any allocated fields in dst are freed and replaced with newly allocated duplicates of the corresponding fields in src. + /// >= 0 on success, a negative AVERROR code on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_parameters_copy(AVCodecParameters* @dst, AVCodecParameters* @src); + + /// Free an AVCodecParameters instance and everything associated with it and write NULL to the supplied pointer. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avcodec_parameters_free(AVCodecParameters** @par); + + /// Fill the parameters struct based on the values from the supplied codec context. Any allocated fields in par are freed and replaced with duplicates of the corresponding fields in codec. 
+ /// >= 0 on success, a negative AVERROR code on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_parameters_from_context(AVCodecParameters* @par, AVCodecContext* @codec); + + /// Fill the codec context based on the values from the supplied codec parameters. Any allocated fields in codec that have a corresponding field in par are freed and replaced with duplicates of the corresponding field in par. Fields in codec that do not have a counterpart in par are not touched. + /// >= 0 on success, a negative AVERROR code on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_parameters_to_context(AVCodecContext* @codec, AVCodecParameters* @par); + + /// Return a value representing the fourCC code associated to the pixel format pix_fmt, or 0 if no associated fourCC code can be found. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avcodec_pix_fmt_to_codec_tag(AVPixelFormat @pix_fmt); + + /// Return a name for the specified profile, if available. + /// the ID of the codec to which the requested profile belongs + /// the profile value for which a name is requested + /// A name for the profile if found, NULL otherwise. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avcodec_profile_name(AVCodecID @codec_id, int @profile); + + /// Return decoded output data from a decoder. + /// codec context + /// This will be set to a reference-counted video or audio frame (depending on the decoder type) allocated by the decoder. Note that the function will always call av_frame_unref(frame) before doing anything else. 
+ /// 0: success, a frame was returned AVERROR(EAGAIN): output is not available in this state - user must try to send new input AVERROR_EOF: the decoder has been fully flushed, and there will be no more output frames AVERROR(EINVAL): codec not opened, or it is an encoder AVERROR_INPUT_CHANGED: current decoded frame has changed parameters with respect to first decoded frame. Applicable when flag AV_CODEC_FLAG_DROPCHANGED is set. other negative values: legitimate decoding errors + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_receive_frame(AVCodecContext* @avctx, AVFrame* @frame); + + /// Read encoded data from the encoder. + /// codec context + /// This will be set to a reference-counted packet allocated by the encoder. Note that the function will always call av_packet_unref(avpkt) before doing anything else. + /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): output is not available in the current state - user must try to send input AVERROR_EOF: the encoder has been fully flushed, and there will be no more output packets AVERROR(EINVAL): codec not opened, or it is a decoder other errors: legitimate encoding errors + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_receive_packet(AVCodecContext* @avctx, AVPacket* @avpkt); + + /// Supply a raw video or audio frame to the encoder. Use avcodec_receive_packet() to retrieve buffered output packets. + /// codec context + /// AVFrame containing the raw audio or video frame to be encoded. Ownership of the frame remains with the caller, and the encoder will not write to the frame. The encoder may create a reference to the frame data (or copy it if the frame is not reference-counted). It can be NULL, in which case it is considered a flush packet. This signals the end of the stream. If the encoder still has packets buffered, it will return them after this call. 
Once flushing mode has been entered, additional flush packets are ignored, and sending frames will return AVERROR_EOF. + /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): input is not accepted in the current state - user must read output with avcodec_receive_packet() (once all output is read, the packet should be resent, and the call will not fail with EAGAIN). AVERROR_EOF: the encoder has been flushed, and no new frames can be sent to it AVERROR(EINVAL): codec not opened, it is a decoder, or requires flush AVERROR(ENOMEM): failed to add packet to internal queue, or similar other errors: legitimate encoding errors + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_send_frame(AVCodecContext* @avctx, AVFrame* @frame); + + /// Supply raw packet data as input to a decoder. + /// codec context + /// The input AVPacket. Usually, this will be a single video frame, or several complete audio frames. Ownership of the packet remains with the caller, and the decoder will not write to the packet. The decoder may create a reference to the packet data (or copy it if the packet is not reference-counted). Unlike with older APIs, the packet is always fully consumed, and if it contains multiple frames (e.g. some audio codecs), will require you to call avcodec_receive_frame() multiple times afterwards before you can send a new packet. It can be NULL (or an AVPacket with data set to NULL and size set to 0); in this case, it is considered a flush packet, which signals the end of the stream. Sending the first flush packet will return success. Subsequent ones are unnecessary and will return AVERROR_EOF. If the decoder still has frames buffered, it will return them after sending a flush packet. 
+ /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): input is not accepted in the current state - user must read output with avcodec_receive_frame() (once all output is read, the packet should be resent, and the call will not fail with EAGAIN). AVERROR_EOF: the decoder has been flushed, and no new packets can be sent to it (also returned if more than 1 flush packet is sent) AVERROR(EINVAL): codec not opened, it is an encoder, or requires flush AVERROR(ENOMEM): failed to add packet to internal queue, or similar other errors: legitimate decoding errors + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avcodec_send_packet(AVCodecContext* @avctx, AVPacket* @avpkt); + + /// @} + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avcodec_string(byte* @buf, int @buf_size, AVCodecContext* @enc, int @encode); + + /// Return the LIBAVCODEC_VERSION_INT constant. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avcodec_version(); + + /// Send control message from application to device. + /// device context. + /// message type. + /// message data. Exact type depends on message type. + /// size of message data. + /// >= 0 on success, negative on error. AVERROR(ENOSYS) when device doesn't implement handler of the message. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avdevice_app_to_dev_control_message(AVFormatContext* @s, AVAppToDevMessageType @type, void* @data, ulong @data_size); + + /// Initialize capabilities probing API based on AVOption API. + /// Device capabilities data. Pointer to a NULL pointer must be passed. + /// Context of the device. + /// An AVDictionary filled with device-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. 
The same options must be passed later to avformat_write_header() for output devices or avformat_open_input() for input devices, or at any other place that affects device-private options. + /// >= 0 on success, negative otherwise. + [Obsolete()] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avdevice_capabilities_create(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s, AVDictionary** @device_options); + + /// Free resources created by avdevice_capabilities_create() + /// Device capabilities data to be freed. + /// Context of the device. + [Obsolete()] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avdevice_capabilities_free(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s); + + /// Return the libavdevice build-time configuration. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avdevice_configuration(); + + /// Send control message from device to application. + /// device context. + /// message type. + /// message data. Can be NULL. + /// size of message data. + /// >= 0 on success, negative on error. AVERROR(ENOSYS) when application doesn't implement handler of the message. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avdevice_dev_to_app_control_message(AVFormatContext* @s, AVDevToAppMessageType @type, void* @data, ulong @data_size); + + /// Convenient function to free result of avdevice_list_devices(). + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avdevice_free_list_devices(AVDeviceInfoList** @device_list); + + /// Return the libavdevice license. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avdevice_license(); + + /// List devices. + /// device context. + /// list of autodetected devices. + /// count of autodetected devices, negative on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avdevice_list_devices(AVFormatContext* @s, AVDeviceInfoList** @device_list); + + /// List devices. + /// device format. May be NULL if device name is set. + /// device name. May be NULL if device format is set. + /// An AVDictionary filled with device-private options. May be NULL. The same options must be passed later to avformat_write_header() for output devices or avformat_open_input() for input devices, or at any other place that affects device-private options. + /// list of autodetected devices + /// count of autodetected devices, negative on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avdevice_list_input_sources(AVInputFormat* @device, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avdevice_list_output_sinks(AVOutputFormat* @device, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list); + + /// Initialize libavdevice and register all the input and output devices. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avdevice_register_all(); + + /// Return the LIBAVDEVICE_VERSION_INT constant. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avdevice_version(); + + /// Negotiate the media format, dimensions, etc of all inputs to a filter. + /// the filter to negotiate the properties for its inputs + /// zero on successful negotiation + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_config_links(AVFilterContext* @filter); + + /// Return the libavfilter build-time configuration. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avfilter_configuration(); + + /// Get the number of elements in an AVFilter's inputs or outputs array. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avfilter_filter_pad_count(AVFilter* @filter, int @is_output); + + /// Free a filter context. This will also remove the filter from its filtergraph's list of filters. + /// the filter to free + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avfilter_free(AVFilterContext* @filter); + + /// Get a filter definition matching the given name. + /// the filter name to find + /// the filter definition, if any matching one is registered. NULL if none found. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFilter* avfilter_get_by_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Returns AVClass for AVFilterContext. + /// AVClass for AVFilterContext. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* avfilter_get_class(); + + /// Allocate a filter graph. + /// the allocated filter graph on success or NULL. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFilterGraph* avfilter_graph_alloc(); + + /// Create a new filter instance in a filter graph. + /// graph in which the new filter will be used + /// the filter to create an instance of + /// Name to give to the new instance (will be copied to AVFilterContext.name). This may be used by the caller to identify different filters, libavfilter itself assigns no semantics to this parameter. May be NULL. + /// the context of the newly created filter instance (note that it is also retrievable directly through AVFilterGraph.filters or with avfilter_graph_get_filter()) on success or NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFilterContext* avfilter_graph_alloc_filter(AVFilterGraph* @graph, AVFilter* @filter, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Check validity and configure all the links and formats in the graph. + /// the filter graph + /// context used for logging + /// >= 0 in case of success, a negative AVERROR code otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_config(AVFilterGraph* @graphctx, void* @log_ctx); + + /// Create and add a filter instance into an existing graph. The filter instance is created from the filter filt and inited with the parameter args. opaque is currently ignored. 
+ /// the instance name to give to the created filter instance + /// the filter graph + /// a negative AVERROR error code in case of failure, a non negative value otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_create_filter(AVFilterContext** @filt_ctx, AVFilter* @filt, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @args, void* @opaque, AVFilterGraph* @graph_ctx); + + /// Dump a graph into a human-readable string representation. + /// the graph to dump + /// formatting options; currently ignored + /// a string, or NULL in case of memory allocation failure; the string must be freed using av_free + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern byte* avfilter_graph_dump(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @options); + + /// Free a graph, destroy its links, and set *graph to NULL. If *graph is NULL, do nothing. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avfilter_graph_free(AVFilterGraph** @graph); + + /// Get a filter instance identified by instance name from graph. + /// filter graph to search through. + /// filter instance name (should be unique in the graph). + /// the pointer to the found filter instance or NULL if it cannot be found. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFilterContext* avfilter_graph_get_filter(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + + /// Add a graph described by a string to a graph. + /// the filter graph where to link the parsed graph context + /// string to be parsed + /// linked list to the inputs of the graph + /// linked list to the outputs of the graph + /// zero on success, a negative AVERROR code on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_parse(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filters, AVFilterInOut* @inputs, AVFilterInOut* @outputs, void* @log_ctx); + + /// Add a graph described by a string to a graph. + /// the filter graph where to link the parsed graph context + /// string to be parsed + /// pointer to a linked list to the inputs of the graph, may be NULL. If non-NULL, *inputs is updated to contain the list of open inputs after the parsing, should be freed with avfilter_inout_free(). + /// pointer to a linked list to the outputs of the graph, may be NULL. If non-NULL, *outputs is updated to contain the list of open outputs after the parsing, should be freed with avfilter_inout_free(). 
+ /// non negative on success, a negative AVERROR code on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_parse_ptr(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs, void* @log_ctx); + + /// Add a graph described by a string to a graph. + /// the filter graph where to link the parsed graph context + /// string to be parsed + /// a linked list of all free (unlinked) inputs of the parsed graph will be returned here. It is to be freed by the caller using avfilter_inout_free(). + /// a linked list of all free (unlinked) outputs of the parsed graph will be returned here. It is to be freed by the caller using avfilter_inout_free(). + /// zero on success, a negative AVERROR code on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_parse2(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs); + + /// Queue a command for one or more filter instances. + /// the filter graph + /// the filter(s) to which the command should be sent "all" sends to all filters otherwise it can be a filter or filter instance name which will send the command to all matching filters. 
+ /// the command to sent, for handling simplicity all commands must be alphanumeric only + /// the argument for the command + /// time at which the command should be sent to the filter + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_queue_command(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @target, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, int @flags, double @ts); + + /// Request a frame on the oldest sink link. + /// the return value of ff_request_frame(), or AVERROR_EOF if all links returned AVERROR_EOF + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_request_oldest(AVFilterGraph* @graph); + + /// Send a command to one or more filter instances. + /// the filter graph + /// the filter(s) to which the command should be sent "all" sends to all filters otherwise it can be a filter or filter instance name which will send the command to all matching filters. + /// the command to send, for handling simplicity all commands must be alphanumeric only + /// the argument for the command + /// a buffer with size res_size where the filter(s) can return a response. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_graph_send_command(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @target, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, byte* @res, int @res_len, int @flags); + + /// Enable or disable automatic format conversion inside the graph. + /// any of the AVFILTER_AUTO_CONVERT_* constants + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avfilter_graph_set_auto_convert(AVFilterGraph* @graph, uint @flags); + + /// Initialize a filter with the supplied dictionary of options. + /// uninitialized filter context to initialize + /// An AVDictionary filled with options for this filter. On return this parameter will be destroyed and replaced with a dict containing options that were not found. This dictionary must be freed by the caller. May be NULL, then this function is equivalent to avfilter_init_str() with the second parameter set to NULL. + /// 0 on success, a negative AVERROR on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_init_dict(AVFilterContext* @ctx, AVDictionary** @options); + + /// Initialize a filter with the supplied parameters. + /// uninitialized filter context to initialize + /// Options to initialize the filter with. This must be a ':'-separated list of options in the 'key=value' form. 
May be NULL if the options have been set directly using the AVOptions API or there are no options that need to be set. + /// 0 on success, a negative AVERROR on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_init_str(AVFilterContext* @ctx, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @args); + + /// Allocate a single AVFilterInOut entry. Must be freed with avfilter_inout_free(). + /// allocated AVFilterInOut on success, NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFilterInOut* avfilter_inout_alloc(); + + /// Free the supplied list of AVFilterInOut and set *inout to NULL. If *inout is NULL, do nothing. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avfilter_inout_free(AVFilterInOut** @inout); + + /// Insert a filter in the middle of an existing link. + /// the link into which the filter should be inserted + /// the filter to be inserted + /// the input pad on the filter to connect + /// the output pad on the filter to connect + /// zero on success + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_insert_filter(AVFilterLink* @link, AVFilterContext* @filt, uint @filt_srcpad_idx, uint @filt_dstpad_idx); + + /// Return the libavfilter license. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avfilter_license(); + + /// Link two filters together. 
+ /// the source filter + /// index of the output pad on the source filter + /// the destination filter + /// index of the input pad on the destination filter + /// zero on success + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_link(AVFilterContext* @src, uint @srcpad, AVFilterContext* @dst, uint @dstpad); + + /// Free the link in *link, and set its pointer to NULL. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avfilter_link_free(AVFilterLink** @link); + + /// Get the number of elements in an AVFilter's inputs or outputs array. + [Obsolete("Use avfilter_filter_pad_count() instead.")] + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_pad_count(AVFilterPad* @pads); + + /// Get the name of an AVFilterPad. + /// an array of AVFilterPads + /// index of the pad in the array; it is the caller's responsibility to ensure the index is valid + /// name of the pad_idx'th pad in pads + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avfilter_pad_get_name(AVFilterPad* @pads, int @pad_idx); + + /// Get the type of an AVFilterPad. + /// an array of AVFilterPads + /// index of the pad in the array; it is the caller's responsibility to ensure the index is valid + /// type of the pad_idx'th pad in pads + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVMediaType avfilter_pad_get_type(AVFilterPad* @pads, int @pad_idx); + + /// Make the filter instance process a command. It is recommended to use avfilter_graph_send_command(). 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avfilter_process_command(AVFilterContext* @filter, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, byte* @res, int @res_len, int @flags); + + /// Return the LIBAVFILTER_VERSION_INT constant. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avfilter_version(); + + /// Allocate an AVFormatContext. avformat_free_context() can be used to free the context and everything allocated by the framework within it. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVFormatContext* avformat_alloc_context(); + + /// Allocate an AVFormatContext for an output format. avformat_free_context() can be used to free the context and everything allocated by the framework within it. 
+ /// format to use for allocating the context, if NULL format_name and filename are used instead + /// the name of output format to use for allocating the context, if NULL filename is used instead + /// the name of the filename to use for allocating the context, may be NULL + /// >= 0 in case of success, a negative AVERROR code in case of failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_alloc_output_context2(AVFormatContext** @ctx, AVOutputFormat* @oformat, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @format_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename); + + /// Close an opened input AVFormatContext. Free it and all its contents and set *s to NULL. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avformat_close_input(AVFormatContext** @s); + + /// Return the libavformat build-time configuration. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avformat_configuration(); + + /// Read packets of a media file to get stream information. This is useful for file formats with no headers such as MPEG. This function also computes the real framerate in case of MPEG-2 repeat frame mode. The logical file position is not changed by this function; examined packets may be buffered for later processing. + /// media file handle + /// If non-NULL, an ic.nb_streams long array of pointers to dictionaries, where i-th member contains options for codec corresponding to i-th stream. 
On return each dictionary will be filled with options that were not found. + /// >=0 if OK, AVERROR_xxx on error + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_find_stream_info(AVFormatContext* @ic, AVDictionary** @options); + + /// Discard all internally buffered data. This can be useful when dealing with discontinuities in the byte stream. Generally works only with formats that can resync. This includes headerless formats like MPEG-TS/TS but should also work with NUT, Ogg and in a limited way AVI for example. + /// media file handle + /// >=0 on success, error code otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_flush(AVFormatContext* @s); + + /// Free an AVFormatContext and all its streams. + /// context to free + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avformat_free_context(AVFormatContext* @s); + + /// Get the AVClass for AVFormatContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVClass* avformat_get_class(); + + /// Returns the table mapping MOV FourCCs for audio to AVCodecID. + /// the table mapping MOV FourCCs for audio to AVCodecID. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecTag* avformat_get_mov_audio_tags(); + + /// Returns the table mapping MOV FourCCs for video to libavcodec AVCodecID. + /// the table mapping MOV FourCCs for video to libavcodec AVCodecID. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecTag* avformat_get_mov_video_tags(); + + /// Returns the table mapping RIFF FourCCs for audio to AVCodecID. + /// the table mapping RIFF FourCCs for audio to AVCodecID. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecTag* avformat_get_riff_audio_tags(); + + /// @{ Get the tables mapping RIFF FourCCs to libavcodec AVCodecIDs. The tables are meant to be passed to av_codec_get_id()/av_codec_get_tag() as in the following code: + /// the table mapping RIFF FourCCs for video to libavcodec AVCodecID. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVCodecTag* avformat_get_riff_video_tags(); + + /// Get the index entry count for the given AVStream. + /// stream + /// the number of index entries in the stream + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_index_get_entries_count(AVStream* @st); + + /// Get the AVIndexEntry corresponding to the given index. + /// Stream containing the requested AVIndexEntry. + /// The desired index. + /// A pointer to the requested AVIndexEntry if it exists, NULL otherwise. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVIndexEntry* avformat_index_get_entry(AVStream* @st, int @idx); + + /// Get the AVIndexEntry corresponding to the given timestamp. + /// Stream containing the requested AVIndexEntry. + /// If AVSEEK_FLAG_BACKWARD then the returned entry will correspond to the timestamp which is < = the requested one, if backward is 0, then it will be >= if AVSEEK_FLAG_ANY seek to any frame, only keyframes otherwise. + /// A pointer to the requested AVIndexEntry if it exists, NULL otherwise. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVIndexEntry* avformat_index_get_entry_from_timestamp(AVStream* @st, long @wanted_timestamp, int @flags); + + /// Allocate the stream private data and initialize the codec, but do not write the header. 
May optionally be used before avformat_write_header to initialize stream parameters before actually writing the header. If using this function, do not pass the same options to avformat_write_header. + /// Media file handle, must be allocated with avformat_alloc_context(). Its oformat field must be set to the desired output format; Its pb field must be set to an already opened AVIOContext. + /// An AVDictionary filled with AVFormatContext and muxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. + /// AVSTREAM_INIT_IN_WRITE_HEADER on success if the codec requires avformat_write_header to fully initialize, AVSTREAM_INIT_IN_INIT_OUTPUT on success if the codec has been fully initialized, negative AVERROR on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_init_output(AVFormatContext* @s, AVDictionary** @options); + + /// Return the libavformat license. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avformat_license(); + + /// Check if the stream st contained in s is matched by the stream specifier spec. + /// >0 if st is matched by spec; 0 if st is not matched by spec; AVERROR code if spec is invalid + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_match_stream_specifier(AVFormatContext* @s, AVStream* @st, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @spec); + + /// Undo the initialization done by avformat_network_init. Call it only once for each time you called avformat_network_init. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_network_deinit(); + + /// Do global initialization of network libraries. This is optional, and not recommended anymore. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_network_init(); + + /// Add a new stream to a media file. + /// media file handle + /// unused, does nothing + /// newly created stream or NULL on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVStream* avformat_new_stream(AVFormatContext* @s, AVCodec* @c); + + /// Open an input stream and read the header. The codecs are not opened. The stream must be closed with avformat_close_input(). + /// Pointer to user-supplied AVFormatContext (allocated by avformat_alloc_context). May be a pointer to NULL, in which case an AVFormatContext is allocated by this function and written into ps. Note that a user-supplied AVFormatContext will be freed on failure. + /// URL of the stream to open. + /// If non-NULL, this parameter forces a specific input format. Otherwise the format is autodetected. + /// A dictionary filled with AVFormatContext and demuxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. + /// 0 on success, a negative AVERROR on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_open_input(AVFormatContext** @ps, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, AVInputFormat* @fmt, AVDictionary** @options); + + /// Test if the given container can store a codec. 
+ /// container to check for compatibility + /// codec to potentially store in container + /// standards compliance level, one of FF_COMPLIANCE_* + /// 1 if codec with ID codec_id can be stored in ofmt, 0 if it cannot. A negative number if this information is not available. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_query_codec(AVOutputFormat* @ofmt, AVCodecID @codec_id, int @std_compliance); + + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_queue_attached_pictures(AVFormatContext* @s); + + /// Seek to timestamp ts. Seeking will be done so that the point from which all active streams can be presented successfully will be closest to ts and within min/max_ts. Active streams are all streams that have AVStream.discard < AVDISCARD_ALL. + /// media file handle + /// index of the stream which is used as time base reference + /// smallest acceptable timestamp + /// target timestamp + /// largest acceptable timestamp + /// flags + /// >=0 on success, error code otherwise + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_seek_file(AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags); + + /// Transfer internal timing information from one stream to another. + /// target output format for ost + /// output stream which needs timings copy and adjustments + /// reference input stream to copy timings from + /// define from where the stream codec timebase needs to be imported + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_transfer_internal_stream_timing_info(AVOutputFormat* @ofmt, AVStream* @ost, AVStream* @ist, AVTimebaseSource @copy_tb); + + /// Return the LIBAVFORMAT_VERSION_INT constant. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint avformat_version(); + + /// Allocate the stream private data and write the stream header to an output media file. + /// Media file handle, must be allocated with avformat_alloc_context(). Its oformat field must be set to the desired output format; Its pb field must be set to an already opened AVIOContext. + /// An AVDictionary filled with AVFormatContext and muxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. + /// AVSTREAM_INIT_IN_WRITE_HEADER on success if the codec had not already been fully initialized in avformat_init, AVSTREAM_INIT_IN_INIT_OUTPUT on success if the codec had already been fully initialized in avformat_init, negative AVERROR on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avformat_write_header(AVFormatContext* @s, AVDictionary** @options); + + /// Accept and allocate a client context on a server context. + /// the server context + /// the client context, must be unallocated + /// >= 0 on success or a negative value corresponding to an AVERROR on failure + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_accept(AVIOContext* @s, AVIOContext** @c); + + /// Allocate and initialize an AVIOContext for buffered I/O. It must be later freed with avio_context_free(). + /// Memory block for input/output operations via AVIOContext. The buffer must be allocated with av_malloc() and friends. It may be freed and replaced with a new buffer by libavformat. AVIOContext.buffer holds the buffer currently in use, which must be later freed with av_free(). + /// The buffer size is very important for performance. For protocols with fixed blocksize it should be set to this blocksize. For others a typical size is a cache page, e.g. 4kb. 
+ /// Set to 1 if the buffer should be writable, 0 otherwise. + /// An opaque pointer to user-specific data. + /// A function for refilling the buffer, may be NULL. For stream protocols, must never return 0 but rather a proper AVERROR code. + /// A function for writing the buffer contents, may be NULL. The function may not change the input buffers content. + /// A function for seeking to specified byte position, may be NULL. + /// Allocated AVIOContext or NULL on failure. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern AVIOContext* avio_alloc_context(byte* @buffer, int @buffer_size, int @write_flag, void* @opaque, avio_alloc_context_read_packet_func @read_packet, avio_alloc_context_write_packet_func @write_packet, avio_alloc_context_seek_func @seek); + + /// Return AVIO_FLAG_* access flags corresponding to the access permissions of the resource in url, or a negative value corresponding to an AVERROR code in case of failure. The returned access flags are masked by the value in flags. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_check( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @flags); + + /// Close the resource accessed by the AVIOContext s and free it. This function can only be used if s was opened by avio_open(). + /// 0 on success, an AVERROR < 0 on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_close(AVIOContext* @s); + + /// Close directory. + /// directory read context. + /// >=0 on success or negative on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_close_dir(AVIODirContext** @s); + + /// Return the written size and a pointer to the buffer. 
The buffer must be freed with av_free(). Padding of AV_INPUT_BUFFER_PADDING_SIZE is added to the buffer. + /// IO context + /// pointer to a byte buffer + /// the length of the byte buffer + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_close_dyn_buf(AVIOContext* @s, byte** @pbuffer); + + /// Close the resource accessed by the AVIOContext *s, free it and set the pointer pointing to it to NULL. This function can only be used if s was opened by avio_open(). + /// 0 on success, an AVERROR < 0 on error. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_closep(AVIOContext** @s); + + /// Free the supplied IO context and everything associated with it. + /// Double pointer to the IO context. This function will write NULL into s. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_context_free(AVIOContext** @s); + + /// Iterate through names of available protocols. + /// A private pointer representing current protocol. It must be a pointer to NULL on first iteration and will be updated by successive calls to avio_enum_protocols. + /// If set to 1, iterate over output protocols, otherwise over input protocols. + /// A static string containing the name of current protocol or NULL + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avio_enum_protocols(void** @opaque, int @output); + + /// Similar to feof() but also returns nonzero on read errors. + /// non zero if and only if at end of file or a read error happened when reading. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_feof(AVIOContext* @s); + + /// Return the name of the protocol that will handle the passed URL. + /// Name of the protocol or NULL. 
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string avio_find_protocol_name( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url); + + /// Force flushing of buffered data. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_flush(AVIOContext* @s); + + /// Free entry allocated by avio_read_dir(). + /// entry to be freed. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern void avio_free_directory_entry(AVIODirEntry** @entry); + + /// Return the written size and a pointer to the buffer. The AVIOContext stream is left intact. The buffer must NOT be freed. No padding is added to the buffer. + /// IO context + /// pointer to a byte buffer + /// the length of the byte buffer + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern int avio_get_dyn_buf(AVIOContext* @s, byte** @pbuffer); + + /// Read a string from pb into buf. The reading will terminate when either a NULL character was encountered, maxlen bytes have been read, or nothing more can be read from pb. The result is guaranteed to be NULL-terminated, it will be truncated if buf is too small. Note that the string is not interpreted or validated in any way, it might get truncated in the middle of a sequence for multi-byte encodings. + /// number of bytes read (is always < = maxlen). If reading ends on EOF or error, the return value will be one more than bytes actually read. 
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_get_str(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_get_str16be(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen);

/// <summary>Read a UTF-16 string from pb and convert it to UTF-8. The reading will terminate when either a null or invalid character was encountered or maxlen bytes have been read.</summary>
/// <returns>number of bytes read (is always &lt;= maxlen)</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_get_str16le(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen);

/// <summary>Perform one step of the protocol handshake to accept a new client. This function must be called on a client returned by avio_accept() before using it as a read/write context. It is separate from avio_accept() because it may block. A step of the handshake is defined by places where the application may decide to change the proceedings. For example, on a protocol with a request header and a reply header, each one can constitute a step because the application may use the parameters from the request to change parameters in the reply; or each individual chunk of the request can constitute a step. If the handshake is already finished, avio_handshake() does nothing and returns 0 immediately.</summary>
/// <param name="c">the client context to perform the handshake on</param>
/// <returns>0 on a complete and successful handshake, &gt; 0 if the handshake progressed, but is not complete, &lt; 0 for an AVERROR code</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_handshake(AVIOContext* @c);

/// <summary>Create and initialize a AVIOContext for accessing the resource indicated by url.</summary>
/// <param name="s">Used to return the pointer to the created AVIOContext. In case of failure the pointed to value is set to NULL.</param>
/// <param name="url">resource to access</param>
/// <param name="flags">flags which control how the resource indicated by url is to be opened</param>
/// <returns>&gt;= 0 in case of success, a negative value corresponding to an AVERROR code in case of failure</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_open(AVIOContext** @s,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @url, int @flags);

/// <summary>Open directory for reading.</summary>
/// <param name="s">directory read context. Pointer to a NULL pointer must be passed.</param>
/// <param name="url">directory to be listed.</param>
/// <param name="options">A dictionary filled with protocol-private options. On return this parameter will be destroyed and replaced with a dictionary containing options that were not found. May be NULL.</param>
/// <returns>&gt;=0 on success or negative on error.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_open_dir(AVIODirContext** @s,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @url, AVDictionary** @options);

/// <summary>Open a write only memory stream.</summary>
/// <param name="s">new IO context</param>
/// <returns>zero if no error.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_open_dyn_buf(AVIOContext** @s);

/// <summary>Create and initialize a AVIOContext for accessing the resource indicated by url.</summary>
/// <param name="s">Used to return the pointer to the created AVIOContext. In case of failure the pointed to value is set to NULL.</param>
/// <param name="url">resource to access</param>
/// <param name="flags">flags which control how the resource indicated by url is to be opened</param>
/// <param name="int_cb">an interrupt callback to be used at the protocols level</param>
/// <param name="options">A dictionary filled with protocol-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL.</param>
/// <returns>&gt;= 0 in case of success, a negative value corresponding to an AVERROR code in case of failure</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_open2(AVIOContext** @s,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options);

/// <summary>Pause and resume playing - only meaningful if using a network streaming protocol (e.g. MMS).</summary>
/// <param name="h">IO context from which to call the read_pause function pointer</param>
/// <param name="pause">1 for pause, 0 for resume</param>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_pause(AVIOContext* @h, int @pause);

/// <summary>Write a NULL terminated array of strings to the context. Usually you don't need to use this function directly but its macro wrapper, avio_print.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avio_print_string_array(AVIOContext* @s, byte*[] @strings);

/// <summary>Writes a formatted string to the context.</summary>
/// <returns>number of bytes written, &lt; 0 on error.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_printf(AVIOContext* @s,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @fmt);

/// Get AVClass by names of available protocols.
/// A AVClass of input protocol name or NULL
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern AVClass* avio_protocol_get_class(
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @name);

/// <summary>Write a NULL-terminated string.</summary>
/// <returns>number of bytes written.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_put_str(AVIOContext* @s,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @str);

/// <summary>Convert an UTF-8 string to UTF-16BE and write it.</summary>
/// <param name="s">the AVIOContext</param>
/// <param name="str">NULL-terminated UTF-8 string</param>
/// <returns>number of bytes written.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_put_str16be(AVIOContext* @s,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @str);

/// <summary>Convert an UTF-8 string to UTF-16LE and write it.</summary>
/// <param name="s">the AVIOContext</param>
/// <param name="str">NULL-terminated UTF-8 string</param>
/// <returns>number of bytes written.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_put_str16le(AVIOContext* @s,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @str);

/// @{
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_r8(AVIOContext* @s);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern uint avio_rb16(AVIOContext* @s);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern uint avio_rb24(AVIOContext* @s);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern uint avio_rb32(AVIOContext* @s);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern ulong avio_rb64(AVIOContext* @s);

/// <summary>Read size bytes from AVIOContext into buf.</summary>
/// <returns>number of bytes read or AVERROR</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_read(AVIOContext* @s, byte* @buf, int @size);

/// <summary>Get next directory entry.</summary>
/// <param name="s">directory read context.</param>
/// <param name="next">next entry or NULL when no more entries.</param>
/// <returns>&gt;=0 on success or negative on error. End of list is not considered an error.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_read_dir(AVIODirContext* @s, AVIODirEntry** @next);

/// <summary>Read size bytes from AVIOContext into buf. Unlike avio_read(), this is allowed to read fewer bytes than requested. The missing bytes can be read in the next call. This always tries to read at least 1 byte. Useful to reduce latency in certain cases.</summary>
/// <returns>number of bytes read or AVERROR</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_read_partial(AVIOContext* @s, byte* @buf, int @size);

/// <summary>Read contents of h into print buffer, up to max_size bytes, or up to EOF.</summary>
/// <returns>0 for success (max_size bytes read or EOF reached), negative error code otherwise</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_read_to_bprint(AVIOContext* @h, AVBPrint* @pb, ulong @max_size);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern uint avio_rl16(AVIOContext* @s);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern uint avio_rl24(AVIOContext* @s);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern uint avio_rl32(AVIOContext* @s);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern ulong avio_rl64(AVIOContext* @s);

/// <summary>fseek() equivalent for AVIOContext.</summary>
/// <returns>new position or AVERROR.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern long avio_seek(AVIOContext* @s, long @offset, int @whence);

/// Seek to a given timestamp relative to some component stream. Only meaningful if using a network streaming protocol (e.g. MMS.).
/// IO context from which to call the seek function pointers
/// The stream index that the timestamp is relative to. If stream_index is (-1) the timestamp should be in AV_TIME_BASE units from the beginning of the presentation. If a stream_index >= 0 is used and the protocol does not support seeking based on component streams, the call will fail.
/// timestamp in AVStream.time_base units or if there is no stream specified then in AV_TIME_BASE units.
/// Optional combination of AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE and AVSEEK_FLAG_ANY.
/// The protocol may silently ignore AVSEEK_FLAG_BACKWARD and AVSEEK_FLAG_ANY, but AVSEEK_FLAG_BYTE will fail if used and not supported.
/// >= 0 on success
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern long avio_seek_time(AVIOContext* @h, int @stream_index, long @timestamp, int @flags);

/// <summary>Get the filesize.</summary>
/// <returns>filesize or AVERROR</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern long avio_size(AVIOContext* @s);

/// <summary>Skip given number of bytes forward</summary>
/// <returns>new position or AVERROR.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern long avio_skip(AVIOContext* @s, long @offset);

/// <summary>Writes a formatted string to the context taking a va_list.</summary>
/// <returns>number of bytes written, &lt; 0 on error.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int avio_vprintf(AVIOContext* @s,
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @fmt, byte* @ap);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avio_w8(AVIOContext* @s, int @b);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avio_wb16(AVIOContext* @s, uint @val);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avio_wb24(AVIOContext* @s, uint @val);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avio_wb32(AVIOContext* @s, uint @val);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avio_wb64(AVIOContext* @s, ulong @val);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avio_wl16(AVIOContext* @s, uint @val);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avio_wl24(AVIOContext* @s, uint @val);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avio_wl32(AVIOContext* @s, uint @val);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avio_wl64(AVIOContext* @s, ulong @val);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avio_write(AVIOContext* @s, byte* @buf, int @size);

/// <summary>Mark the written bytestream as a specific type.</summary>
/// <param name="time">the stream time the current bytestream pos corresponds to (in AV_TIME_BASE units), or AV_NOPTS_VALUE if unknown or not applicable</param>
/// <param name="type">the kind of data written starting at the current pos</param>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avio_write_marker(AVIOContext* @s, long @time, AVIODataMarkerType @type);

/// <summary>Free all allocated data in the given subtitle struct.</summary>
/// <param name="sub">AVSubtitle to free.</param>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void avsubtitle_free(AVSubtitle* @sub);

/// <summary>Return the libavutil build-time configuration.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string avutil_configuration();

/// <summary>Return the libavutil license.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string avutil_license();

/// <summary>Return the LIBAVUTIL_VERSION_INT constant.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern uint avutil_version();

/// <summary>Return the libpostproc build-time configuration.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string postproc_configuration();

/// <summary>Return the libpostproc license.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string postproc_license();

/// <summary>Return the LIBPOSTPROC_VERSION_INT constant.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern uint postproc_version();

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void pp_free_context(void* @ppContext);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void pp_free_mode(void* @mode);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void* pp_get_context(int @width, int @height, int @flags);

/// Return a pp_mode or NULL if an error occurred.
/// the string after "-pp" on the command line
/// a number from 0 to PP_QUALITY_MAX
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void* pp_get_mode_by_name_and_quality(
#if NETSTANDARD2_1_OR_GREATER
    [MarshalAs(UnmanagedType.LPUTF8Str)]
#else
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
    string @name, int @quality);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void pp_postprocess(in byte_ptr3 @src, in int3 @srcStride, ref byte_ptr3 @dst, in int3 @dstStride, int @horizontalSize, int @verticalSize, sbyte* @QP_store, int @QP_stride, void* @mode, void* @ppContext, int @pict_type);

/// <summary>Allocate SwrContext.</summary>
/// <returns>NULL on error, allocated context otherwise</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern SwrContext* swr_alloc();

/// <summary>Allocate SwrContext if needed and set/reset common parameters.</summary>
/// <param name="s">existing Swr context if available, or NULL if not</param>
/// <param name="out_ch_layout">output channel layout (AV_CH_LAYOUT_*)</param>
/// <param name="out_sample_fmt">output sample format (AV_SAMPLE_FMT_*).</param>
/// <param name="out_sample_rate">output sample rate (frequency in Hz)</param>
/// <param name="in_ch_layout">input channel layout (AV_CH_LAYOUT_*)</param>
/// <param name="in_sample_fmt">input sample format (AV_SAMPLE_FMT_*).</param>
/// <param name="in_sample_rate">input sample rate (frequency in Hz)</param>
/// <param name="log_offset">logging level offset</param>
/// <param name="log_ctx">parent logging context, can be NULL</param>
/// <returns>NULL on error, allocated context otherwise</returns>
// Fix: the generator emitted a truncated [Obsolete("use ")]; the intended
// replacement is the channel-layout-aware variant declared below.
[Obsolete("use swr_alloc_set_opts2()")]
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern SwrContext* swr_alloc_set_opts(SwrContext* @s, long @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, long @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx);

/// <summary>Allocate SwrContext if needed and set/reset common parameters.</summary>
/// <param name="ps">Pointer to an existing Swr context if available, or to NULL if not. On success, *ps will be set to the allocated context.</param>
/// <param name="out_ch_layout">output channel layout (e.g. AV_CHANNEL_LAYOUT_*)</param>
/// <param name="out_sample_fmt">output sample format (AV_SAMPLE_FMT_*).</param>
/// <param name="out_sample_rate">output sample rate (frequency in Hz)</param>
/// <param name="in_ch_layout">input channel layout (e.g. AV_CHANNEL_LAYOUT_*)</param>
/// <param name="in_sample_fmt">input sample format (AV_SAMPLE_FMT_*).</param>
/// <param name="in_sample_rate">input sample rate (frequency in Hz)</param>
/// <param name="log_offset">logging level offset</param>
/// <param name="log_ctx">parent logging context, can be NULL</param>
/// <returns>0 on success, a negative AVERROR code on error. On error, the Swr context is freed and *ps set to NULL.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_alloc_set_opts2(SwrContext** @ps, AVChannelLayout* @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, AVChannelLayout* @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx);

/// <summary>Generate a channel mixing matrix.</summary>
/// <param name="in_layout">input channel layout</param>
/// <param name="out_layout">output channel layout</param>
/// <param name="center_mix_level">mix level for the center channel</param>
/// <param name="surround_mix_level">mix level for the surround channel(s)</param>
/// <param name="lfe_mix_level">mix level for the low-frequency effects channel</param>
/// <param name="rematrix_maxval">if 1.0, coefficients will be normalized to prevent overflow. if INT_MAX, coefficients will not be normalized.</param>
/// <param name="matrix">mixing coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o.</param>
/// <param name="stride">distance between adjacent input channels in the matrix array</param>
/// <param name="matrix_encoding">matrixed stereo downmix mode (e.g. dplii)</param>
/// <param name="log_ctx">parent logging context, can be NULL</param>
/// <returns>0 on success, negative AVERROR code on failure</returns>
// Fix: same generator defect as above — the Obsolete message lost its target.
[Obsolete("use swr_build_matrix2()")]
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_build_matrix(ulong @in_layout, ulong @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @rematrix_maxval, double @rematrix_volume, double* @matrix, int @stride, AVMatrixEncoding @matrix_encoding, void* @log_ctx);

/// <summary>Generate a channel mixing matrix.</summary>
/// <param name="in_layout">input channel layout</param>
/// <param name="out_layout">output channel layout</param>
/// <param name="center_mix_level">mix level for the center channel</param>
/// <param name="surround_mix_level">mix level for the surround channel(s)</param>
/// <param name="lfe_mix_level">mix level for the low-frequency effects channel</param>
/// <param name="matrix">mixing coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o.</param>
/// <param name="stride">distance between adjacent input channels in the matrix array</param>
/// <param name="matrix_encoding">matrixed stereo downmix mode (e.g. dplii)</param>
/// <returns>0 on success, negative AVERROR code on failure</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_build_matrix2(AVChannelLayout* @in_layout, AVChannelLayout* @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @maxval, double @rematrix_volume, double* @matrix, long @stride, AVMatrixEncoding @matrix_encoding, void* @log_context);

/// <summary>Closes the context so that swr_is_initialized() returns 0.</summary>
/// <param name="s">Swr context to be closed</param>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void swr_close(SwrContext* @s);

/// <summary>Configure or reconfigure the SwrContext using the information provided by the AVFrames.</summary>
/// <param name="swr">audio resample context</param>
/// <returns>0 on success, AVERROR on failure.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_config_frame(SwrContext* @swr, AVFrame* @out, AVFrame* @in);

/// Convert audio.
/// allocated Swr context, with parameters set
/// output buffers, only the first one need be set in case of packed audio
/// amount of space available for output in samples per channel
/// input buffers, only the first one need to be set in case of packed audio
/// number of input samples available in one channel
/// number of samples output per channel, negative value on error
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_convert(SwrContext* @s, byte** @out, int @out_count, byte** @in, int @in_count);

/// <summary>Convert the samples in the input AVFrame and write them to the output AVFrame.</summary>
/// <param name="swr">audio resample context</param>
/// <param name="output">output AVFrame</param>
/// <param name="input">input AVFrame</param>
/// <returns>0 on success, AVERROR on failure or nonmatching configuration.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_convert_frame(SwrContext* @swr, AVFrame* @output, AVFrame* @input);

/// <summary>Drops the specified number of output samples.</summary>
/// <param name="s">allocated Swr context</param>
/// <param name="count">number of samples to be dropped</param>
/// <returns>&gt;= 0 on success, or a negative AVERROR code on failure</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_drop_output(SwrContext* @s, int @count);

/// <summary>Free the given SwrContext and set the pointer to NULL.</summary>
/// <param name="s">a pointer to a pointer to Swr context</param>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void swr_free(SwrContext** @s);

/// <summary>Get the AVClass for SwrContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options.</summary>
/// <returns>the AVClass of SwrContext</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern AVClass* swr_get_class();

/// <summary>Gets the delay the next input sample will experience relative to the next output sample.</summary>
/// <param name="s">swr context</param>
/// <param name="base">timebase in which the returned delay will be:</param>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern long swr_get_delay(SwrContext* @s, long @base);

/// <summary>Find an upper bound on the number of samples that the next swr_convert call will output, if called with in_samples of input samples. This depends on the internal state, and anything changing the internal state (like further swr_convert() calls) may change the number of samples swr_get_out_samples() returns for the same number of input samples.</summary>
/// <param name="in_samples">number of input samples.</param>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_get_out_samples(SwrContext* @s, int @in_samples);

/// <summary>Initialize context after user parameters have been set.</summary>
/// <param name="s">Swr context to initialize</param>
/// <returns>AVERROR error code in case of failure.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_init(SwrContext* @s);

/// <summary>Injects the specified number of silence samples.</summary>
/// <param name="s">allocated Swr context</param>
/// <param name="count">number of silence samples to be injected</param>
/// <returns>&gt;= 0 on success, or a negative AVERROR code on failure</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_inject_silence(SwrContext* @s, int @count);

/// <summary>Check whether an swr context has been initialized or not.</summary>
/// <param name="s">Swr context to check</param>
/// <returns>positive if it has been initialized, 0 if not initialized</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_is_initialized(SwrContext* @s);

/// <summary>Convert the next timestamp from input to output timestamps are in 1/(in_sample_rate * out_sample_rate) units.</summary>
/// <returns>the output timestamp for the next output sample</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern long swr_next_pts(SwrContext* @s, long @pts);

/// <summary>Set a customized input channel mapping.</summary>
/// <param name="s">allocated Swr context, not yet initialized</param>
/// <param name="channel_map">customized input channel mapping (array of channel indexes, -1 for a muted channel)</param>
/// <returns>&gt;= 0 on success, or AVERROR error code in case of failure.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_set_channel_mapping(SwrContext* @s, int* @channel_map);

/// <summary>Activate resampling compensation ("soft" compensation). This function is internally called when needed in swr_next_pts().</summary>
/// <param name="s">allocated Swr context. If it is not initialized, or SWR_FLAG_RESAMPLE is not set, swr_init() is called with the flag set.</param>
/// <param name="sample_delta">delta in PTS per sample</param>
/// <param name="compensation_distance">number of samples to compensate for</param>
/// <returns>&gt;= 0 on success, AVERROR error codes if:</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_set_compensation(SwrContext* @s, int @sample_delta, int @compensation_distance);

/// <summary>Set a customized remix matrix.</summary>
/// <param name="s">allocated Swr context, not yet initialized</param>
/// <param name="matrix">remix coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o</param>
/// <param name="stride">offset between lines of the matrix</param>
/// <returns>&gt;= 0 on success, or AVERROR error code in case of failure.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int swr_set_matrix(SwrContext* @s, double* @matrix, int @stride);

/// <summary>Return the swr build-time configuration.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string swresample_configuration();

/// Return the swr license.
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public static extern string swresample_license();

/// <summary>Return the LIBSWRESAMPLE_VERSION_INT constant.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern uint swresample_version();

/// <summary>Allocate an empty SwsContext. This must be filled and passed to sws_init_context(). For filling see AVOptions, options.c and sws_setColorspaceDetails().</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern SwsContext* sws_alloc_context();

/// <summary>Allocate and return an uninitialized vector with length coefficients.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern SwsVector* sws_allocVec(int @length);

/// <summary>Convert an 8-bit paletted frame into a frame with a color depth of 24 bits.</summary>
/// <param name="src">source frame buffer</param>
/// <param name="dst">destination frame buffer</param>
/// <param name="num_pixels">number of pixels to convert</param>
/// <param name="palette">array with [256] entries, which must match color arrangement (RGB or BGR) of src</param>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void sws_convertPalette8ToPacked24(byte* @src, byte* @dst, int @num_pixels, byte* @palette);

/// <summary>Convert an 8-bit paletted frame into a frame with a color depth of 32 bits.</summary>
/// <param name="src">source frame buffer</param>
/// <param name="dst">destination frame buffer</param>
/// <param name="num_pixels">number of pixels to convert</param>
/// <param name="palette">array with [256] entries, which must match color arrangement (RGB or BGR) of src</param>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void sws_convertPalette8ToPacked32(byte* @src, byte* @dst, int @num_pixels, byte* @palette);

/// <summary>Finish the scaling process for a pair of source/destination frames previously submitted with sws_frame_start(). Must be called after all sws_send_slice() and sws_receive_slice() calls are done, before any new sws_frame_start() calls.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void sws_frame_end(SwsContext* @c);

/// <summary>Initialize the scaling process for a given pair of source/destination frames. Must be called before any calls to sws_send_slice() and sws_receive_slice().</summary>
/// <param name="dst">The destination frame.</param>
/// <param name="src">The source frame. The data buffers must be allocated, but the frame data does not have to be ready at this point. Data availability is then signalled by sws_send_slice().</param>
/// <returns>0 on success, a negative AVERROR code on failure</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int sws_frame_start(SwsContext* @c, AVFrame* @dst, AVFrame* @src);

/// <summary>Free the swscaler context swsContext. If swsContext is NULL, then does nothing.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void sws_freeContext(SwsContext* @swsContext);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void sws_freeFilter(SwsFilter* @filter);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void sws_freeVec(SwsVector* @a);

/// <summary>Get the AVClass for swsContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern AVClass* sws_get_class();

/// <summary>Check if context can be reused, otherwise reallocate a new one.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern SwsContext* sws_getCachedContext(SwsContext* @context, int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param);

/// <summary>Return a pointer to yuv&lt;-&gt;rgb coefficients for the given colorspace suitable for sws_setColorspaceDetails().</summary>
/// <param name="colorspace">One of the SWS_CS_* macros. If invalid, SWS_CS_DEFAULT is used.</param>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int* sws_getCoefficients(int @colorspace);

/// #if LIBSWSCALE_VERSION_MAJOR > 6
/// negative error code on error, non negative otherwise #else
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int sws_getColorspaceDetails(SwsContext* @c, int** @inv_table, int* @srcRange, int** @table, int* @dstRange, int* @brightness, int* @contrast, int* @saturation);

/// <summary>Allocate and return an SwsContext. You need it to perform scaling/conversion operations using sws_scale().</summary>
/// <param name="srcW">the width of the source image</param>
/// <param name="srcH">the height of the source image</param>
/// <param name="srcFormat">the source image format</param>
/// <param name="dstW">the width of the destination image</param>
/// <param name="dstH">the height of the destination image</param>
/// <param name="dstFormat">the destination image format</param>
/// <param name="flags">specify which algorithm and options to use for rescaling</param>
/// <param name="param">extra parameters to tune the used scaler For SWS_BICUBIC param[0] and [1] tune the shape of the basis function, param[0] tunes f(1) and param[1] f´(1) For SWS_GAUSS param[0] tunes the exponent and thus cutoff frequency For SWS_LANCZOS param[0] tunes the width of the window function</param>
/// <returns>a pointer to an allocated context, or NULL in case of error</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern SwsContext* sws_getContext(int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param);

[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern SwsFilter* sws_getDefaultFilter(float @lumaGBlur, float @chromaGBlur, float @lumaSharpen, float @chromaSharpen, float @chromaHShift, float @chromaVShift, int @verbose);

/// <summary>Return a normalized Gaussian curve used to filter stuff quality = 3 is high quality, lower is lower quality.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern SwsVector* sws_getGaussianVec(double @variance, double @quality);

/// <summary>Initialize the swscaler context sws_context.</summary>
/// <returns>zero or positive value on success, a negative value on error</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int sws_init_context(SwsContext* @sws_context, SwsFilter* @srcFilter, SwsFilter* @dstFilter);

/// Returns a positive value if an endianness conversion for pix_fmt is supported, 0 otherwise.
/// the pixel format
/// a positive value if an endianness conversion for pix_fmt is supported, 0 otherwise.
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int sws_isSupportedEndiannessConversion(AVPixelFormat @pix_fmt);

/// <summary>Return a positive value if pix_fmt is a supported input format, 0 otherwise.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int sws_isSupportedInput(AVPixelFormat @pix_fmt);

/// <summary>Return a positive value if pix_fmt is a supported output format, 0 otherwise.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int sws_isSupportedOutput(AVPixelFormat @pix_fmt);

/// <summary>Scale all the coefficients of a so that their sum equals height.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void sws_normalizeVec(SwsVector* @a, double @height);

/// <summary>Request a horizontal slice of the output data to be written into the frame previously provided to sws_frame_start().</summary>
/// <param name="slice_start">first row of the slice; must be a multiple of sws_receive_slice_alignment()</param>
/// <param name="slice_height">number of rows in the slice; must be a multiple of sws_receive_slice_alignment(), except for the last slice (i.e. when slice_start+slice_height is equal to output frame height)</param>
/// <returns>a non-negative number if the data was successfully written into the output AVERROR(EAGAIN) if more input data needs to be provided before the output can be produced another negative AVERROR code on other kinds of scaling failure</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int sws_receive_slice(SwsContext* @c, uint @slice_start, uint @slice_height);

/// <summary>Returns alignment required for output slices requested with sws_receive_slice(). Slice offsets and sizes passed to sws_receive_slice() must be multiples of the value returned from this function.</summary>
/// <returns>alignment required for output slices requested with sws_receive_slice(). Slice offsets and sizes passed to sws_receive_slice() must be multiples of the value returned from this function.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern uint sws_receive_slice_alignment(SwsContext* @c);

/// <summary>Scale the image slice in srcSlice and put the resulting scaled slice in the image in dst. A slice is a sequence of consecutive rows in an image.</summary>
/// <param name="c">the scaling context previously created with sws_getContext()</param>
/// <param name="srcSlice">the array containing the pointers to the planes of the source slice</param>
/// <param name="srcStride">the array containing the strides for each plane of the source image</param>
/// <param name="srcSliceY">the position in the source image of the slice to process, that is the number (counted starting from zero) in the image of the first row of the slice</param>
/// <param name="srcSliceH">the height of the source slice, that is the number of rows in the slice</param>
/// <param name="dst">the array containing the pointers to the planes of the destination image</param>
/// <param name="dstStride">the array containing the strides for each plane of the destination image</param>
/// <returns>the height of the output slice</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int sws_scale(SwsContext* @c, byte*[] @srcSlice, int[] @srcStride, int @srcSliceY, int @srcSliceH, byte*[] @dst, int[] @dstStride);

/// <summary>Scale source data from src and write the output to dst.</summary>
/// <param name="dst">The destination frame. See documentation for sws_frame_start() for more details.</param>
/// <param name="src">The source frame.</param>
/// <returns>0 on success, a negative AVERROR code on failure</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int sws_scale_frame(SwsContext* @c, AVFrame* @dst, AVFrame* @src);

/// <summary>Scale all the coefficients of a by the scalar value.</summary>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern void sws_scaleVec(SwsVector* @a, double @scalar);

/// <summary>Indicate that a horizontal slice of input data is available in the source frame previously provided to sws_frame_start(). The slices may be provided in any order, but may not overlap. For vertically subsampled pixel formats, the slices must be aligned according to subsampling.</summary>
/// <param name="slice_start">first row of the slice</param>
/// <param name="slice_height">number of rows in the slice</param>
/// <returns>a non-negative number on success, a negative AVERROR code on failure.</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int sws_send_slice(SwsContext* @c, uint @slice_start, uint @slice_height);

/// <summary>Returns negative error code on error, non negative otherwise #else Returns -1 if not supported #endif</summary>
/// <param name="inv_table">the yuv2rgb coefficients describing the input yuv space, normally ff_yuv2rgb_coeffs[x]</param>
/// <param name="srcRange">flag indicating the white-black range of the input (1=jpeg / 0=mpeg)</param>
/// <param name="table">the yuv2rgb coefficients describing the output yuv space, normally ff_yuv2rgb_coeffs[x]</param>
/// <param name="dstRange">flag indicating the white-black range of the output (1=jpeg / 0=mpeg)</param>
/// <param name="brightness">16.16 fixed point brightness correction</param>
/// <param name="contrast">16.16 fixed point contrast correction</param>
/// <param name="saturation">16.16 fixed point saturation correction #if LIBSWSCALE_VERSION_MAJOR &gt; 6</param>
/// <returns>negative error code on error, non negative otherwise #else</returns>
[DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)]
public static extern int sws_setColorspaceDetails(SwsContext* @c, in int4 @inv_table, int @srcRange, in int4 @table, int @dstRange, int @brightness, int @contrast, int @saturation);

/// Return the libswscale build-time configuration.
+ [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string swscale_configuration(); + + /// Return the libswscale license. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public static extern string swscale_license(); + + /// Color conversion and scaling library. + [DllImport("__Internal", CallingConvention = CallingConvention.Cdecl)] + public static extern uint swscale_version(); + + public unsafe static void Initialize() + { + vectors.av_abuffersink_params_alloc = av_abuffersink_params_alloc; + vectors.av_add_index_entry = av_add_index_entry; + vectors.av_add_q = av_add_q; + vectors.av_add_stable = av_add_stable; + vectors.av_append_packet = av_append_packet; + vectors.av_audio_fifo_alloc = av_audio_fifo_alloc; + vectors.av_audio_fifo_drain = av_audio_fifo_drain; + vectors.av_audio_fifo_free = av_audio_fifo_free; + vectors.av_audio_fifo_peek = av_audio_fifo_peek; + vectors.av_audio_fifo_peek_at = av_audio_fifo_peek_at; + vectors.av_audio_fifo_read = av_audio_fifo_read; + vectors.av_audio_fifo_realloc = av_audio_fifo_realloc; + vectors.av_audio_fifo_reset = av_audio_fifo_reset; + vectors.av_audio_fifo_size = av_audio_fifo_size; + vectors.av_audio_fifo_space = av_audio_fifo_space; + vectors.av_audio_fifo_write = av_audio_fifo_write; + vectors.av_bprint_channel_layout = av_bprint_channel_layout; + vectors.av_bsf_alloc = av_bsf_alloc; + vectors.av_bsf_flush = av_bsf_flush; + vectors.av_bsf_free = av_bsf_free; + vectors.av_bsf_get_by_name = av_bsf_get_by_name; + vectors.av_bsf_get_class = av_bsf_get_class; + vectors.av_bsf_get_null_filter = av_bsf_get_null_filter; + vectors.av_bsf_init = av_bsf_init; + vectors.av_bsf_iterate = av_bsf_iterate; + vectors.av_bsf_list_alloc = av_bsf_list_alloc; + 
vectors.av_bsf_list_append = av_bsf_list_append; + vectors.av_bsf_list_append2 = av_bsf_list_append2; + vectors.av_bsf_list_finalize = av_bsf_list_finalize; + vectors.av_bsf_list_free = av_bsf_list_free; + vectors.av_bsf_list_parse_str = av_bsf_list_parse_str; + vectors.av_bsf_receive_packet = av_bsf_receive_packet; + vectors.av_bsf_send_packet = av_bsf_send_packet; + vectors.av_buffer_alloc = av_buffer_alloc; + vectors.av_buffer_allocz = av_buffer_allocz; + vectors.av_buffer_create = av_buffer_create; + vectors.av_buffer_default_free = av_buffer_default_free; + vectors.av_buffer_get_opaque = av_buffer_get_opaque; + vectors.av_buffer_get_ref_count = av_buffer_get_ref_count; + vectors.av_buffer_is_writable = av_buffer_is_writable; + vectors.av_buffer_make_writable = av_buffer_make_writable; + vectors.av_buffer_pool_buffer_get_opaque = av_buffer_pool_buffer_get_opaque; + vectors.av_buffer_pool_get = av_buffer_pool_get; + vectors.av_buffer_pool_init = av_buffer_pool_init; + vectors.av_buffer_pool_init2 = av_buffer_pool_init2; + vectors.av_buffer_pool_uninit = av_buffer_pool_uninit; + vectors.av_buffer_realloc = av_buffer_realloc; + vectors.av_buffer_ref = av_buffer_ref; + vectors.av_buffer_replace = av_buffer_replace; + vectors.av_buffer_unref = av_buffer_unref; + vectors.av_buffersink_get_ch_layout = av_buffersink_get_ch_layout; + vectors.av_buffersink_get_channel_layout = av_buffersink_get_channel_layout; + vectors.av_buffersink_get_channels = av_buffersink_get_channels; + vectors.av_buffersink_get_format = av_buffersink_get_format; + vectors.av_buffersink_get_frame = av_buffersink_get_frame; + vectors.av_buffersink_get_frame_flags = av_buffersink_get_frame_flags; + vectors.av_buffersink_get_frame_rate = av_buffersink_get_frame_rate; + vectors.av_buffersink_get_h = av_buffersink_get_h; + vectors.av_buffersink_get_hw_frames_ctx = av_buffersink_get_hw_frames_ctx; + vectors.av_buffersink_get_sample_aspect_ratio = av_buffersink_get_sample_aspect_ratio; + 
vectors.av_buffersink_get_sample_rate = av_buffersink_get_sample_rate; + vectors.av_buffersink_get_samples = av_buffersink_get_samples; + vectors.av_buffersink_get_time_base = av_buffersink_get_time_base; + vectors.av_buffersink_get_type = av_buffersink_get_type; + vectors.av_buffersink_get_w = av_buffersink_get_w; + vectors.av_buffersink_params_alloc = av_buffersink_params_alloc; + vectors.av_buffersink_set_frame_size = av_buffersink_set_frame_size; + vectors.av_buffersrc_add_frame = av_buffersrc_add_frame; + vectors.av_buffersrc_add_frame_flags = av_buffersrc_add_frame_flags; + vectors.av_buffersrc_close = av_buffersrc_close; + vectors.av_buffersrc_get_nb_failed_requests = av_buffersrc_get_nb_failed_requests; + vectors.av_buffersrc_parameters_alloc = av_buffersrc_parameters_alloc; + vectors.av_buffersrc_parameters_set = av_buffersrc_parameters_set; + vectors.av_buffersrc_write_frame = av_buffersrc_write_frame; + vectors.av_calloc = av_calloc; + vectors.av_channel_description = av_channel_description; + vectors.av_channel_description_bprint = av_channel_description_bprint; + vectors.av_channel_from_string = av_channel_from_string; + vectors.av_channel_layout_channel_from_index = av_channel_layout_channel_from_index; + vectors.av_channel_layout_channel_from_string = av_channel_layout_channel_from_string; + vectors.av_channel_layout_check = av_channel_layout_check; + vectors.av_channel_layout_compare = av_channel_layout_compare; + vectors.av_channel_layout_copy = av_channel_layout_copy; + vectors.av_channel_layout_default = av_channel_layout_default; + vectors.av_channel_layout_describe = av_channel_layout_describe; + vectors.av_channel_layout_describe_bprint = av_channel_layout_describe_bprint; + vectors.av_channel_layout_extract_channel = av_channel_layout_extract_channel; + vectors.av_channel_layout_from_mask = av_channel_layout_from_mask; + vectors.av_channel_layout_from_string = av_channel_layout_from_string; + vectors.av_channel_layout_index_from_channel = 
av_channel_layout_index_from_channel; + vectors.av_channel_layout_index_from_string = av_channel_layout_index_from_string; + vectors.av_channel_layout_standard = av_channel_layout_standard; + vectors.av_channel_layout_subset = av_channel_layout_subset; + vectors.av_channel_layout_uninit = av_channel_layout_uninit; + vectors.av_channel_name = av_channel_name; + vectors.av_channel_name_bprint = av_channel_name_bprint; + vectors.av_chroma_location_from_name = av_chroma_location_from_name; + vectors.av_chroma_location_name = av_chroma_location_name; + vectors.av_codec_get_id = av_codec_get_id; + vectors.av_codec_get_tag = av_codec_get_tag; + vectors.av_codec_get_tag2 = av_codec_get_tag2; + vectors.av_codec_is_decoder = av_codec_is_decoder; + vectors.av_codec_is_encoder = av_codec_is_encoder; + vectors.av_codec_iterate = av_codec_iterate; + vectors.av_color_primaries_from_name = av_color_primaries_from_name; + vectors.av_color_primaries_name = av_color_primaries_name; + vectors.av_color_range_from_name = av_color_range_from_name; + vectors.av_color_range_name = av_color_range_name; + vectors.av_color_space_from_name = av_color_space_from_name; + vectors.av_color_space_name = av_color_space_name; + vectors.av_color_transfer_from_name = av_color_transfer_from_name; + vectors.av_color_transfer_name = av_color_transfer_name; + vectors.av_compare_mod = av_compare_mod; + vectors.av_compare_ts = av_compare_ts; + vectors.av_content_light_metadata_alloc = av_content_light_metadata_alloc; + vectors.av_content_light_metadata_create_side_data = av_content_light_metadata_create_side_data; + vectors.av_cpb_properties_alloc = av_cpb_properties_alloc; + vectors.av_cpu_count = av_cpu_count; + vectors.av_cpu_force_count = av_cpu_force_count; + vectors.av_cpu_max_align = av_cpu_max_align; + vectors.av_d2q = av_d2q; + vectors.av_d3d11va_alloc_context = av_d3d11va_alloc_context; + vectors.av_default_get_category = av_default_get_category; + vectors.av_default_item_name = 
av_default_item_name; + vectors.av_demuxer_iterate = av_demuxer_iterate; + vectors.av_dict_copy = av_dict_copy; + vectors.av_dict_count = av_dict_count; + vectors.av_dict_free = av_dict_free; + vectors.av_dict_get = av_dict_get; + vectors.av_dict_get_string = av_dict_get_string; + vectors.av_dict_parse_string = av_dict_parse_string; + vectors.av_dict_set = av_dict_set; + vectors.av_dict_set_int = av_dict_set_int; + vectors.av_disposition_from_string = av_disposition_from_string; + vectors.av_disposition_to_string = av_disposition_to_string; + vectors.av_div_q = av_div_q; + vectors.av_dump_format = av_dump_format; + vectors.av_dynamic_hdr_plus_alloc = av_dynamic_hdr_plus_alloc; + vectors.av_dynamic_hdr_plus_create_side_data = av_dynamic_hdr_plus_create_side_data; + vectors.av_dynarray_add = av_dynarray_add; + vectors.av_dynarray_add_nofree = av_dynarray_add_nofree; + vectors.av_dynarray2_add = av_dynarray2_add; + vectors.av_fast_malloc = av_fast_malloc; + vectors.av_fast_mallocz = av_fast_mallocz; + vectors.av_fast_padded_malloc = av_fast_padded_malloc; + vectors.av_fast_padded_mallocz = av_fast_padded_mallocz; + vectors.av_fast_realloc = av_fast_realloc; + vectors.av_file_map = av_file_map; + vectors.av_file_unmap = av_file_unmap; + vectors.av_filename_number_test = av_filename_number_test; + vectors.av_filter_iterate = av_filter_iterate; + vectors.av_find_best_pix_fmt_of_2 = av_find_best_pix_fmt_of_2; + vectors.av_find_best_stream = av_find_best_stream; + vectors.av_find_default_stream_index = av_find_default_stream_index; + vectors.av_find_input_format = av_find_input_format; + vectors.av_find_nearest_q_idx = av_find_nearest_q_idx; + vectors.av_find_program_from_stream = av_find_program_from_stream; + vectors.av_fmt_ctx_get_duration_estimation_method = av_fmt_ctx_get_duration_estimation_method; + vectors.av_fopen_utf8 = av_fopen_utf8; + vectors.av_force_cpu_flags = av_force_cpu_flags; + vectors.av_format_inject_global_side_data = 
av_format_inject_global_side_data; + vectors.av_fourcc_make_string = av_fourcc_make_string; + vectors.av_frame_alloc = av_frame_alloc; + vectors.av_frame_apply_cropping = av_frame_apply_cropping; + vectors.av_frame_clone = av_frame_clone; + vectors.av_frame_copy = av_frame_copy; + vectors.av_frame_copy_props = av_frame_copy_props; + vectors.av_frame_free = av_frame_free; + vectors.av_frame_get_buffer = av_frame_get_buffer; + vectors.av_frame_get_plane_buffer = av_frame_get_plane_buffer; + vectors.av_frame_get_side_data = av_frame_get_side_data; + vectors.av_frame_is_writable = av_frame_is_writable; + vectors.av_frame_make_writable = av_frame_make_writable; + vectors.av_frame_move_ref = av_frame_move_ref; + vectors.av_frame_new_side_data = av_frame_new_side_data; + vectors.av_frame_new_side_data_from_buf = av_frame_new_side_data_from_buf; + vectors.av_frame_ref = av_frame_ref; + vectors.av_frame_remove_side_data = av_frame_remove_side_data; + vectors.av_frame_side_data_name = av_frame_side_data_name; + vectors.av_frame_unref = av_frame_unref; + vectors.av_free = av_free; + vectors.av_freep = av_freep; + vectors.av_gcd = av_gcd; + vectors.av_gcd_q = av_gcd_q; + vectors.av_get_alt_sample_fmt = av_get_alt_sample_fmt; + vectors.av_get_audio_frame_duration = av_get_audio_frame_duration; + vectors.av_get_audio_frame_duration2 = av_get_audio_frame_duration2; + vectors.av_get_bits_per_pixel = av_get_bits_per_pixel; + vectors.av_get_bits_per_sample = av_get_bits_per_sample; + vectors.av_get_bytes_per_sample = av_get_bytes_per_sample; + vectors.av_get_channel_description = av_get_channel_description; + vectors.av_get_channel_layout = av_get_channel_layout; + vectors.av_get_channel_layout_channel_index = av_get_channel_layout_channel_index; + vectors.av_get_channel_layout_nb_channels = av_get_channel_layout_nb_channels; + vectors.av_get_channel_layout_string = av_get_channel_layout_string; + vectors.av_get_channel_name = av_get_channel_name; + vectors.av_get_colorspace_name = 
av_get_colorspace_name; + vectors.av_get_cpu_flags = av_get_cpu_flags; + vectors.av_get_default_channel_layout = av_get_default_channel_layout; + vectors.av_get_exact_bits_per_sample = av_get_exact_bits_per_sample; + vectors.av_get_extended_channel_layout = av_get_extended_channel_layout; + vectors.av_get_frame_filename = av_get_frame_filename; + vectors.av_get_frame_filename2 = av_get_frame_filename2; + vectors.av_get_media_type_string = av_get_media_type_string; + vectors.av_get_output_timestamp = av_get_output_timestamp; + vectors.av_get_packed_sample_fmt = av_get_packed_sample_fmt; + vectors.av_get_packet = av_get_packet; + vectors.av_get_padded_bits_per_pixel = av_get_padded_bits_per_pixel; + vectors.av_get_pcm_codec = av_get_pcm_codec; + vectors.av_get_picture_type_char = av_get_picture_type_char; + vectors.av_get_pix_fmt = av_get_pix_fmt; + vectors.av_get_pix_fmt_loss = av_get_pix_fmt_loss; + vectors.av_get_pix_fmt_name = av_get_pix_fmt_name; + vectors.av_get_pix_fmt_string = av_get_pix_fmt_string; + vectors.av_get_planar_sample_fmt = av_get_planar_sample_fmt; + vectors.av_get_profile_name = av_get_profile_name; + vectors.av_get_sample_fmt = av_get_sample_fmt; + vectors.av_get_sample_fmt_name = av_get_sample_fmt_name; + vectors.av_get_sample_fmt_string = av_get_sample_fmt_string; + vectors.av_get_standard_channel_layout = av_get_standard_channel_layout; + vectors.av_get_time_base_q = av_get_time_base_q; + vectors.av_gettime = av_gettime; + vectors.av_gettime_relative = av_gettime_relative; + vectors.av_gettime_relative_is_monotonic = av_gettime_relative_is_monotonic; + vectors.av_grow_packet = av_grow_packet; + vectors.av_guess_codec = av_guess_codec; + vectors.av_guess_format = av_guess_format; + vectors.av_guess_frame_rate = av_guess_frame_rate; + vectors.av_guess_sample_aspect_ratio = av_guess_sample_aspect_ratio; + vectors.av_hex_dump = av_hex_dump; + vectors.av_hex_dump_log = av_hex_dump_log; + vectors.av_hwdevice_ctx_alloc = av_hwdevice_ctx_alloc; + 
vectors.av_hwdevice_ctx_create = av_hwdevice_ctx_create; + vectors.av_hwdevice_ctx_create_derived = av_hwdevice_ctx_create_derived; + vectors.av_hwdevice_ctx_create_derived_opts = av_hwdevice_ctx_create_derived_opts; + vectors.av_hwdevice_ctx_init = av_hwdevice_ctx_init; + vectors.av_hwdevice_find_type_by_name = av_hwdevice_find_type_by_name; + vectors.av_hwdevice_get_hwframe_constraints = av_hwdevice_get_hwframe_constraints; + vectors.av_hwdevice_get_type_name = av_hwdevice_get_type_name; + vectors.av_hwdevice_hwconfig_alloc = av_hwdevice_hwconfig_alloc; + vectors.av_hwdevice_iterate_types = av_hwdevice_iterate_types; + vectors.av_hwframe_constraints_free = av_hwframe_constraints_free; + vectors.av_hwframe_ctx_alloc = av_hwframe_ctx_alloc; + vectors.av_hwframe_ctx_create_derived = av_hwframe_ctx_create_derived; + vectors.av_hwframe_ctx_init = av_hwframe_ctx_init; + vectors.av_hwframe_get_buffer = av_hwframe_get_buffer; + vectors.av_hwframe_map = av_hwframe_map; + vectors.av_hwframe_transfer_data = av_hwframe_transfer_data; + vectors.av_hwframe_transfer_get_formats = av_hwframe_transfer_get_formats; + vectors.av_image_alloc = av_image_alloc; + vectors.av_image_check_sar = av_image_check_sar; + vectors.av_image_check_size = av_image_check_size; + vectors.av_image_check_size2 = av_image_check_size2; + vectors.av_image_copy = av_image_copy; + vectors.av_image_copy_plane = av_image_copy_plane; + vectors.av_image_copy_plane_uc_from = av_image_copy_plane_uc_from; + vectors.av_image_copy_to_buffer = av_image_copy_to_buffer; + vectors.av_image_copy_uc_from = av_image_copy_uc_from; + vectors.av_image_fill_arrays = av_image_fill_arrays; + vectors.av_image_fill_black = av_image_fill_black; + vectors.av_image_fill_linesizes = av_image_fill_linesizes; + vectors.av_image_fill_max_pixsteps = av_image_fill_max_pixsteps; + vectors.av_image_fill_plane_sizes = av_image_fill_plane_sizes; + vectors.av_image_fill_pointers = av_image_fill_pointers; + vectors.av_image_get_buffer_size = 
av_image_get_buffer_size; + vectors.av_image_get_linesize = av_image_get_linesize; + vectors.av_index_search_timestamp = av_index_search_timestamp; + vectors.av_init_packet = av_init_packet; + vectors.av_input_audio_device_next = av_input_audio_device_next; + vectors.av_input_video_device_next = av_input_video_device_next; + vectors.av_int_list_length_for_size = av_int_list_length_for_size; + vectors.av_interleaved_write_frame = av_interleaved_write_frame; + vectors.av_interleaved_write_uncoded_frame = av_interleaved_write_uncoded_frame; + vectors.av_log = av_log; + vectors.av_log_default_callback = av_log_default_callback; + vectors.av_log_format_line = av_log_format_line; + vectors.av_log_format_line2 = av_log_format_line2; + vectors.av_log_get_flags = av_log_get_flags; + vectors.av_log_get_level = av_log_get_level; + vectors.av_log_once = av_log_once; + vectors.av_log_set_callback = av_log_set_callback; + vectors.av_log_set_flags = av_log_set_flags; + vectors.av_log_set_level = av_log_set_level; + vectors.av_log2 = av_log2; + vectors.av_log2_16bit = av_log2_16bit; + vectors.av_malloc = av_malloc; + vectors.av_malloc_array = av_malloc_array; + vectors.av_mallocz = av_mallocz; + vectors.av_mallocz_array = av_mallocz_array; + vectors.av_mastering_display_metadata_alloc = av_mastering_display_metadata_alloc; + vectors.av_mastering_display_metadata_create_side_data = av_mastering_display_metadata_create_side_data; + vectors.av_match_ext = av_match_ext; + vectors.av_max_alloc = av_max_alloc; + vectors.av_memcpy_backptr = av_memcpy_backptr; + vectors.av_memdup = av_memdup; + vectors.av_mul_q = av_mul_q; + vectors.av_muxer_iterate = av_muxer_iterate; + vectors.av_nearer_q = av_nearer_q; + vectors.av_new_packet = av_new_packet; + vectors.av_new_program = av_new_program; + vectors.av_opt_child_class_iterate = av_opt_child_class_iterate; + vectors.av_opt_child_next = av_opt_child_next; + vectors.av_opt_copy = av_opt_copy; + vectors.av_opt_eval_double = av_opt_eval_double; 
+ vectors.av_opt_eval_flags = av_opt_eval_flags; + vectors.av_opt_eval_float = av_opt_eval_float; + vectors.av_opt_eval_int = av_opt_eval_int; + vectors.av_opt_eval_int64 = av_opt_eval_int64; + vectors.av_opt_eval_q = av_opt_eval_q; + vectors.av_opt_find = av_opt_find; + vectors.av_opt_find2 = av_opt_find2; + vectors.av_opt_flag_is_set = av_opt_flag_is_set; + vectors.av_opt_free = av_opt_free; + vectors.av_opt_freep_ranges = av_opt_freep_ranges; + vectors.av_opt_get = av_opt_get; + vectors.av_opt_get_channel_layout = av_opt_get_channel_layout; + vectors.av_opt_get_chlayout = av_opt_get_chlayout; + vectors.av_opt_get_dict_val = av_opt_get_dict_val; + vectors.av_opt_get_double = av_opt_get_double; + vectors.av_opt_get_image_size = av_opt_get_image_size; + vectors.av_opt_get_int = av_opt_get_int; + vectors.av_opt_get_key_value = av_opt_get_key_value; + vectors.av_opt_get_pixel_fmt = av_opt_get_pixel_fmt; + vectors.av_opt_get_q = av_opt_get_q; + vectors.av_opt_get_sample_fmt = av_opt_get_sample_fmt; + vectors.av_opt_get_video_rate = av_opt_get_video_rate; + vectors.av_opt_is_set_to_default = av_opt_is_set_to_default; + vectors.av_opt_is_set_to_default_by_name = av_opt_is_set_to_default_by_name; + vectors.av_opt_next = av_opt_next; + vectors.av_opt_ptr = av_opt_ptr; + vectors.av_opt_query_ranges = av_opt_query_ranges; + vectors.av_opt_query_ranges_default = av_opt_query_ranges_default; + vectors.av_opt_serialize = av_opt_serialize; + vectors.av_opt_set = av_opt_set; + vectors.av_opt_set_bin = av_opt_set_bin; + vectors.av_opt_set_channel_layout = av_opt_set_channel_layout; + vectors.av_opt_set_chlayout = av_opt_set_chlayout; + vectors.av_opt_set_defaults = av_opt_set_defaults; + vectors.av_opt_set_defaults2 = av_opt_set_defaults2; + vectors.av_opt_set_dict = av_opt_set_dict; + vectors.av_opt_set_dict_val = av_opt_set_dict_val; + vectors.av_opt_set_dict2 = av_opt_set_dict2; + vectors.av_opt_set_double = av_opt_set_double; + vectors.av_opt_set_from_string = 
av_opt_set_from_string; + vectors.av_opt_set_image_size = av_opt_set_image_size; + vectors.av_opt_set_int = av_opt_set_int; + vectors.av_opt_set_pixel_fmt = av_opt_set_pixel_fmt; + vectors.av_opt_set_q = av_opt_set_q; + vectors.av_opt_set_sample_fmt = av_opt_set_sample_fmt; + vectors.av_opt_set_video_rate = av_opt_set_video_rate; + vectors.av_opt_show2 = av_opt_show2; + vectors.av_output_audio_device_next = av_output_audio_device_next; + vectors.av_output_video_device_next = av_output_video_device_next; + vectors.av_packet_add_side_data = av_packet_add_side_data; + vectors.av_packet_alloc = av_packet_alloc; + vectors.av_packet_clone = av_packet_clone; + vectors.av_packet_copy_props = av_packet_copy_props; + vectors.av_packet_free = av_packet_free; + vectors.av_packet_free_side_data = av_packet_free_side_data; + vectors.av_packet_from_data = av_packet_from_data; + vectors.av_packet_get_side_data = av_packet_get_side_data; + vectors.av_packet_make_refcounted = av_packet_make_refcounted; + vectors.av_packet_make_writable = av_packet_make_writable; + vectors.av_packet_move_ref = av_packet_move_ref; + vectors.av_packet_new_side_data = av_packet_new_side_data; + vectors.av_packet_pack_dictionary = av_packet_pack_dictionary; + vectors.av_packet_ref = av_packet_ref; + vectors.av_packet_rescale_ts = av_packet_rescale_ts; + vectors.av_packet_shrink_side_data = av_packet_shrink_side_data; + vectors.av_packet_side_data_name = av_packet_side_data_name; + vectors.av_packet_unpack_dictionary = av_packet_unpack_dictionary; + vectors.av_packet_unref = av_packet_unref; + vectors.av_parse_cpu_caps = av_parse_cpu_caps; + vectors.av_parser_close = av_parser_close; + vectors.av_parser_init = av_parser_init; + vectors.av_parser_iterate = av_parser_iterate; + vectors.av_parser_parse2 = av_parser_parse2; + vectors.av_pix_fmt_count_planes = av_pix_fmt_count_planes; + vectors.av_pix_fmt_desc_get = av_pix_fmt_desc_get; + vectors.av_pix_fmt_desc_get_id = av_pix_fmt_desc_get_id; + 
vectors.av_pix_fmt_desc_next = av_pix_fmt_desc_next; + vectors.av_pix_fmt_get_chroma_sub_sample = av_pix_fmt_get_chroma_sub_sample; + vectors.av_pix_fmt_swap_endianness = av_pix_fmt_swap_endianness; + vectors.av_pkt_dump_log2 = av_pkt_dump_log2; + vectors.av_pkt_dump2 = av_pkt_dump2; + vectors.av_probe_input_buffer = av_probe_input_buffer; + vectors.av_probe_input_buffer2 = av_probe_input_buffer2; + vectors.av_probe_input_format = av_probe_input_format; + vectors.av_probe_input_format2 = av_probe_input_format2; + vectors.av_probe_input_format3 = av_probe_input_format3; + vectors.av_program_add_stream_index = av_program_add_stream_index; + vectors.av_q2intfloat = av_q2intfloat; + vectors.av_read_frame = av_read_frame; + vectors.av_read_image_line = av_read_image_line; + vectors.av_read_image_line2 = av_read_image_line2; + vectors.av_read_pause = av_read_pause; + vectors.av_read_play = av_read_play; + vectors.av_realloc = av_realloc; + vectors.av_realloc_array = av_realloc_array; + vectors.av_realloc_f = av_realloc_f; + vectors.av_reallocp = av_reallocp; + vectors.av_reallocp_array = av_reallocp_array; + vectors.av_reduce = av_reduce; + vectors.av_rescale = av_rescale; + vectors.av_rescale_delta = av_rescale_delta; + vectors.av_rescale_q = av_rescale_q; + vectors.av_rescale_q_rnd = av_rescale_q_rnd; + vectors.av_rescale_rnd = av_rescale_rnd; + vectors.av_sample_fmt_is_planar = av_sample_fmt_is_planar; + vectors.av_samples_alloc = av_samples_alloc; + vectors.av_samples_alloc_array_and_samples = av_samples_alloc_array_and_samples; + vectors.av_samples_copy = av_samples_copy; + vectors.av_samples_fill_arrays = av_samples_fill_arrays; + vectors.av_samples_get_buffer_size = av_samples_get_buffer_size; + vectors.av_samples_set_silence = av_samples_set_silence; + vectors.av_sdp_create = av_sdp_create; + vectors.av_seek_frame = av_seek_frame; + vectors.av_set_options_string = av_set_options_string; + vectors.av_shrink_packet = av_shrink_packet; + vectors.av_size_mult = 
av_size_mult; + vectors.av_strdup = av_strdup; + vectors.av_stream_add_side_data = av_stream_add_side_data; + vectors.av_stream_get_class = av_stream_get_class; + vectors.av_stream_get_codec_timebase = av_stream_get_codec_timebase; + vectors.av_stream_get_end_pts = av_stream_get_end_pts; + vectors.av_stream_get_parser = av_stream_get_parser; + vectors.av_stream_get_side_data = av_stream_get_side_data; + vectors.av_stream_new_side_data = av_stream_new_side_data; + vectors.av_strerror = av_strerror; + vectors.av_strndup = av_strndup; + vectors.av_sub_q = av_sub_q; + vectors.av_tempfile = av_tempfile; + vectors.av_timecode_adjust_ntsc_framenum2 = av_timecode_adjust_ntsc_framenum2; + vectors.av_timecode_check_frame_rate = av_timecode_check_frame_rate; + vectors.av_timecode_get_smpte = av_timecode_get_smpte; + vectors.av_timecode_get_smpte_from_framenum = av_timecode_get_smpte_from_framenum; + vectors.av_timecode_init = av_timecode_init; + vectors.av_timecode_init_from_components = av_timecode_init_from_components; + vectors.av_timecode_init_from_string = av_timecode_init_from_string; + vectors.av_timecode_make_mpeg_tc_string = av_timecode_make_mpeg_tc_string; + vectors.av_timecode_make_smpte_tc_string = av_timecode_make_smpte_tc_string; + vectors.av_timecode_make_smpte_tc_string2 = av_timecode_make_smpte_tc_string2; + vectors.av_timecode_make_string = av_timecode_make_string; + vectors.av_tree_destroy = av_tree_destroy; + vectors.av_tree_enumerate = av_tree_enumerate; + vectors.av_tree_find = av_tree_find; + vectors.av_tree_insert = av_tree_insert; + vectors.av_tree_node_alloc = av_tree_node_alloc; + vectors.av_url_split = av_url_split; + vectors.av_usleep = av_usleep; + vectors.av_version_info = av_version_info; + vectors.av_vlog = av_vlog; + vectors.av_write_frame = av_write_frame; + vectors.av_write_image_line = av_write_image_line; + vectors.av_write_image_line2 = av_write_image_line2; + vectors.av_write_trailer = av_write_trailer; + vectors.av_write_uncoded_frame 
= av_write_uncoded_frame; + vectors.av_write_uncoded_frame_query = av_write_uncoded_frame_query; + vectors.av_xiphlacing = av_xiphlacing; + vectors.avcodec_align_dimensions = avcodec_align_dimensions; + vectors.avcodec_align_dimensions2 = avcodec_align_dimensions2; + vectors.avcodec_alloc_context3 = avcodec_alloc_context3; + vectors.avcodec_chroma_pos_to_enum = avcodec_chroma_pos_to_enum; + vectors.avcodec_close = avcodec_close; + vectors.avcodec_configuration = avcodec_configuration; + vectors.avcodec_decode_subtitle2 = avcodec_decode_subtitle2; + vectors.avcodec_default_execute = avcodec_default_execute; + vectors.avcodec_default_execute2 = avcodec_default_execute2; + vectors.avcodec_default_get_buffer2 = avcodec_default_get_buffer2; + vectors.avcodec_default_get_encode_buffer = avcodec_default_get_encode_buffer; + vectors.avcodec_default_get_format = avcodec_default_get_format; + vectors.avcodec_descriptor_get = avcodec_descriptor_get; + vectors.avcodec_descriptor_get_by_name = avcodec_descriptor_get_by_name; + vectors.avcodec_descriptor_next = avcodec_descriptor_next; + vectors.avcodec_encode_subtitle = avcodec_encode_subtitle; + vectors.avcodec_enum_to_chroma_pos = avcodec_enum_to_chroma_pos; + vectors.avcodec_fill_audio_frame = avcodec_fill_audio_frame; + vectors.avcodec_find_best_pix_fmt_of_list = avcodec_find_best_pix_fmt_of_list; + vectors.avcodec_find_decoder = avcodec_find_decoder; + vectors.avcodec_find_decoder_by_name = avcodec_find_decoder_by_name; + vectors.avcodec_find_encoder = avcodec_find_encoder; + vectors.avcodec_find_encoder_by_name = avcodec_find_encoder_by_name; + vectors.avcodec_flush_buffers = avcodec_flush_buffers; + vectors.avcodec_free_context = avcodec_free_context; + vectors.avcodec_get_class = avcodec_get_class; + vectors.avcodec_get_frame_class = avcodec_get_frame_class; + vectors.avcodec_get_hw_config = avcodec_get_hw_config; + vectors.avcodec_get_hw_frames_parameters = avcodec_get_hw_frames_parameters; + vectors.avcodec_get_name = 
avcodec_get_name; + vectors.avcodec_get_subtitle_rect_class = avcodec_get_subtitle_rect_class; + vectors.avcodec_get_type = avcodec_get_type; + vectors.avcodec_is_open = avcodec_is_open; + vectors.avcodec_license = avcodec_license; + vectors.avcodec_open2 = avcodec_open2; + vectors.avcodec_parameters_alloc = avcodec_parameters_alloc; + vectors.avcodec_parameters_copy = avcodec_parameters_copy; + vectors.avcodec_parameters_free = avcodec_parameters_free; + vectors.avcodec_parameters_from_context = avcodec_parameters_from_context; + vectors.avcodec_parameters_to_context = avcodec_parameters_to_context; + vectors.avcodec_pix_fmt_to_codec_tag = avcodec_pix_fmt_to_codec_tag; + vectors.avcodec_profile_name = avcodec_profile_name; + vectors.avcodec_receive_frame = avcodec_receive_frame; + vectors.avcodec_receive_packet = avcodec_receive_packet; + vectors.avcodec_send_frame = avcodec_send_frame; + vectors.avcodec_send_packet = avcodec_send_packet; + vectors.avcodec_string = avcodec_string; + vectors.avcodec_version = avcodec_version; + vectors.avdevice_app_to_dev_control_message = avdevice_app_to_dev_control_message; + vectors.avdevice_capabilities_create = avdevice_capabilities_create; + vectors.avdevice_capabilities_free = avdevice_capabilities_free; + vectors.avdevice_configuration = avdevice_configuration; + vectors.avdevice_dev_to_app_control_message = avdevice_dev_to_app_control_message; + vectors.avdevice_free_list_devices = avdevice_free_list_devices; + vectors.avdevice_license = avdevice_license; + vectors.avdevice_list_devices = avdevice_list_devices; + vectors.avdevice_list_input_sources = avdevice_list_input_sources; + vectors.avdevice_list_output_sinks = avdevice_list_output_sinks; + vectors.avdevice_register_all = avdevice_register_all; + vectors.avdevice_version = avdevice_version; + vectors.avfilter_config_links = avfilter_config_links; + vectors.avfilter_configuration = avfilter_configuration; + vectors.avfilter_filter_pad_count = 
avfilter_filter_pad_count; + vectors.avfilter_free = avfilter_free; + vectors.avfilter_get_by_name = avfilter_get_by_name; + vectors.avfilter_get_class = avfilter_get_class; + vectors.avfilter_graph_alloc = avfilter_graph_alloc; + vectors.avfilter_graph_alloc_filter = avfilter_graph_alloc_filter; + vectors.avfilter_graph_config = avfilter_graph_config; + vectors.avfilter_graph_create_filter = avfilter_graph_create_filter; + vectors.avfilter_graph_dump = avfilter_graph_dump; + vectors.avfilter_graph_free = avfilter_graph_free; + vectors.avfilter_graph_get_filter = avfilter_graph_get_filter; + vectors.avfilter_graph_parse = avfilter_graph_parse; + vectors.avfilter_graph_parse_ptr = avfilter_graph_parse_ptr; + vectors.avfilter_graph_parse2 = avfilter_graph_parse2; + vectors.avfilter_graph_queue_command = avfilter_graph_queue_command; + vectors.avfilter_graph_request_oldest = avfilter_graph_request_oldest; + vectors.avfilter_graph_send_command = avfilter_graph_send_command; + vectors.avfilter_graph_set_auto_convert = avfilter_graph_set_auto_convert; + vectors.avfilter_init_dict = avfilter_init_dict; + vectors.avfilter_init_str = avfilter_init_str; + vectors.avfilter_inout_alloc = avfilter_inout_alloc; + vectors.avfilter_inout_free = avfilter_inout_free; + vectors.avfilter_insert_filter = avfilter_insert_filter; + vectors.avfilter_license = avfilter_license; + vectors.avfilter_link = avfilter_link; + vectors.avfilter_link_free = avfilter_link_free; + vectors.avfilter_pad_count = avfilter_pad_count; + vectors.avfilter_pad_get_name = avfilter_pad_get_name; + vectors.avfilter_pad_get_type = avfilter_pad_get_type; + vectors.avfilter_process_command = avfilter_process_command; + vectors.avfilter_version = avfilter_version; + vectors.avformat_alloc_context = avformat_alloc_context; + vectors.avformat_alloc_output_context2 = avformat_alloc_output_context2; + vectors.avformat_close_input = avformat_close_input; + vectors.avformat_configuration = avformat_configuration; + 
vectors.avformat_find_stream_info = avformat_find_stream_info; + vectors.avformat_flush = avformat_flush; + vectors.avformat_free_context = avformat_free_context; + vectors.avformat_get_class = avformat_get_class; + vectors.avformat_get_mov_audio_tags = avformat_get_mov_audio_tags; + vectors.avformat_get_mov_video_tags = avformat_get_mov_video_tags; + vectors.avformat_get_riff_audio_tags = avformat_get_riff_audio_tags; + vectors.avformat_get_riff_video_tags = avformat_get_riff_video_tags; + vectors.avformat_index_get_entries_count = avformat_index_get_entries_count; + vectors.avformat_index_get_entry = avformat_index_get_entry; + vectors.avformat_index_get_entry_from_timestamp = avformat_index_get_entry_from_timestamp; + vectors.avformat_init_output = avformat_init_output; + vectors.avformat_license = avformat_license; + vectors.avformat_match_stream_specifier = avformat_match_stream_specifier; + vectors.avformat_network_deinit = avformat_network_deinit; + vectors.avformat_network_init = avformat_network_init; + vectors.avformat_new_stream = avformat_new_stream; + vectors.avformat_open_input = avformat_open_input; + vectors.avformat_query_codec = avformat_query_codec; + vectors.avformat_queue_attached_pictures = avformat_queue_attached_pictures; + vectors.avformat_seek_file = avformat_seek_file; + vectors.avformat_transfer_internal_stream_timing_info = avformat_transfer_internal_stream_timing_info; + vectors.avformat_version = avformat_version; + vectors.avformat_write_header = avformat_write_header; + vectors.avio_accept = avio_accept; + vectors.avio_alloc_context = avio_alloc_context; + vectors.avio_check = avio_check; + vectors.avio_close = avio_close; + vectors.avio_close_dir = avio_close_dir; + vectors.avio_close_dyn_buf = avio_close_dyn_buf; + vectors.avio_closep = avio_closep; + vectors.avio_context_free = avio_context_free; + vectors.avio_enum_protocols = avio_enum_protocols; + vectors.avio_feof = avio_feof; + vectors.avio_find_protocol_name = 
avio_find_protocol_name; + vectors.avio_flush = avio_flush; + vectors.avio_free_directory_entry = avio_free_directory_entry; + vectors.avio_get_dyn_buf = avio_get_dyn_buf; + vectors.avio_get_str = avio_get_str; + vectors.avio_get_str16be = avio_get_str16be; + vectors.avio_get_str16le = avio_get_str16le; + vectors.avio_handshake = avio_handshake; + vectors.avio_open = avio_open; + vectors.avio_open_dir = avio_open_dir; + vectors.avio_open_dyn_buf = avio_open_dyn_buf; + vectors.avio_open2 = avio_open2; + vectors.avio_pause = avio_pause; + vectors.avio_print_string_array = avio_print_string_array; + vectors.avio_printf = avio_printf; + vectors.avio_protocol_get_class = avio_protocol_get_class; + vectors.avio_put_str = avio_put_str; + vectors.avio_put_str16be = avio_put_str16be; + vectors.avio_put_str16le = avio_put_str16le; + vectors.avio_r8 = avio_r8; + vectors.avio_rb16 = avio_rb16; + vectors.avio_rb24 = avio_rb24; + vectors.avio_rb32 = avio_rb32; + vectors.avio_rb64 = avio_rb64; + vectors.avio_read = avio_read; + vectors.avio_read_dir = avio_read_dir; + vectors.avio_read_partial = avio_read_partial; + vectors.avio_read_to_bprint = avio_read_to_bprint; + vectors.avio_rl16 = avio_rl16; + vectors.avio_rl24 = avio_rl24; + vectors.avio_rl32 = avio_rl32; + vectors.avio_rl64 = avio_rl64; + vectors.avio_seek = avio_seek; + vectors.avio_seek_time = avio_seek_time; + vectors.avio_size = avio_size; + vectors.avio_skip = avio_skip; + vectors.avio_vprintf = avio_vprintf; + vectors.avio_w8 = avio_w8; + vectors.avio_wb16 = avio_wb16; + vectors.avio_wb24 = avio_wb24; + vectors.avio_wb32 = avio_wb32; + vectors.avio_wb64 = avio_wb64; + vectors.avio_wl16 = avio_wl16; + vectors.avio_wl24 = avio_wl24; + vectors.avio_wl32 = avio_wl32; + vectors.avio_wl64 = avio_wl64; + vectors.avio_write = avio_write; + vectors.avio_write_marker = avio_write_marker; + vectors.avsubtitle_free = avsubtitle_free; + vectors.avutil_configuration = avutil_configuration; + vectors.avutil_license = 
avutil_license; + vectors.avutil_version = avutil_version; + vectors.postproc_configuration = postproc_configuration; + vectors.postproc_license = postproc_license; + vectors.postproc_version = postproc_version; + vectors.pp_free_context = pp_free_context; + vectors.pp_free_mode = pp_free_mode; + vectors.pp_get_context = pp_get_context; + vectors.pp_get_mode_by_name_and_quality = pp_get_mode_by_name_and_quality; + vectors.pp_postprocess = pp_postprocess; + vectors.swr_alloc = swr_alloc; + vectors.swr_alloc_set_opts = swr_alloc_set_opts; + vectors.swr_alloc_set_opts2 = swr_alloc_set_opts2; + vectors.swr_build_matrix = swr_build_matrix; + vectors.swr_build_matrix2 = swr_build_matrix2; + vectors.swr_close = swr_close; + vectors.swr_config_frame = swr_config_frame; + vectors.swr_convert = swr_convert; + vectors.swr_convert_frame = swr_convert_frame; + vectors.swr_drop_output = swr_drop_output; + vectors.swr_free = swr_free; + vectors.swr_get_class = swr_get_class; + vectors.swr_get_delay = swr_get_delay; + vectors.swr_get_out_samples = swr_get_out_samples; + vectors.swr_init = swr_init; + vectors.swr_inject_silence = swr_inject_silence; + vectors.swr_is_initialized = swr_is_initialized; + vectors.swr_next_pts = swr_next_pts; + vectors.swr_set_channel_mapping = swr_set_channel_mapping; + vectors.swr_set_compensation = swr_set_compensation; + vectors.swr_set_matrix = swr_set_matrix; + vectors.swresample_configuration = swresample_configuration; + vectors.swresample_license = swresample_license; + vectors.swresample_version = swresample_version; + vectors.sws_alloc_context = sws_alloc_context; + vectors.sws_allocVec = sws_allocVec; + vectors.sws_convertPalette8ToPacked24 = sws_convertPalette8ToPacked24; + vectors.sws_convertPalette8ToPacked32 = sws_convertPalette8ToPacked32; + vectors.sws_frame_end = sws_frame_end; + vectors.sws_frame_start = sws_frame_start; + vectors.sws_freeContext = sws_freeContext; + vectors.sws_freeFilter = sws_freeFilter; + vectors.sws_freeVec = 
sws_freeVec; + vectors.sws_get_class = sws_get_class; + vectors.sws_getCachedContext = sws_getCachedContext; + vectors.sws_getCoefficients = sws_getCoefficients; + vectors.sws_getColorspaceDetails = sws_getColorspaceDetails; + vectors.sws_getContext = sws_getContext; + vectors.sws_getDefaultFilter = sws_getDefaultFilter; + vectors.sws_getGaussianVec = sws_getGaussianVec; + vectors.sws_init_context = sws_init_context; + vectors.sws_isSupportedEndiannessConversion = sws_isSupportedEndiannessConversion; + vectors.sws_isSupportedInput = sws_isSupportedInput; + vectors.sws_isSupportedOutput = sws_isSupportedOutput; + vectors.sws_normalizeVec = sws_normalizeVec; + vectors.sws_receive_slice = sws_receive_slice; + vectors.sws_receive_slice_alignment = sws_receive_slice_alignment; + vectors.sws_scale = sws_scale; + vectors.sws_scale_frame = sws_scale_frame; + vectors.sws_scaleVec = sws_scaleVec; + vectors.sws_send_slice = sws_send_slice; + vectors.sws_setColorspaceDetails = sws_setColorspaceDetails; + vectors.swscale_configuration = swscale_configuration; + vectors.swscale_license = swscale_license; + vectors.swscale_version = swscale_version; + } +} diff --git a/FFmpeg.AutoGen.ClangMacroParser.Test/FFmpeg.AutoGen.ClangMacroParser.Test.csproj b/FFmpeg.AutoGen.ClangMacroParser.Test/FFmpeg.AutoGen.ClangMacroParser.Test.csproj index 044e14b4..facc239c 100644 --- a/FFmpeg.AutoGen.ClangMacroParser.Test/FFmpeg.AutoGen.ClangMacroParser.Test.csproj +++ b/FFmpeg.AutoGen.ClangMacroParser.Test/FFmpeg.AutoGen.ClangMacroParser.Test.csproj @@ -9,9 +9,9 @@ - - - + + + @@ -19,4 +19,8 @@ + + + + \ No newline at end of file diff --git a/FFmpeg.AutoGen.ClangMacroParser/FFmpeg.AutoGen.ClangMacroParser.csproj b/FFmpeg.AutoGen.ClangMacroParser/FFmpeg.AutoGen.ClangMacroParser.csproj index b9c378bd..895aa17f 100644 --- a/FFmpeg.AutoGen.ClangMacroParser/FFmpeg.AutoGen.ClangMacroParser.csproj +++ b/FFmpeg.AutoGen.ClangMacroParser/FFmpeg.AutoGen.ClangMacroParser.csproj @@ -5,4 +5,8 @@ enable + + + + 
\ No newline at end of file diff --git a/FFmpeg.AutoGen.ClangMacroParser/Parser.cs b/FFmpeg.AutoGen.ClangMacroParser/Parser.cs index f7b4e27e..45f624f1 100644 --- a/FFmpeg.AutoGen.ClangMacroParser/Parser.cs +++ b/FFmpeg.AutoGen.ClangMacroParser/Parser.cs @@ -11,7 +11,7 @@ public static class Parser { public static IExpression Parse(string expression) { - var tokens = Tokenizer.Tokenize(expression).ToArray(); + Token[] tokens = Tokenizer.Tokenize(expression).ToArray(); var i = 0; bool CanRead() => i < tokens.Length; diff --git a/FFmpeg.AutoGen.ClangMacroParser/Tokenization/Tokenizer.cs b/FFmpeg.AutoGen.ClangMacroParser/Tokenization/Tokenizer.cs index 72ad73f4..6bc2140e 100644 --- a/FFmpeg.AutoGen.ClangMacroParser/Tokenization/Tokenizer.cs +++ b/FFmpeg.AutoGen.ClangMacroParser/Tokenization/Tokenizer.cs @@ -31,7 +31,7 @@ public static IEnumerable Tokenize(string expression) bool IsIdentifierStart(char x) => x == '_' || IsAz(x); bool IsId(char x) => IsIdentifierStart(x) || Digits.Contains(x) || x == '.'; - var characters = expression.ToCharArray(); + char[] characters = expression.ToCharArray(); var i = 0; bool CanRead() => i < characters.Length; diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/CliOptions.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/CliOptions.cs index dd2f9a5c..fcbb6e35 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/CliOptions.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/CliOptions.cs @@ -15,11 +15,11 @@ public class CliOptions HelpText = "The namespace that will contain the generated symbols.")] public string Namespace { get; set; } - [Option('c', - "class", + [Option('t', + "type", Default = "ffmpeg", - HelpText = "The name of the class that contains the FFmpeg unmanaged method calls.")] - public string ClassName { get; set; } + HelpText = "The name of the type that contains the FFmpeg unmanaged method calls.")] + public string TypeName { get; set; } /// /// See http://ybeernet.blogspot.ro/2011/03/techniques-of-calling-unmanaged-code.html. 
@@ -49,11 +49,11 @@ public class CliOptions HelpText = "The path to the directory that contains the FFmpeg binaries.")] public string FFmpegBinDir { get; set; } - [Option('o', + [Option('s', "output", Required = false, - HelpText = "The path to the directory where to output the generated files.")] - public string OutputDir { get; set; } + HelpText = "The path to the solution directory.")] + public string SolutionDir { get; set; } [Option('v', HelpText = "Print details during execution.")] @@ -61,7 +61,7 @@ public class CliOptions public static CliOptions ParseArgumentsStrict(string[] args) { - var result = Parser.Default.ParseArguments(args); + var result = CommandLine.Parser.Default.ParseArguments(args); var options = result.MapResult(x => x, x => new CliOptions()); options.Normalize(); return options; @@ -70,15 +70,13 @@ public static CliOptions ParseArgumentsStrict(string[] args) private void Normalize() { // Support for the original path setup - var solutionDir = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "../../../../"); + if (string.IsNullOrWhiteSpace(SolutionDir)) SolutionDir = Path.GetFullPath(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "../../../../")); if (string.IsNullOrWhiteSpace(FFmpegDir) && string.IsNullOrWhiteSpace(FFmpegIncludesDir) && string.IsNullOrWhiteSpace(FFmpegBinDir)) - FFmpegDir = Path.Combine(solutionDir, "FFmpeg"); - - if (string.IsNullOrWhiteSpace(OutputDir)) OutputDir = Path.Combine(solutionDir, "FFmpeg.AutoGen/"); - + FFmpegDir = Path.Combine(SolutionDir, "FFmpeg"); + // If the FFmpegDir option is specified, it will take precedence if (!string.IsNullOrWhiteSpace(FFmpegDir)) { @@ -116,16 +114,5 @@ private void Normalize() "the FFmpeg headers does not exist."); Environment.Exit(1); } - - if (!Directory.Exists(OutputDir)) - { - Console.WriteLine("The output directory does not exist."); - Environment.Exit(1); - } - - // Resolve paths - FFmpegIncludesDir = Path.GetFullPath(FFmpegIncludesDir); - FFmpegBinDir = 
Path.GetFullPath(FFmpegBinDir); - OutputDir = Path.GetFullPath(OutputDir); } } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/DelegateDefinition.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/DelegateDefinition.cs index b64a1267..96b16d27 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/DelegateDefinition.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/DelegateDefinition.cs @@ -1,9 +1,10 @@ -namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; +using System; -internal class DelegateDefinition : TypeDefinition, ICanGenerateXmlDoc +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; + +internal record DelegateDefinition : TypeDefinition { public string FunctionName { get; init; } public TypeDefinition ReturnType { get; init; } - public FunctionParameter[] Parameters { get; init; } - public string Content { get; set; } + public FunctionParameter[] Parameters { get; init; } = Array.Empty(); } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/EnumerationDefinition.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/EnumerationDefinition.cs index ee3ffbf0..9f648e0a 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/EnumerationDefinition.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/EnumerationDefinition.cs @@ -1,6 +1,8 @@ +using System; + namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -internal class EnumerationDefinition : NamedDefinition, IDefinition +internal record EnumerationDefinition : NamedDefinition, IDefinition { - public EnumerationItem[] Items { get; set; } + public EnumerationItem[] Items { get; init; } = Array.Empty(); } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/EnumerationItem.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/EnumerationItem.cs index ea85e387..4f161bb9 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/EnumerationItem.cs +++ 
b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/EnumerationItem.cs @@ -1,6 +1,6 @@ namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -internal class EnumerationItem : ICanGenerateXmlDoc +internal record EnumerationItem : ICanGenerateXmlDoc { public string Name { get; init; } public string Value { get; init; } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/ExportFunctionDefinition.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/ExportFunctionDefinition.cs index f1bf168f..ade75bdc 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/ExportFunctionDefinition.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/ExportFunctionDefinition.cs @@ -1,7 +1,7 @@ namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -internal class ExportFunctionDefinition : FunctionDefinitionBase +internal record ExportFunctionDefinition : FunctionDefinitionBase { - public string LibraryName { get; set; } - public int LibraryVersion { get; set; } + public string LibraryName { get; init; } + public int LibraryVersion { get; init; } } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/FixedArrayDefinition.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/FixedArrayDefinition.cs index a1e3f39f..c5ae20f3 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/FixedArrayDefinition.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/FixedArrayDefinition.cs @@ -1,8 +1,9 @@ namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -internal class FixedArrayDefinition : TypeDefinition +internal record FixedArrayDefinition : TypeDefinition { public TypeDefinition ElementType { get; init; } - public int Size { get; init; } + public int Length { get; init; } public bool IsPrimitive { get; init; } + public bool IsPointer { get; init; } } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/FunctionDefinitionBase.cs 
b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/FunctionDefinitionBase.cs index f37e8e0c..c019216e 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/FunctionDefinitionBase.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/FunctionDefinitionBase.cs @@ -1,9 +1,11 @@ +using System; + namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -internal class FunctionDefinitionBase : IDefinition, ICanGenerateXmlDoc, IObsoletionAware +internal record FunctionDefinitionBase : IDefinition, ICanGenerateXmlDoc, IObsoletionAware { public TypeDefinition ReturnType { get; set; } - public FunctionParameter[] Parameters { get; set; } + public FunctionParameter[] Parameters { get; set; } = Array.Empty(); public string ReturnComment { get; set; } public string Content { get; set; } public string Name { get; set; } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/FunctionParameter.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/FunctionParameter.cs index c0539103..2f5385a9 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/FunctionParameter.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/FunctionParameter.cs @@ -1,8 +1,11 @@ namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -internal class FunctionParameter : ICanGenerateXmlDoc +internal record FunctionParameter : ICanGenerateXmlDoc { public string Name { get; init; } public TypeDefinition Type { get; init; } - public string Content { get; set; } + public string Content { get; init; } + public bool IsConstant { get; init; } + public bool IsIndirect { get; init; } + public bool ByReference { get; init; } } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/InlineFunctionDefinition.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/InlineFunctionDefinition.cs index cf63a6e3..16fd0017 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/InlineFunctionDefinition.cs +++ 
b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/InlineFunctionDefinition.cs @@ -1,7 +1,7 @@ namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -internal class InlineFunctionDefinition : FunctionDefinitionBase +internal record InlineFunctionDefinition : FunctionDefinitionBase { - public string Body { get; set; } - public string OriginalBodyHash { get; set; } + public string Body { get; init; } + public string OriginalBodyHash { get; init; } } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/MacroDefinition.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/MacroDefinition.cs index 5984aa72..f6393dd9 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/MacroDefinition.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/MacroDefinition.cs @@ -1,6 +1,6 @@ namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -internal class MacroDefinition : IDefinition, ICanGenerateXmlDoc +internal record MacroDefinition : IDefinition, ICanGenerateXmlDoc { public string Expression { get; set; } public string TypeName { get; set; } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/NamedDefinition.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/NamedDefinition.cs index 8272ae47..30ad731c 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/NamedDefinition.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/NamedDefinition.cs @@ -1,6 +1,6 @@ namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -internal class NamedDefinition : ICanGenerateXmlDoc, IObsoletionAware +internal record NamedDefinition : ICanGenerateXmlDoc, IObsoletionAware { public string Name { get; init; } public string TypeName { get; init; } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/Obsoletion.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/Obsoletion.cs index 7614b969..c44df48d 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/Obsoletion.cs +++ 
b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/Obsoletion.cs @@ -1,6 +1,6 @@ namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -public struct Obsoletion +public record struct Obsoletion { public bool IsObsolete { get; init; } public string Message { get; init; } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/StructureDefinition.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/StructureDefinition.cs index a46cdd08..ca03c592 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/StructureDefinition.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/StructureDefinition.cs @@ -1,8 +1,10 @@ -namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; +using System; -internal class StructureDefinition : NamedDefinition, IDefinition +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; + +internal record StructureDefinition : NamedDefinition, IDefinition { - public StructureField[] Fields { get; set; } = { }; + public StructureField[] Fields { get; set; } = Array.Empty(); public bool IsComplete { get; set; } public bool IsUnion { get; init; } } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/StructureField.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/StructureField.cs index 69dfcd49..d8fbd522 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/StructureField.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/StructureField.cs @@ -1,6 +1,6 @@ namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -internal class StructureField : ICanGenerateXmlDoc, IObsoletionAware +internal record StructureField : ICanGenerateXmlDoc, IObsoletionAware { public string Name { get; init; } public TypeDefinition FieldType { get; init; } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/TypeDefinition.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/TypeDefinition.cs index 8a1167f0..c99e70e4 100644 --- 
a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/TypeDefinition.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Definitions/TypeDefinition.cs @@ -1,8 +1,11 @@ +using System; + namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -internal class TypeDefinition : IDefinition +internal record TypeDefinition : IDefinition { - public string[] Attributes { get; init; } = { }; + public string[] Attributes { get; init; } = Array.Empty(); public bool ByReference { get; init; } public string Name { get; init; } + public string LegacyName { get; init; } } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/ExistingInlineFunctionsHelper.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/ExistingInlineFunctionsHelper.cs index 63f42015..8401471c 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/ExistingInlineFunctionsHelper.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/ExistingInlineFunctionsHelper.cs @@ -1,6 +1,5 @@ // Copyright 2020 Craytive Technologies BV. All rights reserved. Company proprietary and confidential. 
-using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; @@ -19,12 +18,11 @@ internal static class ExistingInlineFunctionsHelper @"\s+// original body hash: (?\S+)", RegexOptions.Compiled | RegexOptions.Multiline); - public static InlineFunctionDefinition[] LoadInlineFunctions(string path) + public static IEnumerable LoadInlineFunctions(string path) { - if (!File.Exists(path)) return Array.Empty(); + if (!File.Exists(path)) yield break; var text = File.ReadAllText(path); - var functions = new List(); var nameMatches = FunctionNameRegex.Matches(text); var hashMatches = FunctionHashRegex.Matches(text); @@ -47,9 +45,7 @@ public static InlineFunctionDefinition[] LoadInlineFunctions(string path) Body = body, OriginalBodyHash = hash }; - functions.Add(function); + yield return function; } - - return functions.ToArray(); } } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/FFmpeg.AutoGen.CppSharpUnsafeGenerator.csproj b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/FFmpeg.AutoGen.CppSharpUnsafeGenerator.csproj index 3eac99b9..fd8b7e2f 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/FFmpeg.AutoGen.CppSharpUnsafeGenerator.csproj +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/FFmpeg.AutoGen.CppSharpUnsafeGenerator.csproj @@ -7,7 +7,7 @@ - + @@ -16,7 +16,7 @@ - + diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/FunctionExport.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/FunctionExport.cs index 057e0382..583a7974 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/FunctionExport.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/FunctionExport.cs @@ -2,8 +2,8 @@ namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator; -[DebuggerDisplay("{Name}, {LibraryName}")] -internal class FunctionExport +[DebuggerDisplay("{Name}, {LibraryName}-{LibraryVersion}")] +internal record FunctionExport { public string Name { get; init; } public string LibraryName { get; init; } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/DelegatesGenerator.cs 
b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/DelegatesGenerator.cs new file mode 100644 index 00000000..c655731d --- /dev/null +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/DelegatesGenerator.cs @@ -0,0 +1,45 @@ +using System.Collections.Generic; +using System.Linq; +using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; + +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Generation; + +internal sealed class DelegatesGenerator : GeneratorBase +{ + public DelegatesGenerator(string path, GenerationContext context) : base(path, context) + { + } + + public static void Generate(string path, GenerationContext context) + { + using var g = new DelegatesGenerator(path, context); + g.Generate(); + } + + public override IEnumerable Usings() + { + yield return "System"; + yield return "System.Runtime.InteropServices"; + } + + protected override void GenerateDefinition(DelegateDefinition @delegate) + { + @delegate.Parameters.ToList().ForEach(x => this.WriteParam(x, x.Name)); + + var parameters = ParametersHelper.GetParameters(@delegate.Parameters, Context.IsLegacyGenerationOn); + WriteLine("[UnmanagedFunctionPointer(CallingConvention.Cdecl)]"); + WriteLine($"public unsafe delegate {@delegate.ReturnType.Name} {@delegate.FunctionName} ({parameters});"); + + WriteLine($"public unsafe struct {@delegate.Name}"); + + using (BeginBlock()) + { + WriteLine("public IntPtr Pointer;"); + Write($"public static implicit operator {@delegate.Name}({@delegate.FunctionName} func) => "); + Write($"new {@delegate.Name} {{ Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }};"); + WriteLine(); + } + + WriteLine(); + } +} diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/DocExtensions.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/DocExtensions.cs new file mode 100644 index 00000000..0d3f48ae --- /dev/null +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/DocExtensions.cs @@ -0,0 +1,35 @@ +using System.Security; +using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; + +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Generation; + +internal static class DocExtensions +{ + public static void WriteParam(this GeneratorBase generator, ICanGenerateXmlDoc value, string name) + { + var content = value.Content?.Trim(); + + if (!string.IsNullOrWhiteSpace(content)) generator.WriteLine($"/// {EscapeXmlString(content)}"); + } + + public static void WriteSummary(this GeneratorBase generator, ICanGenerateXmlDoc xmlDoc) + { + var content = xmlDoc.Content?.Trim(); + if (!string.IsNullOrWhiteSpace(content)) generator.WriteLine($"/// {EscapeXmlString(content)}"); + } + + public static void WriteReturnComment(this GeneratorBase generator, FunctionDefinitionBase function) + { + var content = function.ReturnComment?.Trim(); + if (!string.IsNullOrWhiteSpace(content)) generator.WriteLine($"/// {EscapeXmlString(content)}"); + } + + public static void WriteObsoletion(this GeneratorBase generator, IObsoletionAware obsoletionAware) + { + var obsoletion = obsoletionAware.Obsoletion; + if (obsoletion.IsObsolete) generator.WriteLine(string.IsNullOrWhiteSpace(obsoletion.Message) ? 
"[Obsolete()]" : $"[Obsolete(\"{EscapeQuotes(obsoletion.Message)}\")]"); + } + + private static string EscapeXmlString(string content) => SecurityElement.Escape(content); + private static string EscapeQuotes(string s) => s.Replace("\"", "\\\""); +} diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/EnumsGenerator.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/EnumsGenerator.cs new file mode 100644 index 00000000..08590874 --- /dev/null +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/EnumsGenerator.cs @@ -0,0 +1,32 @@ +using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; + +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Generation; + +internal sealed class EnumsGenerator : GeneratorBase +{ + public EnumsGenerator(string path, GenerationContext context) : base(path, context) + { + } + + public static void Generate(string path, GenerationContext context) + { + using var g = new EnumsGenerator(path, context); + g.Generate(); + } + + protected override void GenerateDefinition(EnumerationDefinition @enum) + { + this.WriteSummary(@enum); + this.WriteObsoletion(@enum); + WriteLine($"public enum {@enum.Name} : {@enum.TypeName}"); + + using (BeginBlock()) + foreach (var item in @enum.Items) + { + this.WriteSummary(item); + WriteLine($"@{item.Name} = {item.Value},"); + } + + WriteLine(); + } +} diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/FixedArraysGenerator.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/FixedArraysGenerator.cs new file mode 100644 index 00000000..ee8e761c --- /dev/null +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/FixedArraysGenerator.cs @@ -0,0 +1,97 @@ +using System.Collections.Generic; +using System.Linq; +using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; + +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Generation; + +internal sealed class FixedArraysGenerator : GeneratorBase +{ + public FixedArraysGenerator(string path, GenerationContext context) : base(path, context) + { 
+ } + + public static void Generate(string path, GenerationContext context) + { + using var g = new FixedArraysGenerator(path, context); + g.Generate(); + } + + public override IEnumerable Usings() + { + yield return "System"; + } + + protected override IEnumerable Query(IEnumerable arrays) => + arrays.OrderBy(a => a.Length).ThenBy(a => a.Name); + + protected override void GenerateDefinition(FixedArrayDefinition array) + { + var useLegacy = Context.IsLegacyGenerationOn; + var length = array.Length; + var elementType = array.ElementType; + var elementTypeName = ParametersHelper.GetTypeName(elementType, useLegacy); + var arrayName = useLegacy ? array.LegacyName : array.Name; + + WriteLine(array.IsPointer ? $"public unsafe struct {arrayName} : IFixedArray" : $"public unsafe struct {arrayName} : IFixedArray<{elementTypeName}>"); + + using (BeginBlock()) + { + var lengthPropertyName = useLegacy ? "Size" : "ArrayLength"; + WriteLine($"public static readonly int {lengthPropertyName} = {length};"); + WriteLine($"public int Length => {length};"); + + if (array.IsPrimitive) WritePrimitiveFixedArray(elementTypeName, length); + else WriteComplexFixedArray(elementTypeName, length); + + WriteLine($"public static implicit operator {elementTypeName}[]({arrayName} @struct) => @struct.ToArray();"); + } + + WriteLine(); + } + + + private void WritePrimitiveFixedArray(string elementType, int length) + { + WriteLine($"fixed {elementType} _[{length}];"); + WriteLine(); + + WriteLine($"public {elementType} this[uint i]"); + + using (BeginBlock()) + { + WriteLine("get => _[i];"); + WriteLine("set => _[i] = value;"); + } + + WriteLine($"public {elementType}[] ToArray()"); + using (BeginBlock()) + WriteLine($"var a = new {elementType}[{length}]; for (uint i = 0; i < {length}; i++) a[i] = _[i]; return a;"); + + WriteLine($"public void UpdateFrom({elementType}[] array)"); + using (BeginBlock()) + WriteLine($"uint i = 0; foreach(var value in array) {{ _[i++] = value; if (i >= {length}) return; 
}}"); + } + + private void WriteComplexFixedArray(string elementType, int length) + { + WriteLine(string.Join(" ", Enumerable.Range(0, length).Select(i => $"{elementType} _{i};"))); + WriteLine(); + + var @fixed = $"fixed ({elementType}* p0 = &_0)"; + WriteLine($"public {elementType} this[uint i]"); + + using (BeginBlock()) + { + WriteLine($"get {{ if (i >= {length}) throw new ArgumentOutOfRangeException(); {@fixed} {{ return *(p0 + i); }} }}"); + WriteLine($"set {{ if (i >= {length}) throw new ArgumentOutOfRangeException(); {@fixed} {{ *(p0 + i) = value; }} }}"); + } + + WriteLine($"public {elementType}[] ToArray()"); + using (BeginBlock()) + WriteLine($"{@fixed} {{ var a = new {elementType}[{length}]; for (uint i = 0; i < {length}; i++) a[i] = *(p0 + i); return a; }}"); + + WriteLine($"public void UpdateFrom({elementType}[] array)"); + using (BeginBlock()) + WriteLine($"{@fixed} {{ uint i = 0; foreach(var value in array) {{ *(p0 + i++) = value; if (i >= {length}) return; }} }}"); + } +} diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/FunctionsGenerator.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/FunctionsGenerator.cs new file mode 100644 index 00000000..bd560b7b --- /dev/null +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/FunctionsGenerator.cs @@ -0,0 +1,169 @@ +using System.Collections.Generic; +using System.Linq; +using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; + +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Generation; + +internal sealed class FunctionsGenerator : GeneratorBase +{ + private const string SuppressUnmanagedCodeSecurityAttribute = "[SuppressUnmanagedCodeSecurity]"; + private const string UnmanagedFunctionPointerAttribute = "[UnmanagedFunctionPointer(CallingConvention.Cdecl)]"; + public FunctionsGenerator(string path, GenerationContext context) : base(path, context) => IsTypeGenerationOn = true; + + public bool IsFacadeGenerationOn { get; set; } + public bool IsVectorsGenerationOn { get; set; } + 
public bool IsStaticallyLinkedGenerationOn { get; set; } + public bool IsDynamicallyLinkedGenerationOn { get; set; } + public bool IsDynamicallyLoadedGenerationOn { get; set; } + + public static void GenerateFacade(string path, GenerationContext context) + { + using var g = new FunctionsGenerator(path, context); + g.IsFacadeGenerationOn = true; + g.Generate(); + } + + public static void GenerateVectors(string path, GenerationContext context) + { + using var g = new FunctionsGenerator(path, context); + g.IsVectorsGenerationOn = true; + g.Generate(); + } + + + public static void GenerateStaticallyLinked(string path, GenerationContext context) + { + using var g = new FunctionsGenerator(path, context); + g.IsStaticallyLinkedGenerationOn = true; + g.Generate(); + } + + public static void GenerateDynamicallyLinked(string path, GenerationContext context) + { + using var g = new FunctionsGenerator(path, context); + g.IsDynamicallyLinkedGenerationOn = true; + g.Generate(); + } + + public static void GenerateDynamicallyLoaded(string path, GenerationContext context) + { + using var g = new FunctionsGenerator(path, context); + g.IsDynamicallyLoadedGenerationOn = true; + g.Generate(); + } + + public override IEnumerable Usings() + { + yield return "System"; + yield return "System.Runtime.InteropServices"; + if (!Context.IsLegacyGenerationOn && (IsStaticallyLinkedGenerationOn || IsDynamicallyLinkedGenerationOn || IsDynamicallyLoadedGenerationOn)) + yield return "FFmpeg.AutoGen.Abstractions"; + } + + protected override void GenerateDefinitions(ExportFunctionDefinition[] functions) + { + if (IsDynamicallyLoadedGenerationOn) + { + WriteLine("public static bool ThrowErrorIfFunctionNotFound;"); + WriteLine("public static IFunctionResolver FunctionResolver;"); + WriteLine(); + } + + base.GenerateDefinitions(functions); + + if (IsStaticallyLinkedGenerationOn || IsDynamicallyLinkedGenerationOn || IsDynamicallyLoadedGenerationOn) + { + WriteLine("public unsafe static void Initialize()"); 
+ + using (BeginBlock()) + if (IsDynamicallyLoadedGenerationOn) + { + WriteLine("if (FunctionResolver == null) FunctionResolver = FunctionResolverFactory.Create();"); + WriteLine(); + functions.ToList().ForEach(GenerateDynamicallyLoaded); + } + else + functions.ToList().ForEach(f => WriteLine($"vectors.{f.Name} = {f.Name};")); + } + } + + protected override void GenerateDefinition(ExportFunctionDefinition function) + { + if (IsFacadeGenerationOn) GenerateFacadeFunction(function); + if (IsVectorsGenerationOn) GenerateVector(function); + if (IsStaticallyLinkedGenerationOn) GenerateDllImport(function, "__Internal"); + if (IsDynamicallyLinkedGenerationOn) GenerateDllImport(function, $"{function.LibraryName}-{function.LibraryVersion}"); + } + + public void GenerateFacadeFunction(ExportFunctionDefinition function) + { + var parameterNames = ParametersHelper.GetParameterNames(function.Parameters); + var parameters = ParametersHelper.GetParameters(function.Parameters, Context.IsLegacyGenerationOn, false); + + this.WriteSummary(function); + function.Parameters.ToList().ForEach(p => this.WriteParam(p, p.Name)); + this.WriteReturnComment(function); + this.WriteObsoletion(function); + WriteLine($"public static {function.ReturnType.Name} {function.Name}({parameters}) => vectors.{function.Name}({parameterNames});"); + WriteLine(); + } + + public void GenerateVector(ExportFunctionDefinition function) + { + GenerateDelegateType(function); + var functionDelegateName = GetFunctionDelegateName(function); + WriteLine($"public static {functionDelegateName} {function.Name};"); // todo => throw new NotSupportedException();"); + WriteLine(); + } + + private void GenerateDllImport(ExportFunctionDefinition function, string libraryName) + { + this.WriteSummary(function); + function.Parameters.ToList().ForEach(x => this.WriteParam(x, x.Name)); + this.WriteReturnComment(function); + + this.WriteObsoletion(function); + if (Context.SuppressUnmanagedCodeSecurity) 
WriteLine(SuppressUnmanagedCodeSecurityAttribute); + + WriteLine($"[DllImport(\"{libraryName}\", CallingConvention = CallingConvention.Cdecl)]"); + function.ReturnType.Attributes.ToList().ForEach(WriteLine); + + var parameters = ParametersHelper.GetParameters(function.Parameters, Context.IsLegacyGenerationOn); + WriteLine($"public static extern {function.ReturnType.Name} {function.Name}({parameters});"); + WriteLine(); + } + + private void GenerateDynamicallyLoaded(ExportFunctionDefinition function) + { + var delegateParameters = ParametersHelper.GetParameters(function.Parameters, Context.IsLegacyGenerationOn, false); + + var functionFieldName = $"vectors.{function.Name}"; + WriteLine($"{functionFieldName} = ({delegateParameters}) =>"); + + using (BeginBlock(true)) + { + var functionDelegateName = GetFunctionDelegateName(function); + var getDelegate = $"FunctionResolver.GetFunctionDelegate(\"{function.LibraryName}\", \"{function.Name}\", ThrowErrorIfFunctionNotFound)"; + WriteLine($"{functionFieldName} = {getDelegate} ?? delegate {{ throw new NotSupportedException(); }};"); + var returnCommand = function.ReturnType.Name == "void" ? 
string.Empty : "return "; + var parameterNames = ParametersHelper.GetParameterNames(function.Parameters); + WriteLine($"{returnCommand}{functionFieldName}({parameterNames});"); + } + + WriteLine(";"); + WriteLine(); + } + + + private void GenerateDelegateType(ExportFunctionDefinition function) + { + var functionDelegateName = GetFunctionDelegateName(function); + if (Context.SuppressUnmanagedCodeSecurity) WriteLine(SuppressUnmanagedCodeSecurityAttribute); + WriteLine(UnmanagedFunctionPointerAttribute); + function.ReturnType.Attributes.ToList().ForEach(WriteLine); + var parameters = ParametersHelper.GetParameters(function.Parameters, Context.IsLegacyGenerationOn); + WriteLine($"public delegate {function.ReturnType.Name} {functionDelegateName}({parameters});"); + } + + private static string GetFunctionDelegateName(ExportFunctionDefinition function) => $"{function.Name}_delegate"; +} diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/GenerationContext.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/GenerationContext.cs new file mode 100644 index 00000000..bd76f80a --- /dev/null +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generation/GenerationContext.cs @@ -0,0 +1,18 @@ +using System; +using System.Collections.Generic; +using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; + +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Generation; + +internal sealed record GenerationContext +{ + public string Namespace { get; init; } = string.Empty; + public string TypeName { get; init; } = string.Empty; + public bool SuppressUnmanagedCodeSecurity { get; init; } + public bool IsLegacyGenerationOn { get; init; } + public Dictionary LibraryVersionMap { get; init; } = new(); + public IDefinition[] Definitions { get; init; } = Array.Empty(); + public Dictionary ExistingInlineFunctionMap { get; init; } = new(); + public string SolutionDir { get; init; } = string.Empty; + public string OutputDir { get; init; } = string.Empty; +} diff --git 
using System;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions;

namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Generation;

/// <summary>
///     Typed generator base: selects every definition of
///     <typeparamref name="TDefinition" /> from the context and emits each one.
/// </summary>
internal abstract class GeneratorBase<TDefinition> : GeneratorBase where TDefinition : IDefinition
{
    protected GeneratorBase(string path, GenerationContext context) : base(path, context)
    {
    }

    protected override void GenerateBody() => GenerateDefinitions(Query(Context.Definitions.OfType<TDefinition>()).ToArray());

    /// <summary>Filters/orders the definitions to emit; alphabetical by name by default.</summary>
    protected virtual IEnumerable<TDefinition> Query(IEnumerable<TDefinition> definitions) => definitions.OrderBy(d => d.Name);

    protected virtual void GenerateDefinitions(TDefinition[] definitions) => definitions.ToList().ForEach(GenerateDefinition);

    protected abstract void GenerateDefinition(TDefinition definition);
}

/// <summary>
///     Base for all source generators: owns the output file, the indentation state
///     and the common file layout (usings, file-scoped namespace, optional wrapping
///     partial type).
/// </summary>
internal abstract class GeneratorBase : IDisposable
{
    private readonly IndentedTextWriter _indentedTextWriter;
    private readonly StreamWriter _streamWriter;

    protected GeneratorBase(string path, GenerationContext context)
    {
        Context = context;
        var fullPath = Path.Combine(Context.OutputDir, path);
        var outputDir = Path.GetDirectoryName(fullPath);
        if (!Directory.Exists(outputDir)) Directory.CreateDirectory(outputDir);
        _streamWriter = File.CreateText(fullPath);
        _indentedTextWriter = new IndentedTextWriter(_streamWriter);
    }

    protected GenerationContext Context { get; }

    // When true, Generate() wraps the emitted members in
    // "public static unsafe partial class {Context.TypeName}".
    protected bool IsTypeGenerationOn { get; set; }

    void IDisposable.Dispose()
    {
        // FIX: dispose the IndentedTextWriter (the wrapper) BEFORE the underlying
        // StreamWriter so any buffered output is flushed into a still-open stream.
        // The previous order closed the stream first, which could drop pending
        // text or throw on flush of an already-closed writer.
        _indentedTextWriter?.Dispose();
        _streamWriter?.Dispose();
    }

    /// <summary>Namespaces to emit as using directives at the top of the generated file.</summary>
    public virtual IEnumerable<string> Usings() => Array.Empty<string>();

    public virtual void Generate()
    {
        var usings = Usings().ToList();
        usings.ForEach(ns => WriteLine($"using {ns};"));
        if (usings.Count > 0) WriteLine();

        WriteLine($"namespace {Context.Namespace};");
        WriteLine();

        if (IsTypeGenerationOn)
        {
            WriteLine($"public static unsafe partial class {Context.TypeName}");

            using (BeginBlock()) GenerateBody();
        }
        else
            GenerateBody();
    }

    protected abstract void GenerateBody();

    protected internal void Write(string value) => _indentedTextWriter.Write(value);

    protected internal void WriteLine() => _indentedTextWriter.WriteLine();

    protected internal void WriteLine(string line) => _indentedTextWriter.WriteLine(line);

    // NOTE(review): keeps the historical "Intent" spelling ("Indent" was meant);
    // renaming would break subclass callers such as InlineFunctionsGenerator.
    protected void WriteLineWithoutIntent(string line) => _indentedTextWriter.WriteLineNoTabs(line);

    /// <summary>
    ///     Opens a brace block and increases the indent; disposing the returned token
    ///     closes it. With <paramref name="inline" /> the closing brace is written
    ///     without a trailing newline (e.g. for "};" initializers).
    /// </summary>
    protected IDisposable BeginBlock(bool inline = false)
    {
        WriteLine("{");
        _indentedTextWriter.Indent++;
        return new Disposable(() =>
        {
            _indentedTextWriter.Indent--;

            if (inline)
                Write("}");
            else
                WriteLine("}");
        });
    }

    // Minimal action-on-dispose helper used by BeginBlock.
    private sealed class Disposable : IDisposable
    {
        private readonly Action _action;

        public Disposable(Action action) => _action = action;

        public void Dispose() => _action();
    }
}
g = new InlineFunctionsGenerator(path, context); + g.Generate(); + } + + public override IEnumerable Usings() + { + yield return "System"; + } + + protected override IEnumerable Query(IEnumerable functions) => base.Query(functions).Select(RewriteFunctionBody); + + protected override void GenerateDefinition(InlineFunctionDefinition function) + { + function.ReturnType.Attributes.ToList().ForEach(WriteLine); + var parameters = ParametersHelper.GetParameters(function.Parameters, Context.IsLegacyGenerationOn, false); + + this.WriteSummary(function); + function.Parameters.ToList().ForEach(p => this.WriteParam(p, p.Name)); + this.WriteReturnComment(function); + + this.WriteObsoletion(function); + WriteLine($"public static {function.ReturnType.Name} {function.Name}({parameters})"); + + var lines = function.Body.Split(new[] { '\n', '\r' }, StringSplitOptions.RemoveEmptyEntries).ToList(); + lines.ForEach(WriteLineWithoutIntent); + WriteLine($"// original body hash: {function.OriginalBodyHash}"); + WriteLine(); + } + + private InlineFunctionDefinition RewriteFunctionBody(InlineFunctionDefinition function) => + Context.ExistingInlineFunctionMap.TryGetValue(function.Name, out var existing) && function.OriginalBodyHash == existing.OriginalBodyHash + ? 
using System.Collections.Generic;
using System.Linq;

namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Generation;

/// <summary>
///     Emits the generated LibraryVersionMap dictionary initializer mapping each
///     FFmpeg library name to its major version.
/// </summary>
internal sealed class LibrariesGenerator : GeneratorBase
{
    private LibrariesGenerator(string path, GenerationContext context) : base(path, context) => IsTypeGenerationOn = true;

    public static void Generate(string path, GenerationContext options)
    {
        using var generator = new LibrariesGenerator(path, options);
        generator.Generate();
    }

    public override IEnumerable<string> Usings()
    {
        yield return "System.Collections.Generic";
    }

    protected override void GenerateBody()
    {
        WriteLine("public static Dictionary<string, int> LibraryVersionMap = new Dictionary<string, int>");

        // Deterministic output: entries sorted by library name.
        using (BeginBlock(true))
        {
            foreach (var pair in Context.LibraryVersionMap.OrderBy(x => x.Key))
                WriteLine($"{{\"{pair.Key}\", {pair.Value}}},");
        }

        WriteLine(";");
    }
}
using System.Linq;
using System.Text;
using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions;

namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Generation;

/// <summary>Formats parameter lists and argument lists for generated code.</summary>
internal static class ParametersHelper
{
    /// <summary>Resolves a type's display name, preferring the legacy alias when requested.</summary>
    public static string GetTypeName(TypeDefinition type, bool useLegacy) => useLegacy ? type.LegacyName ?? type.Name : type.Name;

    /// <summary>
    ///     Renders a comma-separated parameter declaration list.
    ///     Marshalling attributes are optional so delegate/facade signatures can omit them.
    /// </summary>
    public static string GetParameters(FunctionParameter[] parameters, bool useLegacy, bool withAttributes = true)
    {
        return string.Join(", ", parameters.Select(Render));

        string Render(FunctionParameter parameter)
        {
            var declaration = new StringBuilder();
            if (withAttributes && parameter.Type.Attributes.Length > 0)
                declaration.Append($"{string.Join("", parameter.Type.Attributes)} ");
            if (parameter.IsConstant) declaration.Append("in ");
            if (parameter.ByReference) declaration.Append("ref ");
            // "@" guards against parameter names that collide with C# keywords.
            declaration.Append($"{GetTypeName(parameter.Type, useLegacy)} @{parameter.Name}");
            return declaration.ToString();
        }
    }

    /// <summary>Renders the matching argument list (with "ref" where the parameter is by-reference).</summary>
    public static string GetParameterNames(FunctionParameter[] parameters) =>
        string.Join(", ", parameters.Select(p => p.ByReference ? $"ref @{p.Name}" : $"@{p.Name}"));
}
using System.Collections.Generic;
using System.Linq;
using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions;

namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Generation;

/// <summary>Emits C# struct declarations for the parsed C structures.</summary>
internal sealed class StructuresGenerator : GeneratorBase<StructureDefinition>
{
    public StructuresGenerator(string path, GenerationContext context) : base(path, context)
    {
    }

    public static void Generate(string path, GenerationContext context)
    {
        using var g = new StructuresGenerator(path, context);
        g.Generate();
    }

    public override IEnumerable<string> Usings()
    {
        yield return "System";
        yield return "System.Runtime.InteropServices";
    }

    // Complete structures first, then incomplete ones; alphabetical within each group.
    protected override IEnumerable<StructureDefinition> Query(IEnumerable<StructureDefinition> arrays)
        => arrays.OrderBy(s => s.IsComplete ? 0 : 1).ThenBy(s => s.Name);

    protected override void GenerateDefinition(StructureDefinition structure)
    {
        this.WriteSummary(structure);
        if (!structure.IsComplete) WriteLine("/// This struct is incomplete.");
        this.WriteObsoletion(structure);
        // C unions map to explicit layout with every field at offset zero.
        if (structure.IsUnion) WriteLine("[StructLayout(LayoutKind.Explicit)]");
        WriteLine($"public unsafe partial struct {structure.Name}");

        using (BeginBlock())
        {
            foreach (var field in structure.Fields)
            {
                this.WriteSummary(field);
                this.WriteObsoletion(field);
                if (structure.IsUnion) WriteLine("[FieldOffset(0)]");
                var typeName = ParametersHelper.GetTypeName(field.FieldType, Context.IsLegacyGenerationOn);
                // FIX: removed a leftover empty branch that tested
                // (!Context.IsLegacyGenerationOn && typeName.Contains("_array"))
                // but had no body — dead code with no effect.
                WriteLine($"public {typeName} @{field.Name};");
            }
        }

        WriteLine();
    }
}
a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Generator.cs +++ /dev/null @@ -1,281 +0,0 @@ -using System; -using System.CodeDom.Compiler; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using CppSharp; -using CppSharp.AST; -using CppSharp.Parser; -using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processors; -using ClangParser = CppSharp.ClangParser; -using MacroDefinition = FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions.MacroDefinition; - -namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator; - -internal class Generator -{ - private readonly ASTProcessor _astProcessor; - private bool _hasParsingErrors; - - public Generator(ASTProcessor astProcessor) => _astProcessor = astProcessor; - - public string[] Defines { get; init; } = { }; - public string[] IncludeDirs { get; init; } = { }; - public FunctionExport[] Exports { get; init; } - - public string Namespace { get; init; } - public string ClassName { get; init; } - - public bool SuppressUnmanagedCodeSecurity { get; init; } - - public InlineFunctionDefinition[] ExistingInlineFunctions { get; init; } - - public void Parse(params string[] sourceFiles) - { - _hasParsingErrors = false; - var context = ParseInternal(sourceFiles); - if (_hasParsingErrors) - throw new InvalidOperationException(); - - Process(context); - } - - - public void WriteLibraries(string combine) - { - WriteInternal(combine, - (_, writer) => - { - writer.WriteLine("using System.Collections.Generic;"); - writer.WriteLine(); - - writer.WriteLine($"public unsafe static partial class {ClassName}"); - - using (writer.BeginBlock()) - { - writer.WriteLine("public static Dictionary LibraryVersionMap = new Dictionary"); - - using (writer.BeginBlock(true)) - { - var libraryVersionMap = Exports.Select(x => new { x.LibraryName, x.LibraryVersion }) - .Distinct() - .ToDictionary(x => x.LibraryName, x => x.LibraryVersion); - foreach (var pair in libraryVersionMap) - 
writer.WriteLine($"{{\"{pair.Key}\", {pair.Value}}},"); - } - - writer.WriteLine(";"); - } - }); - } - - public void WriteEnums(string outputFile) - { - WriteInternal(outputFile, - (units, writer) => - { - units.OfType() - .OrderBy(x => x.Name) - .ToList() - .ForEach(x => - { - writer.WriteEnumeration(x); - writer.WriteLine(); - }); - }); - } - - public void WriteDelegates(string outputFile) - { - WriteInternal(outputFile, - (units, writer) => - { - units.OfType().ToList().ForEach(x => - { - writer.WriteDelegate(x); - writer.WriteLine(); - }); - }); - } - - public void WriteMacros(string outputFile) - { - WriteInternal(outputFile, - (units, writer) => - { - writer.WriteLine($"public unsafe static partial class {ClassName}"); - using (writer.BeginBlock()) - units.OfType() - .OrderBy(x => x.Name) - .ToList() - .ForEach(writer.WriteMacro); - }); - } - - public void WriteExportFunctions(string outputFile) - { - WriteInternal(outputFile, - (units, writer) => - { - writer.WriteLine($"public unsafe static partial class {ClassName}"); - using var _ = writer.BeginBlock(); - writer.WriteLine("private const string PlatformNotSupportedMessageFormat = \"{0} is not supported on this platform.\";"); - writer.WriteLine(); - units.OfType() - .OrderBy(x => x.LibraryName) - .ThenBy(x => x.Name) - .ToList() - .ForEach(x => - { - writer.WriteFunction(x); - writer.WriteLine(); - }); - }); - } - - public void WriteInlineFunctions(string outputFile) - { - var existingInlineFunctionMap = ExistingInlineFunctions.ToDictionary(x => x.Name); - WriteInternal(outputFile, - (units, writer) => - { - writer.WriteLine($"public unsafe static partial class {ClassName}"); - using (writer.BeginBlock()) - units.OfType() - .OrderBy(x => x.Name) - .Select(RewriteFunctionBody) - .ToList() - .ForEach(x => - { - writer.WriteFunction(x); - writer.WriteLine(); - }); - }); - - InlineFunctionDefinition RewriteFunctionBody(InlineFunctionDefinition function) - { - if 
(existingInlineFunctionMap.TryGetValue(function.Name, out var existing) && - function.OriginalBodyHash == existing.OriginalBodyHash) - function.Body = existing.Body; - - return function; - } - } - - public void WriteArrays(string outputFile) - { - WriteInternal(outputFile, - (units, writer) => - { - writer.WriteLine("#pragma warning disable 169"); - writer.WriteLine(); - units.OfType() - .OrderBy(x => x.Size) - .ThenBy(x => x.Name) - .ToList().ForEach(x => - { - writer.WriteFixedArray(x); - writer.WriteLine(); - }); - }); - } - - public void WriteStructures(string outputFile) - { - WriteInternal(outputFile, - (units, writer) => - { - units.OfType() - .Where(x => x.IsComplete) - .ToList() - .ForEach(x => - { - writer.WriteStructure(x); - writer.WriteLine(); - }); - }); - } - - public void WriteIncompleteStructures(string outputFile) - { - WriteInternal(outputFile, - (units, writer) => - { - units.OfType() - .Where(x => !x.IsComplete) - .ToList() - .ForEach(x => - { - writer.WriteStructure(x); - writer.WriteLine(); - }); - }); - } - - private ASTContext ParseInternal(string[] sourceFiles) - { - var parserOptions = new ParserOptions - { - Verbose = true, - ASTContext = new CppSharp.Parser.AST.ASTContext(), - LanguageVersion = LanguageVersion.C99_GNU - }; - - parserOptions.SetupMSVC(VisualStudioVersion.VS2019); - - foreach (var includeDir in IncludeDirs) parserOptions.AddIncludeDirs(includeDir); - - foreach (var define in Defines) parserOptions.AddDefines(define); - var result = ClangParser.ParseSourceFiles(sourceFiles, parserOptions); - OnSourceFileParsed(sourceFiles, result); - return ClangParser.ConvertASTContext(parserOptions.ASTContext); - } - - private void OnSourceFileParsed(IEnumerable files, ParserResult result) - { - switch (result.Kind) - { - case ParserResultKind.Success: - Diagnostics.Message("Parsed '{0}'", string.Join(", ", files)); - break; - case ParserResultKind.Error: - Diagnostics.Error("Error parsing '{0}'", string.Join(", ", files)); - 
using System;
using System.Collections.Generic;
using CppSharp;
using CppSharp.AST;
using CppSharp.Parser;
using ClangParser = CppSharp.ClangParser;

namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator;

/// <summary>
///     Thin wrapper around CppSharp's Clang parser configured for FFmpeg headers
///     (C99/GNU language mode, MSVC 2019 toolchain setup).
/// </summary>
internal sealed class Parser
{
    private bool _hasParsingErrors;

    public string[] Defines { get; init; } = Array.Empty<string>();
    public string[] IncludeDirs { get; init; } = Array.Empty<string>();

    /// <summary>
    ///     Parses the given headers and returns the converted AST.
    ///     Throws <see cref="InvalidOperationException" /> if any file failed to parse.
    /// </summary>
    public ASTContext Parse(params string[] sourceFiles)
    {
        _hasParsingErrors = false;
        var context = ParseInternal(sourceFiles);
        if (_hasParsingErrors)
            throw new InvalidOperationException();
        return context;
    }

    private ASTContext ParseInternal(string[] sourceFiles)
    {
        var parserOptions = new ParserOptions
        {
            Verbose = true,
            ASTContext = new CppSharp.Parser.AST.ASTContext(),
            LanguageVersion = LanguageVersion.C99_GNU
        };

        parserOptions.SetupMSVC(VisualStudioVersion.VS2019);

        foreach (var includeDir in IncludeDirs) parserOptions.AddIncludeDirs(includeDir);
        foreach (var define in Defines) parserOptions.AddDefines(define);

        var result = ClangParser.ParseSourceFiles(sourceFiles, parserOptions);
        OnSourceFileParsed(sourceFiles, result);
        return ClangParser.ConvertASTContext(parserOptions.ASTContext);
    }

    // Records the parse outcome and relays all Clang diagnostics.
    private void OnSourceFileParsed(IEnumerable<string> files, ParserResult result)
    {
        switch (result.Kind)
        {
            case ParserResultKind.Success:
                Diagnostics.Message("Parsed '{0}'", string.Join(", ", files));
                break;
            case ParserResultKind.Error:
                Diagnostics.Error("Error parsing '{0}'", string.Join(", ", files));
                _hasParsingErrors = true;
                break;
            case ParserResultKind.FileNotFound:
                // NOTE(review): a missing file is reported but does NOT set
                // _hasParsingErrors, so Parse() still succeeds — confirm intentional.
                Diagnostics.Error("A file from '{0}' was not found", string.Join(",", files));
                break;
            default:
                throw new ArgumentOutOfRangeException();
        }

        for (uint i = 0; i < result.DiagnosticsCount; ++i)
        {
            var diagnostics = result.GetDiagnostics(i);

            var message =
                $"{diagnostics.FileName}({diagnostics.LineNumber},{diagnostics.ColumnNumber}): {diagnostics.Level.ToString().ToLower()}: {diagnostics.Message}";
            Diagnostics.Message(message);
        }
    }
}
using System.Linq;
using CppSharp.AST;
using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions;
using MacroDefinition = FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions.MacroDefinition;

namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processing;

/// <summary>
///     Drives the per-translation-unit processors over the parsed AST, then runs the
///     macro post-processor with every enum item registered as a known macro.
/// </summary>
internal sealed class ASTProcessor
{
    private readonly ProcessingContext _context;
    private readonly EnumerationProcessor _enumerationProcessor;
    private readonly FunctionProcessor _functionProcessor;
    private readonly MacroPostProcessor _macroPostProcessor;
    private readonly MacroProcessor _macroProcessor;
    private readonly StructureProcessor _structureProcessor;

    public ASTProcessor(ProcessingContext context)
    {
        _context = context;
        _functionProcessor = new FunctionProcessor(context);
        _structureProcessor = new StructureProcessor(context);
        _enumerationProcessor = new EnumerationProcessor(context);
        _macroProcessor = new MacroProcessor(context);
        _macroPostProcessor = new MacroPostProcessor(context);

        // Inject cross-processor dependencies (the processors reference one another).
        _structureProcessor.EnumerationProcessor = _enumerationProcessor;
        _structureProcessor.FunctionProcessor = _functionProcessor;
        _functionProcessor.StructureProcessor = _structureProcessor;
    }

    public void Process(ASTContext context)
    {
        // System headers are skipped: only FFmpeg's own declarations are bound.
        foreach (var translationUnit in context.TranslationUnits.Where(x => !x.IsSystemHeader))
        {
            _macroProcessor.Process(translationUnit);
            _enumerationProcessor.Process(translationUnit);
            _structureProcessor.Process(translationUnit);
            _functionProcessor.Process(translationUnit);
        }

        RegisterEnumItemsAsKnownMacros();

        var macros = _context.Definitions.OfType<MacroDefinition>().ToArray();
        _macroPostProcessor.Process(macros);
    }

    // Exposes every enum item to the macro post-processor so macro expressions
    // referencing enum members can be resolved and typed (as int).
    private void RegisterEnumItemsAsKnownMacros()
    {
        var enums = _context.Definitions.OfType<EnumerationDefinition>().ToArray();

        foreach (var @enum in enums)
        foreach (var item in @enum.Items)
        {
            var key = @enum.Name + "." + item.Name;
            if (!_context.WellKnownMacros.ContainsKey(key)) _context.WellKnownMacros.Add(key, new TypeOrAlias(typeof(int)));
            if (!_context.WellKnownEnumItems.ContainsKey(item.Name)) _context.WellKnownEnumItems.Add(item.Name, item.Value);
        }
    }
}
name : enumeration.Name; - if (_context.IsKnownUnitName(name)) return; + if (_context.Definitions.Any(d => d.Name == name)) return; var definition = new EnumerationDefinition { Name = name, TypeName = TypeHelper.GetTypeName(enumeration.Type), Content = enumeration.Comment?.BriefText, - Obsoletion = ObsoletionHelper.CreateObsoletion(enumeration) + Obsoletion = ObsoletionHelper.CreateObsoletion(enumeration), + Items = enumeration.Items + .Select(x => + new EnumerationItem + { + Name = x.Name, + Value = ConvertValue(x.Value, enumeration.BuiltinType.Type).ToString(), + Content = x.Comment?.BriefText + }) + .ToArray() }; - definition.Items = enumeration.Items - .Select(x => - new EnumerationItem - { - Name = x.Name, - Value = ConvertValue(x.Value, enumeration.BuiltinType.Type).ToString(), - Content = x.Comment?.BriefText - }) - .ToArray(); - - _context.AddUnit(definition); + _context.AddDefinition(definition); } private static object ConvertValue(ulong value, PrimitiveType primitiveType) { return primitiveType switch { - PrimitiveType.Int => value > int.MaxValue ? (int) value : value, + PrimitiveType.Int => value > int.MaxValue ? (int)value : value, PrimitiveType.UInt => value, - PrimitiveType.Long => value > long.MaxValue ? (long) value : value, + PrimitiveType.Long => value > long.MaxValue ? 
(long)value : value, PrimitiveType.ULong => value, _ => throw new NotSupportedException() }; diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/FunctionProcessor.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/FunctionProcessor.cs similarity index 64% rename from FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/FunctionProcessor.cs rename to FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/FunctionProcessor.cs index 2966322a..d143c868 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/FunctionProcessor.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/FunctionProcessor.cs @@ -6,22 +6,27 @@ using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; using Type = CppSharp.AST.Type; -namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processors; +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processing; internal class FunctionProcessor { + private const string ReturnMarshalAsConstCharPtr = "[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]"; + private const string MarshalAsUTF8Macros = - " \r\n" + - " #if NETSTANDARD2_1_OR_GREATER\r\n" + - " [MarshalAs(UnmanagedType.LPUTF8Str)]\r\n" + - " #else\r\n" + - " [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]\r\n" + - " #endif\r\n" + - " "; + " \r\n" + + " #if NETSTANDARD2_1_OR_GREATER\r\n" + + " [MarshalAs(UnmanagedType.LPUTF8Str)]\r\n" + + " #else\r\n" + + " [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]\r\n" + + " #endif\r\n" + + " "; + - private readonly ASTProcessor _context; + private readonly ProcessingContext _context; - public FunctionProcessor(ASTProcessor context) => _context = context; + public FunctionProcessor(ProcessingContext context) => _context = context; + + public StructureProcessor StructureProcessor { get; set; } public void Process(TranslationUnit translationUnit) { @@ -32,7 +37,7 @@ public void Process(TranslationUnit translationUnit) void 
PopulateCommon(FunctionDefinitionBase inline) { inline.Name = functionName; - inline.ReturnType = GetReturnTypeName(function.ReturnType.Type, functionName); + inline.ReturnType = GetReturnType(function.ReturnType.Type, functionName); inline.Content = function.Comment?.BriefText; inline.ReturnComment = GetReturnComment(function); inline.Parameters = function.Parameters.Select((x, i) => GetParameter(function, x, i)).ToArray(); @@ -41,11 +46,13 @@ void PopulateCommon(FunctionDefinitionBase inline) if (function.IsInline) { - var inlineDefinition = new InlineFunctionDefinition(); + var inlineDefinition = new InlineFunctionDefinition + { + Body = function.Body, + OriginalBodyHash = GetSha256(function.Body) + }; PopulateCommon(inlineDefinition); - inlineDefinition.Body = function.Body; - inlineDefinition.OriginalBodyHash = GetSha256(function.Body); - _context.AddUnit(inlineDefinition); + _context.AddDefinition(inlineDefinition); continue; } @@ -55,11 +62,13 @@ void PopulateCommon(FunctionDefinitionBase inline) continue; } - var exportDefinition = new ExportFunctionDefinition(); + var exportDefinition = new ExportFunctionDefinition + { + LibraryName = export.LibraryName, + LibraryVersion = export.LibraryVersion + }; PopulateCommon(exportDefinition); - exportDefinition.LibraryName = export.LibraryName; - exportDefinition.LibraryVersion = export.LibraryVersion; - _context.AddUnit(exportDefinition); + _context.AddDefinition(exportDefinition); } } @@ -69,10 +78,10 @@ internal TypeDefinition GetDelegateType(FunctionType functionType, string name) { Name = $"{name}_func", FunctionName = name, - ReturnType = GetReturnTypeName(functionType.ReturnType.Type, name), + ReturnType = GetReturnType(functionType.ReturnType.Type, name), Parameters = functionType.Parameters.Select(GetParameter).ToArray() }; - _context.AddUnit(@delegate); + _context.AddDefinition(@delegate); return @delegate; } @@ -80,25 +89,33 @@ internal TypeDefinition GetDelegateType(FunctionType functionType, string 
name) private FunctionParameter GetParameter(Parameter parameter, int position) { var name = string.IsNullOrEmpty(parameter.Name) ? $"p{position}" : parameter.Name; + var parameterType = GetParameterType(parameter.Type, name); return new FunctionParameter { Name = name, - Type = GetParameterType(parameter.Type, name) + Type = parameterType, + IsConstant = ParameterIsConstantFixedArray(parameter), + IsIndirect = parameter.IsIndirect, + ByReference = !parameter.IsConst && (parameterType.ByReference || parameterType is FixedArrayDefinition) }; } private FunctionParameter GetParameter(Function function, Parameter parameter, int position) { var name = string.IsNullOrEmpty(parameter.Name) ? $"p{position}" : parameter.Name; + var parameterType = GetParameterType(parameter.Type, $"{function.Name}_{name}"); return new FunctionParameter { Name = name, - Type = GetParameterType(parameter.Type, $"{function.Name}_{name}"), - Content = GetParamComment(function, parameter.Name) + Type = parameterType, + Content = GetParamComment(function, parameter.Name), + IsConstant = ParameterIsConstantFixedArray(parameter), + IsIndirect = parameter.IsIndirect, + ByReference = !parameter.IsConst && (parameterType.ByReference || parameterType is FixedArrayDefinition) }; } - private TypeDefinition GetReturnTypeName(Type type, string name) + private TypeDefinition GetParameterType(Type type, string name) { if (type is PointerType pointerType && pointerType.QualifiedPointee.Qualifiers.IsConst && @@ -109,10 +126,7 @@ private TypeDefinition GetReturnTypeName(Type type, string name) PrimitiveType.Char => new TypeDefinition { Name = "string", - Attributes = new[] - { - "[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]" - } + Attributes = new[] { MarshalAsUTF8Macros } }, PrimitiveType.Void => new TypeDefinition { @@ -125,10 +139,10 @@ private TypeDefinition GetReturnTypeName(Type type, string name) }; } - return GetParameterType(type, name); + return 
GetNoneBuiltinParameterType(type, name); } - private TypeDefinition GetParameterType(Type type, string name) + private TypeDefinition GetReturnType(Type type, string name) { if (type is PointerType pointerType && pointerType.QualifiedPointee.Qualifiers.IsConst && @@ -139,7 +153,7 @@ private TypeDefinition GetParameterType(Type type, string name) PrimitiveType.Char => new TypeDefinition { Name = "string", - Attributes = new[] { MarshalAsUTF8Macros } + Attributes = new[] { ReturnMarshalAsConstCharPtr } }, PrimitiveType.Void => new TypeDefinition { @@ -152,15 +166,20 @@ private TypeDefinition GetParameterType(Type type, string name) }; } + return GetNoneBuiltinParameterType(type, name); + } + + private TypeDefinition GetNoneBuiltinParameterType(Type type, string name) + { // edge case when type is array of pointers to none builtin type (type[]* -> type**) if (type is ArrayType arrayType && arrayType.SizeType == ArrayType.ArraySize.Incomplete && arrayType.Type is PointerType arrayPointerType && - !(arrayPointerType.Pointee is BuiltinType || arrayPointerType.Pointee is TypedefType typedefType && - typedefType.Declaration.Type is BuiltinType)) + !(arrayPointerType.Pointee is BuiltinType || (arrayPointerType.Pointee is TypedefType typedefType && + typedefType.Declaration.Type is BuiltinType))) return new TypeDefinition { Name = $"{TypeHelper.GetTypeName(arrayPointerType)}*" }; - return _context.StructureProcessor.GetTypeDefinition(type, name); + return StructureProcessor.GetTypeDefinition(type, name); } private static string GetParamComment(Function function, string parameterName) @@ -179,6 +198,8 @@ private string GetReturnComment(Function function) return GetCommentString(comment); } + private static bool ParameterIsConstantFixedArray(Parameter parameter) => parameter.IsConst && parameter.Type is ArrayType { SizeType: ArrayType.ArraySize.Constant }; + private static string GetCommentString(BlockCommandComment comment) { return comment == null diff --git 
a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/MacroPostProcessor.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/MacroPostProcessor.cs similarity index 89% rename from FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/MacroPostProcessor.cs rename to FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/MacroPostProcessor.cs index 012154f5..78f23770 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/MacroPostProcessor.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/MacroPostProcessor.cs @@ -4,21 +4,20 @@ using System.Globalization; using System.Linq; using System.Text.RegularExpressions; -using FFmpeg.AutoGen.ClangMacroParser; using FFmpeg.AutoGen.ClangMacroParser.Expressions; using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processors; +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processing; internal class MacroPostProcessor { private static readonly Regex EolEscapeRegex = new(@"\\\s*[\r\n|\r|\n]\s*", RegexOptions.Compiled | RegexOptions.Multiline); - private readonly ASTProcessor _astProcessor; + private readonly ProcessingContext _context; private Dictionary _macroExpressionMap; - public MacroPostProcessor(ASTProcessor astProcessor) => _astProcessor = astProcessor; + public MacroPostProcessor(ProcessingContext context) => _context = context; public void Process(IReadOnlyList macros) { @@ -27,7 +26,7 @@ public void Process(IReadOnlyList macros) foreach (var macro in macros) try { - _macroExpressionMap.Add(macro.Name, Parser.Parse(macro.Expression)); + _macroExpressionMap.Add(macro.Name, ClangMacroParser.Parser.Parse(macro.Expression)); } catch (NotSupportedException) { @@ -46,7 +45,7 @@ private void Process(MacroDefinition macro) macro.Expression = CleanUp(macro.Expression); if (!_macroExpressionMap.TryGetValue(macro.Name, out var expression) || expression == null) return; - + var typeOrAlias = DeduceType(expression); if (typeOrAlias == null) return; @@ -56,7 +55,7 @@ 
private void Process(MacroDefinition macro) macro.Content = $"{macro.Name} = {CleanUp(macro.Expression)}"; macro.Expression = Serialize(expression); macro.IsConst = IsConst(expression); - macro.IsValid = !typeOrAlias.IsAlias || _astProcessor.TypeAliases.ContainsKey(typeOrAlias.Alias); + macro.IsValid = typeOrAlias.IsType || _context.TypeAliases.ContainsKey(typeOrAlias.Alias); } private static string CleanUp(string expression) @@ -126,7 +125,7 @@ private IExpression Rewrite(IExpression expression) } private IExpression Rewrite(VariableExpression expression) => - _astProcessor.WellKnownEnumItems.TryGetValue(expression.Name, out var fullName) + _context.WellKnownEnumItems.TryGetValue(expression.Name, out var fullName) ? new VariableExpression(fullName) : expression; @@ -148,15 +147,15 @@ private string Serialize(IExpression expression) internal TypeOrAlias GetWellKnownMacroType(string macroName) { - if (_astProcessor.WellKnownMacros.TryGetValue(macroName, out var alias)) + if (_context.WellKnownMacros.TryGetValue(macroName, out var alias)) return alias; - if (_astProcessor.WellKnownEnumItems.TryGetValue(macroName, out _)) + if (_context.WellKnownEnumItems.TryGetValue(macroName, out _)) return new TypeOrAlias(typeof(int)); return new TypeOrAlias(macroName); } internal TypeOrAlias GetTypeAlias(string typeName) => - _astProcessor.TypeAliases.TryGetValue(typeName, out var alias) ? alias : typeName; + _context.TypeAliases.TryGetValue(typeName, out var alias) ? 
alias : typeName; private string Serialize(object value) { diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/MacroProcessor.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/MacroProcessor.cs similarity index 68% rename from FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/MacroProcessor.cs rename to FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/MacroProcessor.cs index 1458ede1..0501d068 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/MacroProcessor.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/MacroProcessor.cs @@ -1,13 +1,13 @@ using System.Linq; using CppSharp.AST; -namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processors; +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processing; internal class MacroProcessor { - private readonly ASTProcessor _context; + private readonly ProcessingContext _context; - public MacroProcessor(ASTProcessor context) => _context = context; + public MacroProcessor(ProcessingContext context) => _context = context; public void Process(TranslationUnit translationUnit) { @@ -19,7 +19,7 @@ public void Process(TranslationUnit translationUnit) Name = macro.Name, Expression = macro.Expression }; - _context.AddUnit(macroDefinition); + _context.AddDefinition(macroDefinition); } } } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/ObsoletionHelper.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/ObsoletionHelper.cs similarity index 93% rename from FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/ObsoletionHelper.cs rename to FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/ObsoletionHelper.cs index 09c3d39b..2043a8b2 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/ObsoletionHelper.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/ObsoletionHelper.cs @@ -2,7 +2,7 @@ using CppSharp.AST; using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processors; +namespace 
FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processing; internal static class ObsoletionHelper { diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/ProcessingContext.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/ProcessingContext.cs new file mode 100644 index 00000000..35cd3390 --- /dev/null +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/ProcessingContext.cs @@ -0,0 +1,26 @@ +using System.Collections.Generic; +using System.Linq; +using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; + +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processing; + +internal sealed record ProcessingContext +{ + public HashSet IgnoreUnitNames { get; init; } = new(); + public Dictionary TypeAliases { get; init; } = new(); + public Dictionary WellKnownMacros { get; init; } = new(); + public Dictionary WellKnownEnumItems { get; init; } = new(); + public Dictionary FunctionExportMap { get; init; } = new(); + public List Definitions { get; init; } = new(); + + public void AddDefinition(IDefinition definition) + { + if (IgnoreUnitNames.Contains(definition.Name)) return; + var existing = Definitions.FirstOrDefault(x => x.Name == definition.Name); + + if (existing != null) + Definitions.Remove(existing); + + Definitions.Add(definition); + } +} diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/StructureProcessor.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/StructureProcessor.cs similarity index 73% rename from FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/StructureProcessor.cs rename to FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/StructureProcessor.cs index e66f4a17..ddc8df2c 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/StructureProcessor.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/StructureProcessor.cs @@ -6,13 +6,16 @@ using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; using Type = CppSharp.AST.Type; -namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processors; +namespace 
FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processing; internal class StructureProcessor { - private readonly ASTProcessor _context; + private readonly ProcessingContext _context; - public StructureProcessor(ASTProcessor context) => _context = context; + public StructureProcessor(ProcessingContext context) => _context = context; + + public EnumerationProcessor EnumerationProcessor { get; set; } + public FunctionProcessor FunctionProcessor { get; set; } public void Process(TranslationUnit translationUnit) { @@ -33,7 +36,7 @@ private void MakeDefinition(Class @class, string name) { name = string.IsNullOrEmpty(@class.Name) ? name : @class.Name; - var definition = _context.Units.OfType().FirstOrDefault(x => x.Name == name); + var definition = _context.Definitions.OfType().FirstOrDefault(x => x.Name == name); if (definition == null) { @@ -43,7 +46,7 @@ private void MakeDefinition(Class @class, string name) IsUnion = @class.IsUnion, Obsoletion = ObsoletionHelper.CreateObsoletion(@class) }; - _context.AddUnit(definition); + _context.AddDefinition(definition); } if (@class.Comment != null) @@ -136,7 +139,7 @@ private TypeDefinition GetTypeDefinition(PointerType pointerType, string name) pointee = typedefType.Declaration.Type; if (pointee is FunctionType functionType) - return _context.FunctionProcessor.GetDelegateType(functionType, name); + return FunctionProcessor.GetDelegateType(functionType, name); var pointerTypeDefinition = GetTypeDefinition(pointee, name); return new TypeDefinition { Name = $"{pointerTypeDefinition.Name}*" }; @@ -152,7 +155,7 @@ private TypeDefinition GetFieldTypeForNestedDeclaration(Declaration declaration, MakeDefinition(@class, typeName); return new TypeDefinition { Name = typeName }; case Enumeration @enum: - _context.EnumerationProcessor.MakeDefinition(@enum, typeName); + EnumerationProcessor.MakeDefinition(@enum, typeName); return new TypeDefinition { Name = typeName }; default: throw new NotSupportedException(); @@ -165,35 +168,51 @@ private 
TypeDefinition GetFieldTypeForFixedArray(ArrayType arrayType) var elementType = arrayType.Type; var elementTypeDefinition = GetTypeDefinition(elementType); - var fixedSize = (int) arrayType.Size; + var fixedSize = (int)arrayType.Size; - var name = $"{elementTypeDefinition.Name}_array{fixedSize}"; + var name = $"{elementTypeDefinition.Name}{fixedSize}"; + var legacyName = $"{elementTypeDefinition.Name}_array{fixedSize}"; + + var isPointer = elementType.IsPointer(); - if (elementType.IsPointer()) - name = $"{TypeHelper.GetTypeName(elementType.GetPointee())}_ptrArray{fixedSize}"; + if (isPointer) + { + name = $"{TypeHelper.GetTypeName(elementType.GetPointee())}_ptr{fixedSize}"; + legacyName = $"{TypeHelper.GetTypeName(elementType.GetPointee())}_ptrArray{fixedSize}"; + } if (elementType is ArrayType elementArrayType) { + var typeName = TypeHelper.GetTypeName(elementArrayType.Type); + if (elementArrayType.SizeType == ArrayType.ArraySize.Constant) { - fixedSize /= (int) elementArrayType.Size; - name = $"{TypeHelper.GetTypeName(elementArrayType.Type)}_array{fixedSize}x{elementArrayType.Size}"; + fixedSize /= (int)elementArrayType.Size; + name = $"{typeName}{fixedSize}x{elementArrayType.Size}"; + legacyName = $"{typeName}_array{fixedSize}x{elementArrayType.Size}"; } else - name = $"{TypeHelper.GetTypeName(elementArrayType.Type)}_arrayOfArray{fixedSize}"; + { + name = $"{typeName}_ref{fixedSize}"; + legacyName = $"{typeName}_arrayOfArray{fixedSize}"; + throw new NotImplementedException("this branch is never concurring with current include files and needs investigation if it happens in future"); + } } - if (_context.IsKnownUnitName(name)) - return new TypeDefinition { Name = name, ByReference = !arrayType.QualifiedType.Qualifiers.IsConst }; + var knownDefinition = _context.Definitions.FirstOrDefault(d => d.Name == name); + if (knownDefinition != null) + return (FixedArrayDefinition)knownDefinition; var fixedArray = new FixedArrayDefinition { Name = name, - Size = fixedSize, 
+ LegacyName = legacyName, + Length = fixedSize, ElementType = elementTypeDefinition, - IsPrimitive = elementType.IsPrimitiveType() + IsPrimitive = elementType.IsPrimitiveType(), + IsPointer = isPointer }; - _context.AddUnit(fixedArray); + _context.AddDefinition(fixedArray); return fixedArray; } } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/TypeHelper.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/TypeHelper.cs similarity index 97% rename from FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/TypeHelper.cs rename to FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/TypeHelper.cs index 1ddce52d..e76ef3e7 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/TypeHelper.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/TypeHelper.cs @@ -2,7 +2,7 @@ using CppSharp.AST; using Type = CppSharp.AST.Type; -namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processors; +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processing; internal static class TypeHelper { diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/TypeOrAlias.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/TypeOrAlias.cs similarity index 87% rename from FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/TypeOrAlias.cs rename to FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/TypeOrAlias.cs index e4d033b1..ac90d1a0 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/TypeOrAlias.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processing/TypeOrAlias.cs @@ -1,8 +1,8 @@ using System; -namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processors; +namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processing; -internal class TypeOrAlias +internal record TypeOrAlias { public TypeOrAlias(Type type) { @@ -12,7 +12,7 @@ public TypeOrAlias(Type type) public TypeOrAlias(string alias) => Alias = alias; - public bool IsType => Alias != null; + public bool IsType => Type != null; public bool IsAlias => Alias != null; public Type Type { get; } public 
string Alias { get; } @@ -34,6 +34,7 @@ private int GetPrecedence() public override string ToString() { + if (IsAlias) return Alias; if (Type == typeof(bool)) return "bool"; if (Type == typeof(double)) return "double"; if (Type == typeof(float)) return "float"; @@ -43,7 +44,7 @@ public override string ToString() if (Type == typeof(ulong)) return "ulong"; if (Type == typeof(int)) return "int"; if (Type == typeof(uint)) return "uint"; - return Alias; + throw new NotSupportedException(); } public static implicit operator TypeOrAlias(Type type) => new(type); diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/ASTProcessor.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/ASTProcessor.cs deleted file mode 100644 index 2437ac0b..00000000 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Processors/ASTProcessor.cs +++ /dev/null @@ -1,78 +0,0 @@ -using System.Collections.Generic; -using System.Linq; -using CppSharp.AST; -using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; -using MacroDefinition = FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions.MacroDefinition; - -namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processors; - -internal class ASTProcessor -{ - private readonly List _units; - - public ASTProcessor() - { - _units = new List(); - IgnoreUnitNames = new HashSet(); - TypeAliases = new Dictionary(); - WellKnownMacros = new Dictionary(); - WellKnownEnumItems = new Dictionary(); - FunctionProcessor = new FunctionProcessor(this); - StructureProcessor = new StructureProcessor(this); - EnumerationProcessor = new EnumerationProcessor(this); - MacroProcessor = new MacroProcessor(this); - MacroPostProcessor = new MacroPostProcessor(this); - } - - public HashSet IgnoreUnitNames { get; } - public Dictionary TypeAliases { get; } - public Dictionary WellKnownMacros { get; } - public Dictionary WellKnownEnumItems { get; } - public MacroProcessor MacroProcessor { get; } - public EnumerationProcessor EnumerationProcessor { get; } - public StructureProcessor 
StructureProcessor { get; } - public FunctionProcessor FunctionProcessor { get; } - public MacroPostProcessor MacroPostProcessor { get; } - - public Dictionary FunctionExportMap { get; init; } - public IReadOnlyList Units => _units; - - public bool IsKnownUnitName(string name) - { - return _units.Any(x => x.Name == name); - } - - public void AddUnit(IDefinition definition) - { - if (IgnoreUnitNames.Contains(definition.Name)) return; - var existing = _units.FirstOrDefault(x => x.Name == definition.Name); - if (existing != null) - _units.Remove(existing); - _units.Add(definition); - } - - public void Process(IEnumerable units) - { - foreach (var translationUnit in units) - { - MacroProcessor.Process(translationUnit); - EnumerationProcessor.Process(translationUnit); - StructureProcessor.Process(translationUnit); - FunctionProcessor.Process(translationUnit); - } - - var enums = _units.OfType().ToArray(); - - // add all enums as known macros - foreach (var @enum in enums) - foreach (var item in @enum.Items) - { - var key = @enum.Name + "." 
+ item.Name; - if (!WellKnownMacros.ContainsKey(key)) WellKnownMacros.Add(key, new TypeOrAlias(typeof(int))); - if (!WellKnownEnumItems.ContainsKey(item.Name)) WellKnownEnumItems.Add(item.Name, item.Value); - } - - var macros = Units.OfType().ToArray(); - MacroPostProcessor.Process(macros); - } -} diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Program.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Program.cs index e02b24cf..0bc6b529 100644 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Program.cs +++ b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Program.cs @@ -1,8 +1,10 @@ using System; -using System.Diagnostics; +using System.Collections.Generic; using System.IO; using System.Linq; -using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processors; +using CppSharp.AST; +using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Generation; +using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Processing; namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator; @@ -14,94 +16,187 @@ internal static void Main(string[] args) if (options.Verbose) { - Console.WriteLine($"Working dir: {Environment.CurrentDirectory}"); - Console.WriteLine($"Output dir: {options.OutputDir}"); - Console.WriteLine($"FFmpeg headers dir: {options.FFmpegIncludesDir}"); - Console.WriteLine($"FFmpeg bin dir: {options.FFmpegBinDir}"); + Console.WriteLine($"Working path: {Environment.CurrentDirectory}"); + Console.WriteLine($"Solution path: {options.SolutionDir}"); + Console.WriteLine($"FFmpeg headers path: {options.FFmpegIncludesDir}"); + Console.WriteLine($"FFmpeg binaries path: {options.FFmpegBinDir}"); Console.WriteLine($"Namespace name: {options.Namespace}"); - Console.WriteLine($"Class name: {options.ClassName}"); + Console.WriteLine($"Type name: {options.TypeName}"); } - var existingInlineFunctions = - ExistingInlineFunctionsHelper.LoadInlineFunctions(Path.Combine(options.OutputDir, - "FFmpeg.functions.inline.g.cs")); + // parse headers + var astContexts = Parse(options.FFmpegIncludesDir).ToList(); - var exports = 
FunctionExportHelper.LoadFunctionExports(options.FFmpegBinDir).ToArray(); - - var astProcessor = new ASTProcessor + // process + var functionExports = FunctionExportHelper.LoadFunctionExports(options.FFmpegBinDir).ToArray(); + var processingContext = new ProcessingContext { - FunctionExportMap = exports - .GroupBy(x => x.Name).Select(x => x.First()) // Eliminate duplicated names + IgnoreUnitNames = new HashSet { "__NSConstantString_tag" }, + TypeAliases = { { "int64_t", typeof(long) } }, + WellKnownMacros = + { + { "FFERRTAG", typeof(int) }, + { "MKTAG", typeof(int) }, + { "UINT64_C", typeof(ulong) }, + { "AV_VERSION_INT", typeof(int) }, + { "AV_VERSION", typeof(string) }, + { "_DHUGE_EXP", typeof(int) }, + { "_DMAX", typeof(long) }, + { "_FMAX", typeof(long) }, + { "_LMAX", typeof(long) } + }, + FunctionExportMap = functionExports + .GroupBy(x => x.Name) + .Select(x => x.First()) // Eliminate duplicated names .ToDictionary(x => x.Name) }; - astProcessor.IgnoreUnitNames.Add("__NSConstantString_tag"); - astProcessor.TypeAliases.Add("int64_t", typeof(long)); - astProcessor.WellKnownMacros.Add("FFERRTAG", typeof(int)); - astProcessor.WellKnownMacros.Add("MKTAG", typeof(int)); - astProcessor.WellKnownMacros.Add("UINT64_C", typeof(ulong)); - astProcessor.WellKnownMacros.Add("AV_VERSION_INT", typeof(int)); - astProcessor.WellKnownMacros.Add("AV_VERSION", typeof(string)); - astProcessor.WellKnownMacros.Add("_DHUGE_EXP", typeof(int)); - astProcessor.WellKnownMacros.Add("_DMAX", typeof(long)); - astProcessor.WellKnownMacros.Add("_FMAX", typeof(long)); - astProcessor.WellKnownMacros.Add("_LMAX", typeof(long)); - - var defines = new[] { "__STDC_CONSTANT_MACROS" }; - - var g = new Generator(astProcessor) + var processor = new ASTProcessor(processingContext); + astContexts.ForEach(processor.Process); + + // generate files + var inlineFunctions = ExistingInlineFunctionsHelper.LoadInlineFunctions(Path.Combine(options.SolutionDir, 
"FFmpeg.AutoGen/generated/FFmpeg.functions.inline.g.cs")); + var generationContext = new GenerationContext { - IncludeDirs = new[] { options.FFmpegIncludesDir }, - Defines = defines, - Exports = exports, Namespace = options.Namespace, - ClassName = options.ClassName, - ExistingInlineFunctions = existingInlineFunctions, - SuppressUnmanagedCodeSecurity = options.SuppressUnmanagedCodeSecurity + TypeName = options.TypeName, + SuppressUnmanagedCodeSecurity = options.SuppressUnmanagedCodeSecurity, + LibraryVersionMap = functionExports + .Select(x => new { x.LibraryName, x.LibraryVersion }) + .Distinct() + .ToDictionary(x => x.LibraryName, x => x.LibraryVersion), + Definitions = processingContext.Definitions.ToArray(), + ExistingInlineFunctionMap = inlineFunctions.ToDictionary(f => f.Name), + SolutionDir = options.SolutionDir + }; + + GenerateLegacyFFmpegAutoGen(generationContext); + GenerateAbstractions(generationContext); + GenerateStaticallyLinkedBindings(generationContext); + GenerateDynamicallyLinkedBindings(generationContext); + GenerateDynamicallyLoadedBindings(generationContext); + } + + private static IEnumerable Parse(string includesDir) + { + var p = new Parser + { + IncludeDirs = new[] { includesDir }, + Defines = new[] { "__STDC_CONSTANT_MACROS" } + }; + + // libavutil + yield return p.Parse("libavutil/avutil.h"); + yield return p.Parse("libavutil/audio_fifo.h"); + yield return p.Parse("libavutil/channel_layout.h"); + yield return p.Parse("libavutil/cpu.h"); + yield return p.Parse("libavutil/file.h"); + yield return p.Parse("libavutil/frame.h"); + yield return p.Parse("libavutil/opt.h"); + yield return p.Parse("libavutil/imgutils.h"); + yield return p.Parse("libavutil/time.h"); + yield return p.Parse("libavutil/timecode.h"); + yield return p.Parse("libavutil/tree.h"); + yield return p.Parse("libavutil/hwcontext.h"); + yield return p.Parse("libavutil/hwcontext_dxva2.h"); + yield return p.Parse("libavutil/hwcontext_d3d11va.h"); + yield return 
p.Parse("libavutil/hdr_dynamic_metadata.h"); + yield return p.Parse("libavutil/mastering_display_metadata.h"); + + // libswresample + yield return p.Parse("libswresample/swresample.h"); + + // libpostproc + yield return p.Parse("libpostproc/postprocess.h"); + + // libswscale + yield return p.Parse("libswscale/swscale.h"); + + // libavcodec + yield return p.Parse("libavcodec/avcodec.h"); + yield return p.Parse("libavcodec/bsf.h"); + yield return p.Parse("libavcodec/dxva2.h"); + yield return p.Parse("libavcodec/d3d11va.h"); + + // libavformat + yield return p.Parse("libavformat/avformat.h"); + + // libavfilter + yield return p.Parse("libavfilter/avfilter.h"); + yield return p.Parse("libavfilter/buffersrc.h"); + yield return p.Parse("libavfilter/buffersink.h"); + + // libavdevice + yield return p.Parse("libavdevice/avdevice.h"); + } + + private static void GenerateLegacyFFmpegAutoGen(GenerationContext baseContext) + { + var context = baseContext with + { + IsLegacyGenerationOn = true, + OutputDir = Path.Combine(baseContext.SolutionDir, @"FFmpeg.AutoGen\generated") + }; + + LibrariesGenerator.Generate($"{context.TypeName}.libraries.g.cs", context); + MacrosGenerator.Generate($"{context.TypeName}.macros.g.cs", context); + EnumsGenerator.Generate("Enums.g.cs", context); + DelegatesGenerator.Generate("Delegates.g.cs", context); + FixedArraysGenerator.Generate("Arrays.g.cs", context); + StructuresGenerator.Generate("Structs.g.cs", context); + FunctionsGenerator.GenerateFacade($"{context.TypeName}.functions.facade.g.cs", context); + InlineFunctionsGenerator.Generate($"{context.TypeName}.functions.inline.g.cs", context); + FunctionsGenerator.GenerateVectors("vectors.g.cs", context with { TypeName = "vectors" }); + FunctionsGenerator.GenerateDynamicallyLoaded("DynamicallyLoadedBindings.g.cs", context with { TypeName = "DynamicallyLoadedBindings" }); + } + + private static void GenerateAbstractions(GenerationContext baseContext) + { + var context = baseContext with + { + 
Namespace = $"{baseContext.Namespace}.Abstractions", + OutputDir = Path.Combine(baseContext.SolutionDir, @"FFmpeg.AutoGen.Abstractions\generated") + }; + + MacrosGenerator.Generate($"{context.TypeName}.macros.g.cs", context); + EnumsGenerator.Generate("Enums.g.cs", context); + DelegatesGenerator.Generate("Delegates.g.cs", context); + FixedArraysGenerator.Generate("Arrays.g.cs", context); + StructuresGenerator.Generate("Structs.g.cs", context); + FunctionsGenerator.GenerateFacade($"{context.TypeName}.functions.facade.g.cs", context); + FunctionsGenerator.GenerateVectors("vectors.g.cs", context with { TypeName = "vectors" }); + InlineFunctionsGenerator.Generate($"{context.TypeName}.functions.inline.g.cs", context); + } + + private static void GenerateStaticallyLinkedBindings(GenerationContext baseContext) + { + var context = baseContext with + { + Namespace = $"{baseContext.Namespace}.Bindings.StaticallyLinked", TypeName = "StaticallyLinkedBindings", + OutputDir = Path.Combine(baseContext.SolutionDir, @"FFmpeg.AutoGen.Bindings.StaticallyLinked\generated") + }; + FunctionsGenerator.GenerateStaticallyLinked("StaticallyLinkedBindings.g.cs", context); + } + + private static void GenerateDynamicallyLinkedBindings(GenerationContext baseContext) + { + var context = baseContext with + { + Namespace = $"{baseContext.Namespace}.Bindings.DynamicallyLinked", TypeName = "DynamicallyLinkedBindings", + OutputDir = Path.Combine(baseContext.SolutionDir, @"FFmpeg.AutoGen.Bindings.DynamicallyLinked\generated") + }; + + FunctionsGenerator.GenerateDynamicallyLinked("DynamicallyLinkedBindings.g.cs", context); + } + + private static void GenerateDynamicallyLoadedBindings(GenerationContext baseContext) + { + var context = baseContext with + { + Namespace = $"{baseContext.Namespace}.Bindings.DynamicallyLoaded", TypeName = "DynamicallyLoadedBindings", + OutputDir = Path.Combine(baseContext.SolutionDir, @"FFmpeg.AutoGen.Bindings.DynamicallyLoaded\generated") }; - g.Parse("libavutil/avutil.h"); 
- g.Parse("libavutil/audio_fifo.h"); - g.Parse("libavutil/channel_layout.h"); - g.Parse("libavutil/cpu.h"); - g.Parse("libavutil/file.h"); - g.Parse("libavutil/frame.h"); - g.Parse("libavutil/opt.h"); - g.Parse("libavutil/imgutils.h"); - g.Parse("libavutil/time.h"); - g.Parse("libavutil/timecode.h"); - g.Parse("libavutil/tree.h"); - g.Parse("libavutil/hwcontext.h"); - g.Parse("libavutil/hwcontext_dxva2.h"); - g.Parse("libavutil/hwcontext_d3d11va.h"); - g.Parse("libavutil/hdr_dynamic_metadata.h"); - g.Parse("libavutil/mastering_display_metadata.h"); - - g.Parse("libswresample/swresample.h"); - - g.Parse("libpostproc/postprocess.h"); - - g.Parse("libswscale/swscale.h"); - - g.Parse("libavcodec/avcodec.h"); - g.Parse("libavcodec/dxva2.h"); - g.Parse("libavcodec/d3d11va.h"); - - g.Parse("libavformat/avformat.h"); - - g.Parse("libavfilter/avfilter.h"); - g.Parse("libavfilter/buffersrc.h"); - g.Parse("libavfilter/buffersink.h"); - - g.Parse("libavdevice/avdevice.h"); - - g.WriteLibraries(Path.Combine(options.OutputDir, "FFmpeg.libraries.g.cs")); - g.WriteMacros(Path.Combine(options.OutputDir, "FFmpeg.macros.g.cs")); - g.WriteEnums(Path.Combine(options.OutputDir, "FFmpeg.enums.g.cs")); - g.WriteDelegates(Path.Combine(options.OutputDir, "FFmpeg.delegates.g.cs")); - g.WriteArrays(Path.Combine(options.OutputDir, "FFmpeg.arrays.g.cs")); - g.WriteStructures(Path.Combine(options.OutputDir, "FFmpeg.structs.g.cs")); - g.WriteIncompleteStructures(Path.Combine(options.OutputDir, "FFmpeg.structs.incomplete.g.cs")); - g.WriteExportFunctions(Path.Combine(options.OutputDir, "FFmpeg.functions.export.g.cs")); - g.WriteInlineFunctions(Path.Combine(options.OutputDir, "FFmpeg.functions.inline.g.cs")); + LibrariesGenerator.Generate("DynamicallyLoadedBindings.libraries.g.cs", context); + FunctionsGenerator.GenerateDynamicallyLoaded("DynamicallyLoadedBindings.g.cs", context); } } diff --git a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Writer.cs b/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Writer.cs 
deleted file mode 100644 index 654ab266..00000000 --- a/FFmpeg.AutoGen.CppSharpUnsafeGenerator/Writer.cs +++ /dev/null @@ -1,311 +0,0 @@ -using System; -using System.CodeDom.Compiler; -using System.Linq; -using System.Security; -using System.Text; -using FFmpeg.AutoGen.CppSharpUnsafeGenerator.Definitions; - -namespace FFmpeg.AutoGen.CppSharpUnsafeGenerator; - -internal class Writer -{ - private readonly IndentedTextWriter _writer; - - public Writer(IndentedTextWriter writer) => _writer = writer; - - public bool SuppressUnmanagedCodeSecurity { get; init; } - - public void WriteMacro(MacroDefinition macro) - { - if (macro.IsValid) - { - WriteSummary(macro); - var constOrStatic = macro.IsConst ? "const" : "static readonly"; - WriteLine($"public {constOrStatic} {macro.TypeName} {macro.Name} = {macro.Expression};"); - } - else - WriteLine($"// public static {macro.Name} = {macro.Expression};"); - } - - public void WriteEnumeration(EnumerationDefinition enumeration) - { - WriteSummary(enumeration); - WriteObsoletion(enumeration); - WriteLine($"public enum {enumeration.Name} : {enumeration.TypeName}"); - - using (BeginBlock()) - foreach (var item in enumeration.Items) - { - WriteSummary(item); - WriteLine($"@{item.Name} = {item.Value},"); - } - } - - public void WriteStructure(StructureDefinition structure) - { - WriteSummary(structure); - if (!structure.IsComplete) WriteLine("/// This struct is incomplete."); - WriteObsoletion(structure); - if (structure.IsUnion) WriteLine("[StructLayout(LayoutKind.Explicit)]"); - WriteLine($"public unsafe partial struct {structure.Name}"); - - using (BeginBlock()) - foreach (var field in structure.Fields) - { - WriteSummary(field); - WriteObsoletion(field); - if (structure.IsUnion) WriteLine("[FieldOffset(0)]"); - WriteLine($"public {field.FieldType.Name} @{field.Name};"); - } - } - - public void WriteFixedArray(FixedArrayDefinition array) - { - WriteLine($"public unsafe struct {array.Name}"); - using var _ = BeginBlock(); - var prefix 
= "_"; - var size = array.Size; - var elementType = array.ElementType.Name; - - WriteLine($"public static readonly int Size = {size};"); - - if (array.IsPrimitive) WritePrimitiveFixedArray(array.Name, elementType, size, prefix); - else WriteComplexFixedArray(elementType, size, prefix); - - WriteLine($"public static implicit operator {elementType}[]({array.Name} @struct) => @struct.ToArray();"); - } - - public void WriteFunction(ExportFunctionDefinition function) - { - function.ReturnType.Attributes.ToList().ForEach(WriteLine); - var parameterNames = GetParameterNames(function.Parameters); - var parameters = GetParameters(function.Parameters); - var functionPtrName = function.Name + "_fptr"; - var functionDelegateName = function.Name + "_delegate"; - var returnCommand = function.ReturnType.Name == "void" ? string.Empty : "return "; - - if (SuppressUnmanagedCodeSecurity) WriteLine("[SuppressUnmanagedCodeSecurity]"); - WriteLine("[UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)]"); - WriteLine($"private delegate {function.ReturnType.Name} {functionDelegateName}({parameters});"); - Write($"private static {functionDelegateName} {functionPtrName} = "); - WriteDefaultFunctionDelegateExpression(function, parameterNames, functionDelegateName, functionPtrName, returnCommand); - WriteLine(";"); - - WriteSummary(function); - function.Parameters.ToList().ForEach(x => WriteParam(x, x.Name)); - WriteReturnComment(function.ReturnComment); - - WriteObsoletion(function); - WriteLine($"public static {function.ReturnType.Name} {function.Name}({parameters})"); - using (BeginBlock()) WriteLine($"{returnCommand}{functionPtrName}({parameterNames});"); - WriteLine(); - } - - public void WriteFunction(InlineFunctionDefinition function) - { - function.ReturnType.Attributes.ToList().ForEach(WriteLine); - var parameters = GetParameters(function.Parameters); - - WriteSummary(function); - function.Parameters.ToList().ForEach(x => WriteParam(x, x.Name)); - 
WriteReturnComment(function.ReturnComment); - - WriteObsoletion(function); - WriteLine($"public static {function.ReturnType.Name} {function.Name}({parameters})"); - - var lines = function.Body.Split(new[] { '\n', '\r' }, StringSplitOptions.RemoveEmptyEntries).ToList(); - lines.ForEach(WriteLineWithoutIntent); - WriteLine($"// original body hash: {function.OriginalBodyHash}"); - WriteLine(); - } - - - private void WriteDefaultFunctionDelegateExpression(ExportFunctionDefinition function, - string parameterNames, string functionDelegateName, string functionPtrName, string returnCommand) - { - var delegateParameters = GetParameters(function.Parameters, false); - - WriteLine($"({delegateParameters}) =>"); - - using (BeginBlock(true)) - { - var getOrLoadLibrary = $"GetOrLoadLibrary(\"{function.LibraryName}\")"; - var getDelegate = $"GetFunctionDelegate<{functionDelegateName}>({getOrLoadLibrary}, \"{function.Name}\")"; - - WriteLine($"{functionPtrName} = {getDelegate};"); - WriteLine($"if ({functionPtrName} == null)"); - - using (BeginBlock()) - { - Write($"{functionPtrName} = "); - WriteNotSupportedFunctionDelegateExpression(function); - WriteLine(";"); - } - - WriteLine($"{returnCommand}{functionPtrName}({parameterNames});"); - } - } - - private void WriteNotSupportedFunctionDelegateExpression(ExportFunctionDefinition function) - { - WriteLine("delegate "); - using var _ = BeginBlock(true); - WriteLine($"throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, \"{function.Name}\"));"); - } - - public void WriteDelegate(DelegateDefinition @delegate) - { - WriteSummary(@delegate); - @delegate.Parameters.ToList().ForEach(x => WriteParam(x, x.Name)); - - var parameters = GetParameters(@delegate.Parameters); - WriteLine("[UnmanagedFunctionPointer(CallingConvention.Cdecl)]"); - WriteLine($"public unsafe delegate {@delegate.ReturnType.Name} {@delegate.FunctionName} ({parameters});"); - - WriteLine($"public unsafe struct {@delegate.Name}"); - 
using var _ = BeginBlock(); - WriteLine("public IntPtr Pointer;"); - Write($"public static implicit operator {@delegate.Name}({@delegate.FunctionName} func) => "); - Write($"new {@delegate.Name} {{ Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }};"); - WriteLine(); - } - - public void WriteLine() - { - _writer.WriteLine(); - } - - public void WriteLine(string line) - { - _writer.WriteLine(line); - } - - public void WriteLineWithoutIntent(string line) - { - _writer.WriteLineNoTabs(line); - } - - public IDisposable BeginBlock(bool inline = false) - { - WriteLine("{"); - _writer.Indent++; - return new End(() => - { - _writer.Indent--; - - if (inline) - _writer.Write("}"); - else - _writer.WriteLine("}"); - }); - } - - private void WritePrimitiveFixedArray(string arrayName, string elementType, int size, string prefix) - { - WriteLine($"fixed {elementType} {prefix}[{size}];"); - WriteLine(); - - var @fixed = $"fixed ({arrayName}* p = &this)"; - - WriteLine($"public {elementType} this[uint i]"); - - using (BeginBlock()) - { - WriteLine($"get {{ if (i >= Size) throw new ArgumentOutOfRangeException(); {@fixed} {{ return p->{prefix}[i]; }} }}"); - WriteLine($"set {{ if (i >= Size) throw new ArgumentOutOfRangeException(); {@fixed} {{ p->{prefix}[i] = value; }} }}"); - } - - WriteLine($"public {elementType}[] ToArray()"); - using (BeginBlock()) - WriteLine($"{@fixed} {{ var a = new {elementType}[Size]; for (uint i = 0; i < Size; i++) a[i] = p->{prefix}[i]; return a; }}"); - - WriteLine($"public void UpdateFrom({elementType}[] array)"); - using (BeginBlock()) - WriteLine($"{@fixed} {{ uint i = 0; foreach(var value in array) {{ p->{prefix}[i++] = value; if (i >= Size) return; }} }}"); - } - - private void WriteComplexFixedArray(string elementType, int size, string prefix) - { - WriteLine(string.Join(" ", Enumerable.Range(0, size).Select(i => $"{elementType} {prefix}{i};"))); - WriteLine(); - - var @fixed = $"fixed ({elementType}* p0 = 
&{prefix}0)"; - - WriteLine($"public {elementType} this[uint i]"); - - using (BeginBlock()) - { - WriteLine($"get {{ if (i >= Size) throw new ArgumentOutOfRangeException(); {@fixed} {{ return *(p0 + i); }} }}"); - WriteLine($"set {{ if (i >= Size) throw new ArgumentOutOfRangeException(); {@fixed} {{ *(p0 + i) = value; }} }}"); - } - - WriteLine($"public {elementType}[] ToArray()"); - using (BeginBlock()) - WriteLine($"{@fixed} {{ var a = new {elementType}[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; }}"); - - WriteLine($"public void UpdateFrom({elementType}[] array)"); - using (BeginBlock()) - WriteLine($"{@fixed} {{ uint i = 0; foreach(var value in array) {{ *(p0 + i++) = value; if (i >= Size) return; }} }}"); - } - - private static string GetParameters(FunctionParameter[] parameters, bool withAttributes = true) - { - return string.Join(", ", - parameters.Select(x => - { - var sb = new StringBuilder(); - if (withAttributes && x.Type.Attributes.Any()) sb.Append($"{string.Join("", x.Type.Attributes)} "); - if (x.Type.ByReference) sb.Append("ref "); - sb.Append($"{x.Type.Name} @{x.Name}"); - return sb.ToString(); - })); - } - - private static string GetParameterNames(FunctionParameter[] parameters) - { - return string.Join(", ", - parameters.Select(x => - { - var sb = new StringBuilder(); - if (x.Type.ByReference) sb.Append("ref "); - sb.Append($"@{x.Name}"); - return sb.ToString(); - })); - } - - private void WriteSummary(ICanGenerateXmlDoc value) - { - if (!string.IsNullOrWhiteSpace(value.Content)) WriteLine($"/// {SecurityElement.Escape(value.Content.Trim())}"); - } - - private void WriteParam(ICanGenerateXmlDoc value, string name) - { - if (!string.IsNullOrWhiteSpace(value.Content)) WriteLine($"/// {SecurityElement.Escape(value.Content.Trim())}"); - } - - private void WriteReturnComment(string content) - { - if (!string.IsNullOrWhiteSpace(content)) WriteLine($"/// {SecurityElement.Escape(content.Trim())}"); - } - - private void 
WriteObsoletion(IObsoletionAware obsoletionAware) - { - var obsoletion = obsoletionAware.Obsoletion; - if (obsoletion.IsObsolete) WriteLine($"[Obsolete(\"{EscapeQuotes(obsoletion.Message)}\")]"); - } - - private void Write(string value) - { - _writer.Write(value); - } - private static string EscapeQuotes(string s) => s.Replace("\"", "\\\""); - - private class End : IDisposable - { - private readonly Action _action; - - public End(Action action) => _action = action; - - public void Dispose() => _action(); - } -} diff --git a/FFmpeg.AutoGen.Example/FFmpeg.AutoGen.Example.csproj b/FFmpeg.AutoGen.Example/FFmpeg.AutoGen.Example.csproj index 624d3b0c..8b61bf26 100644 --- a/FFmpeg.AutoGen.Example/FFmpeg.AutoGen.Example.csproj +++ b/FFmpeg.AutoGen.Example/FFmpeg.AutoGen.Example.csproj @@ -16,11 +16,11 @@ - + - + diff --git a/FFmpeg.AutoGen.Example/FFmpegBinariesHelper.cs b/FFmpeg.AutoGen.Example/FFmpegBinariesHelper.cs index 8a90127d..50e8a60a 100644 --- a/FFmpeg.AutoGen.Example/FFmpegBinariesHelper.cs +++ b/FFmpeg.AutoGen.Example/FFmpegBinariesHelper.cs @@ -1,36 +1,34 @@ using System; using System.IO; using System.Runtime.InteropServices; +using FFmpeg.AutoGen.Bindings.DynamicallyLoaded; -namespace FFmpeg.AutoGen.Example +namespace FFmpeg.AutoGen.Example; + +public class FFmpegBinariesHelper { - public class FFmpegBinariesHelper + internal static void RegisterFFmpegBinaries() { - internal static void RegisterFFmpegBinaries() + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { - if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + var current = Environment.CurrentDirectory; + var probe = Path.Combine("FFmpeg", "bin", Environment.Is64BitProcess ? "x64" : "x86"); + + while (current != null) { - var current = Environment.CurrentDirectory; - var probe = Path.Combine("FFmpeg", "bin", Environment.Is64BitProcess ? 
"x64" : "x86"); + var ffmpegBinaryPath = Path.Combine(current, probe); - while (current != null) + if (Directory.Exists(ffmpegBinaryPath)) { - var ffmpegBinaryPath = Path.Combine(current, probe); - - if (Directory.Exists(ffmpegBinaryPath)) - { - Console.WriteLine($"FFmpeg binaries found in: {ffmpegBinaryPath}"); - ffmpeg.RootPath = ffmpegBinaryPath; - return; - } - - current = Directory.GetParent(current)?.FullName; + Console.WriteLine($"FFmpeg binaries found in: {ffmpegBinaryPath}"); + DynamicallyLoadedBindings.LibrariesPath = ffmpegBinaryPath; + return; } + + current = Directory.GetParent(current)?.FullName; } - else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) - ffmpeg.RootPath = "/lib/x86_64-linux-gnu/"; - else - throw new NotSupportedException(); // fell free add support for platform of your choose } + else + throw new NotSupportedException(); // fell free add support for platform of your choose } } diff --git a/FFmpeg.AutoGen.Example/FFmpegHelper.cs b/FFmpeg.AutoGen.Example/FFmpegHelper.cs index 04c78e7f..2cf08128 100644 --- a/FFmpeg.AutoGen.Example/FFmpegHelper.cs +++ b/FFmpeg.AutoGen.Example/FFmpegHelper.cs @@ -1,23 +1,23 @@ using System; using System.Runtime.InteropServices; +using FFmpeg.AutoGen.Abstractions; -namespace FFmpeg.AutoGen.Example +namespace FFmpeg.AutoGen.Example; + +internal static class FFmpegHelper { - internal static class FFmpegHelper + public static unsafe string av_strerror(int error) { - public static unsafe string av_strerror(int error) - { - var bufferSize = 1024; - var buffer = stackalloc byte[bufferSize]; - ffmpeg.av_strerror(error, buffer, (ulong) bufferSize); - var message = Marshal.PtrToStringAnsi((IntPtr) buffer); - return message; - } + var bufferSize = 1024; + var buffer = stackalloc byte[bufferSize]; + ffmpeg.av_strerror(error, buffer, (ulong)bufferSize); + var message = Marshal.PtrToStringAnsi((IntPtr)buffer); + return message; + } - public static int ThrowExceptionIfError(this int error) - { - if (error < 0) 
throw new ApplicationException(av_strerror(error)); - return error; - } + public static int ThrowExceptionIfError(this int error) + { + if (error < 0) throw new ApplicationException(av_strerror(error)); + return error; } } diff --git a/FFmpeg.AutoGen.Example/H264VideoStreamEncoder.cs b/FFmpeg.AutoGen.Example/H264VideoStreamEncoder.cs index ee2bd130..ddffdb43 100644 --- a/FFmpeg.AutoGen.Example/H264VideoStreamEncoder.cs +++ b/FFmpeg.AutoGen.Example/H264VideoStreamEncoder.cs @@ -1,191 +1,191 @@ using System; using System.Drawing; using System.IO; +using FFmpeg.AutoGen.Abstractions; -namespace FFmpeg.AutoGen.Example +namespace FFmpeg.AutoGen.Example; + +public sealed unsafe class H264VideoStreamEncoder : IDisposable { - public sealed unsafe class H264VideoStreamEncoder : IDisposable + private readonly Size _frameSize; + private readonly int _linesizeU; + private readonly int _linesizeV; + private readonly int _linesizeY; + private readonly AVCodec* _pCodec; + private readonly AVCodecContext* _pCodecContext; + private readonly Stream _stream; + private readonly int _uSize; + private readonly int _ySize; + + public H264VideoStreamEncoder(Stream stream, int fps, Size frameSize) { - private readonly Size _frameSize; - private readonly int _linesizeU; - private readonly int _linesizeV; - private readonly int _linesizeY; - private readonly AVCodec* _pCodec; - private readonly AVCodecContext* _pCodecContext; - private readonly Stream _stream; - private readonly int _uSize; - private readonly int _ySize; - - public H264VideoStreamEncoder(Stream stream, int fps, Size frameSize) - { - _stream = stream; - _frameSize = frameSize; + _stream = stream; + _frameSize = frameSize; - var codecId = AVCodecID.AV_CODEC_ID_H264; - _pCodec = ffmpeg.avcodec_find_encoder(codecId); - if (_pCodec == null) throw new InvalidOperationException("Codec not found."); + var codecId = AVCodecID.AV_CODEC_ID_H264; + _pCodec = ffmpeg.avcodec_find_encoder(codecId); + if (_pCodec == null) throw new 
InvalidOperationException("Codec not found."); - _pCodecContext = ffmpeg.avcodec_alloc_context3(_pCodec); - _pCodecContext->width = frameSize.Width; - _pCodecContext->height = frameSize.Height; - _pCodecContext->time_base = new AVRational { num = 1, den = fps }; - _pCodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P; - ffmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "veryslow", 0); + _pCodecContext = ffmpeg.avcodec_alloc_context3(_pCodec); + _pCodecContext->width = frameSize.Width; + _pCodecContext->height = frameSize.Height; + _pCodecContext->time_base = new AVRational { num = 1, den = fps }; + _pCodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P; + ffmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "veryslow", 0); - ffmpeg.avcodec_open2(_pCodecContext, _pCodec, null).ThrowExceptionIfError(); + ffmpeg.avcodec_open2(_pCodecContext, _pCodec, null).ThrowExceptionIfError(); - _linesizeY = frameSize.Width; - _linesizeU = frameSize.Width / 2; - _linesizeV = frameSize.Width / 2; + _linesizeY = frameSize.Width; + _linesizeU = frameSize.Width / 2; + _linesizeV = frameSize.Width / 2; - _ySize = _linesizeY * frameSize.Height; - _uSize = _linesizeU * frameSize.Height / 2; - } + _ySize = _linesizeY * frameSize.Height; + _uSize = _linesizeU * frameSize.Height / 2; + } - public void Dispose() - { - ffmpeg.avcodec_close(_pCodecContext); - ffmpeg.av_free(_pCodecContext); - } + public void Dispose() + { + ffmpeg.avcodec_close(_pCodecContext); + ffmpeg.av_free(_pCodecContext); + } - public void Encode(AVFrame frame) + public void Encode(AVFrame frame) + { + if (frame.format != (int)_pCodecContext->pix_fmt) + throw new ArgumentException("Invalid pixel format.", nameof(frame)); + if (frame.width != _frameSize.Width) throw new ArgumentException("Invalid width.", nameof(frame)); + if (frame.height != _frameSize.Height) throw new ArgumentException("Invalid height.", nameof(frame)); + if (frame.linesize[0] < _linesizeY) throw new ArgumentException("Invalid Y 
linesize.", nameof(frame)); + if (frame.linesize[1] < _linesizeU) throw new ArgumentException("Invalid U linesize.", nameof(frame)); + if (frame.linesize[2] < _linesizeV) throw new ArgumentException("Invalid V linesize.", nameof(frame)); + if (frame.data[1] - frame.data[0] < _ySize) + throw new ArgumentException("Invalid Y data size.", nameof(frame)); + if (frame.data[2] - frame.data[1] < _uSize) + throw new ArgumentException("Invalid U data size.", nameof(frame)); + + var pPacket = ffmpeg.av_packet_alloc(); + + try { - if (frame.format != (int) _pCodecContext->pix_fmt) - throw new ArgumentException("Invalid pixel format.", nameof(frame)); - if (frame.width != _frameSize.Width) throw new ArgumentException("Invalid width.", nameof(frame)); - if (frame.height != _frameSize.Height) throw new ArgumentException("Invalid height.", nameof(frame)); - if (frame.linesize[0] < _linesizeY) throw new ArgumentException("Invalid Y linesize.", nameof(frame)); - if (frame.linesize[1] < _linesizeU) throw new ArgumentException("Invalid U linesize.", nameof(frame)); - if (frame.linesize[2] < _linesizeV) throw new ArgumentException("Invalid V linesize.", nameof(frame)); - if (frame.data[1] - frame.data[0] < _ySize) - throw new ArgumentException("Invalid Y data size.", nameof(frame)); - if (frame.data[2] - frame.data[1] < _uSize) - throw new ArgumentException("Invalid U data size.", nameof(frame)); - - var pPacket = ffmpeg.av_packet_alloc(); - - try + // Basic encoding loop explained: + // https://ffmpeg.org/doxygen/4.1/group__lavc__encdec.html + + // Give the encoder a frame to encode + ffmpeg.avcodec_send_frame(_pCodecContext, &frame).ThrowExceptionIfError(); + + // From https://ffmpeg.org/doxygen/4.1/group__lavc__encdec.html: + // For encoding, call avcodec_receive_packet(). On success, it will return an AVPacket with a compressed frame. + // Repeat this call until it returns AVERROR(EAGAIN) or an error. 
+ // The AVERROR(EAGAIN) return value means that new input data is required to return new output. + // In this case, continue with sending input. + // For each input frame/packet, the codec will typically return 1 output frame/packet, but it can also be 0 or more than 1. + bool hasFinishedWithThisFrame; + + do { - // Basic encoding loop explained: - // https://ffmpeg.org/doxygen/4.1/group__lavc__encdec.html + // Clear/wipe the receiving packet + // (not sure if this is needed, since docs for avcoded_receive_packet say that it will call that first-thing + ffmpeg.av_packet_unref(pPacket); - // Give the encoder a frame to encode - ffmpeg.avcodec_send_frame(_pCodecContext, &frame).ThrowExceptionIfError(); + // Receive back a packet; there might be 0, 1 or many packets to receive for an input frame. + var response = ffmpeg.avcodec_receive_packet(_pCodecContext, pPacket); - // From https://ffmpeg.org/doxygen/4.1/group__lavc__encdec.html: - // For encoding, call avcodec_receive_packet(). On success, it will return an AVPacket with a compressed frame. - // Repeat this call until it returns AVERROR(EAGAIN) or an error. - // The AVERROR(EAGAIN) return value means that new input data is required to return new output. - // In this case, continue with sending input. - // For each input frame/packet, the codec will typically return 1 output frame/packet, but it can also be 0 or more than 1. - bool hasFinishedWithThisFrame; + bool isPacketValid; - do + if (response == 0) { - // Clear/wipe the receiving packet - // (not sure if this is needed, since docs for avcoded_receive_packet say that it will call that first-thing - ffmpeg.av_packet_unref(pPacket); - - // Receive back a packet; there might be 0, 1 or many packets to receive for an input frame. - var response = ffmpeg.avcodec_receive_packet(_pCodecContext, pPacket); - - bool isPacketValid; - - if (response == 0) - { - // 0 on success; as in, successfully retrieved a packet, and expecting us to retrieve another one. 
- isPacketValid = true; - hasFinishedWithThisFrame = false; - } - else if (response == ffmpeg.AVERROR(ffmpeg.EAGAIN)) - { - // EAGAIN: there's no more output is available in the current state - user must try to send more input - isPacketValid = false; - hasFinishedWithThisFrame = true; - } - else if (response == ffmpeg.AVERROR(ffmpeg.AVERROR_EOF)) - { - // EOF: the encoder has been fully flushed, and there will be no more output packets - isPacketValid = false; - hasFinishedWithThisFrame = true; - } - else - { - // AVERROR(EINVAL): codec not opened, or it is a decoder other errors: legitimate encoding errors - // , otherwise negative error code: - throw new InvalidOperationException($"error from avcodec_receive_packet: {response}"); - } - - if (isPacketValid) - { - using var packetStream = new UnmanagedMemoryStream(pPacket->data, pPacket->size); - packetStream.CopyTo(_stream); - } - } while (!hasFinishedWithThisFrame); - } - finally - { - ffmpeg.av_packet_free(&pPacket); - } + // 0 on success; as in, successfully retrieved a packet, and expecting us to retrieve another one. 
+ isPacketValid = true; + hasFinishedWithThisFrame = false; + } + else if (response == ffmpeg.AVERROR(ffmpeg.EAGAIN)) + { + // EAGAIN: there's no more output is available in the current state - user must try to send more input + isPacketValid = false; + hasFinishedWithThisFrame = true; + } + else if (response == ffmpeg.AVERROR(ffmpeg.AVERROR_EOF)) + { + // EOF: the encoder has been fully flushed, and there will be no more output packets + isPacketValid = false; + hasFinishedWithThisFrame = true; + } + else + { + // AVERROR(EINVAL): codec not opened, or it is a decoder other errors: legitimate encoding errors + // , otherwise negative error code: + throw new InvalidOperationException($"error from avcodec_receive_packet: {response}"); + } + + if (isPacketValid) + { + using var packetStream = new UnmanagedMemoryStream(pPacket->data, pPacket->size); + packetStream.CopyTo(_stream); + } + } while (!hasFinishedWithThisFrame); + } + finally + { + ffmpeg.av_packet_free(&pPacket); } + } - public void Drain() + public void Drain() + { + // From https://ffmpeg.org/doxygen/4.1/group__lavc__encdec.html: + // End of stream situations. These require "flushing" (aka draining) the codec, as the codec might buffer multiple frames or packets internally for performance or out of necessity (consider B-frames). This is handled as follows: + // Instead of valid input, send NULL to the avcodec_send_packet() (decoding) or avcodec_send_frame() (encoding) functions. This will enter draining mode. + // Call avcodec_receive_frame() (decoding) or avcodec_receive_packet() (encoding) in a loop until AVERROR_EOF is returned. The functions will not return AVERROR(EAGAIN), unless you forgot to enter draining mode. + + var pPacket = ffmpeg.av_packet_alloc(); + + try { - // From https://ffmpeg.org/doxygen/4.1/group__lavc__encdec.html: - // End of stream situations. 
These require "flushing" (aka draining) the codec, as the codec might buffer multiple frames or packets internally for performance or out of necessity (consider B-frames). This is handled as follows: - // Instead of valid input, send NULL to the avcodec_send_packet() (decoding) or avcodec_send_frame() (encoding) functions. This will enter draining mode. - // Call avcodec_receive_frame() (decoding) or avcodec_receive_packet() (encoding) in a loop until AVERROR_EOF is returned. The functions will not return AVERROR(EAGAIN), unless you forgot to enter draining mode. + // Send a null frame to enter draining mode + ffmpeg.avcodec_send_frame(_pCodecContext, null).ThrowExceptionIfError(); - var pPacket = ffmpeg.av_packet_alloc(); + bool hasFinishedDraining; - try + do { - // Send a null frame to enter draining mode - ffmpeg.avcodec_send_frame(_pCodecContext, null).ThrowExceptionIfError(); + // Clear/wipe the receiving packet + // (not sure if this is needed, since docs for avcoded_receive_packet say that it will call that first-thing + ffmpeg.av_packet_unref(pPacket); - bool hasFinishedDraining; + var response = ffmpeg.avcodec_receive_packet(_pCodecContext, pPacket); - do + bool isPacketValid; + + if (response == 0) { - // Clear/wipe the receiving packet - // (not sure if this is needed, since docs for avcoded_receive_packet say that it will call that first-thing - ffmpeg.av_packet_unref(pPacket); - - var response = ffmpeg.avcodec_receive_packet(_pCodecContext, pPacket); - - bool isPacketValid; - - if (response == 0) - { - // 0 on success; as in, successfully retrieved a packet, and expecting us to retrieve another one. - isPacketValid = true; - hasFinishedDraining = false; - } - else if (response == ffmpeg.AVERROR(ffmpeg.AVERROR_EOF)) - { - // EOF: the encoder has been fully flushed, and there will be no more output packets - isPacketValid = false; - hasFinishedDraining = true; - } - else - { - // Some other error. 
- // Should probably throw here, but in testing we get error -541478725 - isPacketValid = false; - hasFinishedDraining = true; - } - - if (isPacketValid) - { - using var packetStream = new UnmanagedMemoryStream(pPacket->data, pPacket->size); - packetStream.CopyTo(_stream); - } - } while (!hasFinishedDraining); - } - finally - { - ffmpeg.av_packet_free(&pPacket); - } + // 0 on success; as in, successfully retrieved a packet, and expecting us to retrieve another one. + isPacketValid = true; + hasFinishedDraining = false; + } + else if (response == ffmpeg.AVERROR(ffmpeg.AVERROR_EOF)) + { + // EOF: the encoder has been fully flushed, and there will be no more output packets + isPacketValid = false; + hasFinishedDraining = true; + } + else + { + // Some other error. + // Should probably throw here, but in testing we get error -541478725 + isPacketValid = false; + hasFinishedDraining = true; + } + + if (isPacketValid) + { + using var packetStream = new UnmanagedMemoryStream(pPacket->data, pPacket->size); + packetStream.CopyTo(_stream); + } + } while (!hasFinishedDraining); + } + finally + { + ffmpeg.av_packet_free(&pPacket); } } } diff --git a/FFmpeg.AutoGen.Example/MediaDecoder.cs b/FFmpeg.AutoGen.Example/MediaDecoder.cs index 0e8d269b..b7a7d548 100644 --- a/FFmpeg.AutoGen.Example/MediaDecoder.cs +++ b/FFmpeg.AutoGen.Example/MediaDecoder.cs @@ -1,64 +1,64 @@ using System; using System.Drawing; using System.Runtime.InteropServices; +using FFmpeg.AutoGen.Abstractions; -namespace FFmpeg.AutoGen.Example +namespace FFmpeg.AutoGen.Example; + +public sealed unsafe class VideoConverter : IDisposable { - public sealed unsafe class VideoConverter : IDisposable - { - private readonly SwsContext* _pConvertContext; + private readonly SwsContext* _pConvertContext; - public VideoConverter(Size sourceSize, AVPixelFormat sourcePixelFormat, - Size destinationSize, AVPixelFormat destinationPixelFormat) - { - _pConvertContext = ffmpeg.sws_getContext(sourceSize.Width, - sourceSize.Height, - 
sourcePixelFormat, - destinationSize.Width, - destinationSize.Height, - destinationPixelFormat, - ffmpeg.SWS_FAST_BILINEAR, - null, - null, - null); - if (_pConvertContext == null) - throw new ApplicationException("Could not initialize the conversion context."); + public VideoConverter(Size sourceSize, AVPixelFormat sourcePixelFormat, + Size destinationSize, AVPixelFormat destinationPixelFormat) + { + _pConvertContext = ffmpeg.sws_getContext(sourceSize.Width, + sourceSize.Height, + sourcePixelFormat, + destinationSize.Width, + destinationSize.Height, + destinationPixelFormat, + ffmpeg.SWS_FAST_BILINEAR, + null, + null, + null); + if (_pConvertContext == null) + throw new ApplicationException("Could not initialize the conversion context."); - var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, - destinationSize.Width, - destinationSize.Height, - 1); - var convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize); - var dstData = new byte_ptrArray4(); - var dstLinesize = new int_array4(); + var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, + destinationSize.Width, + destinationSize.Height, + 1); + var convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize); + var dstData = new byte_ptr4(); + var dstLinesize = new int4(); - ffmpeg.av_image_fill_arrays(ref dstData, - ref dstLinesize, - (byte*) convertedFrameBufferPtr, - destinationPixelFormat, - destinationSize.Width, - destinationSize.Height, - 1); - } + ffmpeg.av_image_fill_arrays(ref dstData, + ref dstLinesize, + (byte*)convertedFrameBufferPtr, + destinationPixelFormat, + destinationSize.Width, + destinationSize.Height, + 1); + } - public void Dispose() - { - } + public void Dispose() + { + } - public AVFrame Convert(AVFrame sourceFrame) - { - var dstData = new byte_ptrArray4(); - var dstLinesize = new int_array4(); + public AVFrame Convert(AVFrame sourceFrame) + { + var dstData = new byte_ptr4(); + var 
dstLinesize = new int4(); - ffmpeg.sws_scale(_pConvertContext, - sourceFrame.data, - sourceFrame.linesize, - 0, - sourceFrame.height, - dstData, - dstLinesize); + ffmpeg.sws_scale(_pConvertContext, + sourceFrame.data, + sourceFrame.linesize, + 0, + sourceFrame.height, + dstData, + dstLinesize); - return new AVFrame(); - } + return new AVFrame(); } } diff --git a/FFmpeg.AutoGen.Example/Program.cs b/FFmpeg.AutoGen.Example/Program.cs index 181cf4f4..3a26525a 100644 --- a/FFmpeg.AutoGen.Example/Program.cs +++ b/FFmpeg.AutoGen.Example/Program.cs @@ -5,213 +5,217 @@ using System.IO; using System.Linq; using System.Runtime.InteropServices; +using FFmpeg.AutoGen.Abstractions; +using FFmpeg.AutoGen.Bindings.DynamicallyLoaded; -namespace FFmpeg.AutoGen.Example +namespace FFmpeg.AutoGen.Example; + +internal class Program { - internal class Program + private static void Main(string[] args) { - private static void Main(string[] args) - { - Console.WriteLine("Current directory: " + Environment.CurrentDirectory); - Console.WriteLine("Running in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32"); + Console.WriteLine("Current directory: " + Environment.CurrentDirectory); + Console.WriteLine("Running in {0}-bit mode.", Environment.Is64BitProcess ? 
"64" : "32"); - FFmpegBinariesHelper.RegisterFFmpegBinaries(); + FFmpegBinariesHelper.RegisterFFmpegBinaries(); + + DynamicallyLoadedBindings.Initialize(); - Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}"); + Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}"); - SetupLogging(); - ConfigureHWDecoder(out var deviceType); + SetupLogging(); + ConfigureHWDecoder(out var deviceType); - Console.WriteLine("Decoding..."); - DecodeAllFramesToImages(deviceType); + Directory.CreateDirectory("frames"); + + Console.WriteLine("Decoding..."); + DecodeAllFramesToImages(deviceType); - Console.WriteLine("Encoding..."); - EncodeImagesToH264(); - } + Console.WriteLine("Encoding..."); + EncodeImagesToH264(); + } + + private static void ConfigureHWDecoder(out AVHWDeviceType HWtype) + { + HWtype = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE; + Console.WriteLine("Use hardware acceleration for decoding?[n]"); + var key = Console.ReadLine(); + var availableHWDecoders = new Dictionary(); - private static void ConfigureHWDecoder(out AVHWDeviceType HWtype) + if (key == "y") { - HWtype = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE; - Console.WriteLine("Use hardware acceleration for decoding?[n]"); - var key = Console.ReadLine(); - var availableHWDecoders = new Dictionary(); + Console.WriteLine("Select hardware decoder:"); + var type = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE; + var number = 0; - if (key == "y") + while ((type = ffmpeg.av_hwdevice_iterate_types(type)) != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE) { - Console.WriteLine("Select hardware decoder:"); - var type = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE; - var number = 0; - - while ((type = ffmpeg.av_hwdevice_iterate_types(type)) != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE) - { - Console.WriteLine($"{++number}. 
{type}"); - availableHWDecoders.Add(number, type); - } - - if (availableHWDecoders.Count == 0) - { - Console.WriteLine("Your system have no hardware decoders."); - HWtype = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE; - return; - } - - var decoderNumber = availableHWDecoders - .SingleOrDefault(t => t.Value == AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2).Key; - if (decoderNumber == 0) - decoderNumber = availableHWDecoders.First().Key; - Console.WriteLine($"Selected [{decoderNumber}]"); - int.TryParse(Console.ReadLine(), out var inputDecoderNumber); - availableHWDecoders.TryGetValue(inputDecoderNumber == 0 ? decoderNumber : inputDecoderNumber, - out HWtype); + Console.WriteLine($"{++number}. {type}"); + availableHWDecoders.Add(number, type); } - } - - private static unsafe void SetupLogging() - { - ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE); - // do not convert to local function - av_log_set_callback_callback logCallback = (p0, level, format, vl) => + if (availableHWDecoders.Count == 0) { - if (level > ffmpeg.av_log_get_level()) return; - - var lineSize = 1024; - var lineBuffer = stackalloc byte[lineSize]; - var printPrefix = 1; - ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix); - var line = Marshal.PtrToStringAnsi((IntPtr) lineBuffer); - Console.ForegroundColor = ConsoleColor.Yellow; - Console.Write(line); - Console.ResetColor(); - }; - - ffmpeg.av_log_set_callback(logCallback); + Console.WriteLine("Your system have no hardware decoders."); + HWtype = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE; + return; + } + + var decoderNumber = availableHWDecoders + .SingleOrDefault(t => t.Value == AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2).Key; + if (decoderNumber == 0) + decoderNumber = availableHWDecoders.First().Key; + Console.WriteLine($"Selected [{decoderNumber}]"); + int.TryParse(Console.ReadLine(), out var inputDecoderNumber); + availableHWDecoders.TryGetValue(inputDecoderNumber == 0 ? 
decoderNumber : inputDecoderNumber, + out HWtype); } + } + + private static unsafe void SetupLogging() + { + ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE); - private static unsafe void DecodeAllFramesToImages(AVHWDeviceType HWDevice) + // do not convert to local function + av_log_set_callback_callback logCallback = (p0, level, format, vl) => { - // decode all frames from url, please not it might local resorce, e.g. string url = "../../sample_mpeg4.mp4"; - var url = "http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4"; // be advised this file holds 1440 frames - using var vsd = new VideoStreamDecoder(url, HWDevice); + if (level > ffmpeg.av_log_get_level()) return; + + var lineSize = 1024; + var lineBuffer = stackalloc byte[lineSize]; + var printPrefix = 1; + ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix); + var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer); + Console.ForegroundColor = ConsoleColor.Yellow; + Console.Write(line); + Console.ResetColor(); + }; + + ffmpeg.av_log_set_callback(logCallback); + } - Console.WriteLine($"codec name: {vsd.CodecName}"); + private static unsafe void DecodeAllFramesToImages(AVHWDeviceType HWDevice) + { + // decode all frames from url, please not it might local resorce, e.g. string url = "../../sample_mpeg4.mp4"; + + var url = "http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4"; // be advised this file holds 1440 frames + using var vsd = new VideoStreamDecoder(url, HWDevice); - var info = vsd.GetContextInfo(); - info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}")); + Console.WriteLine($"codec name: {vsd.CodecName}"); - var sourceSize = vsd.FrameSize; - var sourcePixelFormat = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE - ? 
vsd.PixelFormat - : GetHWPixelFormat(HWDevice); - var destinationSize = sourceSize; - var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24; - using var vfc = - new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat); + var info = vsd.GetContextInfo(); + info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}")); - var frameNumber = 0; + var sourceSize = vsd.FrameSize; + var sourcePixelFormat = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE + ? vsd.PixelFormat + : GetHWPixelFormat(HWDevice); + var destinationSize = sourceSize; + var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24; + using var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat); - while (vsd.TryDecodeNextFrame(out var frame)) - { - var convertedFrame = vfc.Convert(frame); + var frameNumber = 0; - using (var bitmap = new Bitmap(convertedFrame.width, - convertedFrame.height, - convertedFrame.linesize[0], - PixelFormat.Format24bppRgb, - (IntPtr) convertedFrame.data[0])) - bitmap.Save($"frame.{frameNumber:D8}.jpg", ImageFormat.Jpeg); + while (vsd.TryDecodeNextFrame(out var frame)) + { + var convertedFrame = vfc.Convert(frame); - Console.WriteLine($"frame: {frameNumber}"); - frameNumber++; - } - } + using (var bitmap = new Bitmap(convertedFrame.width, + convertedFrame.height, + convertedFrame.linesize[0], + PixelFormat.Format24bppRgb, + (IntPtr)convertedFrame.data[0])) + bitmap.Save($"frames/frame.{frameNumber:D8}.jpg", ImageFormat.Jpeg); - private static AVPixelFormat GetHWPixelFormat(AVHWDeviceType hWDevice) - { - return hWDevice switch - { - AVHWDeviceType.AV_HWDEVICE_TYPE_NONE => AVPixelFormat.AV_PIX_FMT_NONE, - AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU => AVPixelFormat.AV_PIX_FMT_VDPAU, - AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA => AVPixelFormat.AV_PIX_FMT_CUDA, - AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI => AVPixelFormat.AV_PIX_FMT_VAAPI, - AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2 => 
AVPixelFormat.AV_PIX_FMT_NV12, - AVHWDeviceType.AV_HWDEVICE_TYPE_QSV => AVPixelFormat.AV_PIX_FMT_QSV, - AVHWDeviceType.AV_HWDEVICE_TYPE_VIDEOTOOLBOX => AVPixelFormat.AV_PIX_FMT_VIDEOTOOLBOX, - AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA => AVPixelFormat.AV_PIX_FMT_NV12, - AVHWDeviceType.AV_HWDEVICE_TYPE_DRM => AVPixelFormat.AV_PIX_FMT_DRM_PRIME, - AVHWDeviceType.AV_HWDEVICE_TYPE_OPENCL => AVPixelFormat.AV_PIX_FMT_OPENCL, - AVHWDeviceType.AV_HWDEVICE_TYPE_MEDIACODEC => AVPixelFormat.AV_PIX_FMT_MEDIACODEC, - _ => AVPixelFormat.AV_PIX_FMT_NONE - }; + Console.WriteLine($"frame: {frameNumber}"); + frameNumber++; } + } - private static unsafe void EncodeImagesToH264() + private static AVPixelFormat GetHWPixelFormat(AVHWDeviceType hWDevice) + { + return hWDevice switch { - var frameFiles = Directory.GetFiles(".", "frame.*.jpg").OrderBy(x => x).ToArray(); - var fistFrameImage = Image.FromFile(frameFiles.First()); + AVHWDeviceType.AV_HWDEVICE_TYPE_NONE => AVPixelFormat.AV_PIX_FMT_NONE, + AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU => AVPixelFormat.AV_PIX_FMT_VDPAU, + AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA => AVPixelFormat.AV_PIX_FMT_CUDA, + AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI => AVPixelFormat.AV_PIX_FMT_VAAPI, + AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2 => AVPixelFormat.AV_PIX_FMT_NV12, + AVHWDeviceType.AV_HWDEVICE_TYPE_QSV => AVPixelFormat.AV_PIX_FMT_QSV, + AVHWDeviceType.AV_HWDEVICE_TYPE_VIDEOTOOLBOX => AVPixelFormat.AV_PIX_FMT_VIDEOTOOLBOX, + AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA => AVPixelFormat.AV_PIX_FMT_NV12, + AVHWDeviceType.AV_HWDEVICE_TYPE_DRM => AVPixelFormat.AV_PIX_FMT_DRM_PRIME, + AVHWDeviceType.AV_HWDEVICE_TYPE_OPENCL => AVPixelFormat.AV_PIX_FMT_OPENCL, + AVHWDeviceType.AV_HWDEVICE_TYPE_MEDIACODEC => AVPixelFormat.AV_PIX_FMT_MEDIACODEC, + _ => AVPixelFormat.AV_PIX_FMT_NONE + }; + } - var outputFileName = "out.h264"; - var fps = 25; - var sourceSize = fistFrameImage.Size; - var sourcePixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24; - var destinationSize = sourceSize; - var 
destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P; - using var vfc = - new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat); + private static unsafe void EncodeImagesToH264() + { + var frameFiles = Directory.GetFiles("./frames", "frame.*.jpg").OrderBy(x => x).ToArray(); + var fistFrameImage = Image.FromFile(frameFiles.First()); - using var fs = File.Open(outputFileName, FileMode.Create); + var outputFileName = "frames/out.h264"; + var fps = 25; + var sourceSize = fistFrameImage.Size; + var sourcePixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24; + var destinationSize = sourceSize; + var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P; + using var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat); - using var vse = new H264VideoStreamEncoder(fs, fps, destinationSize); + using var fs = File.Open(outputFileName, FileMode.Create); - var frameNumber = 0; + using var vse = new H264VideoStreamEncoder(fs, fps, destinationSize); - foreach (var frameFile in frameFiles) - { - byte[] bitmapData; + var frameNumber = 0; + + foreach (var frameFile in frameFiles) + { + byte[] bitmapData; - using (var frameImage = Image.FromFile(frameFile)) - using (var frameBitmap = frameImage is Bitmap bitmap ? bitmap : new Bitmap(frameImage)) - bitmapData = GetBitmapData(frameBitmap); + using (var frameImage = Image.FromFile(frameFile)) + using (var frameBitmap = frameImage is Bitmap bitmap ? 
bitmap : new Bitmap(frameImage)) + bitmapData = GetBitmapData(frameBitmap); - fixed (byte* pBitmapData = bitmapData) + fixed (byte* pBitmapData = bitmapData) + { + var data = new byte_ptr8 { [0] = pBitmapData }; + var linesize = new int8 { [0] = bitmapData.Length / sourceSize.Height }; + var frame = new AVFrame { - var data = new byte_ptrArray8 { [0] = pBitmapData }; - var linesize = new int_array8 { [0] = bitmapData.Length / sourceSize.Height }; - var frame = new AVFrame - { - data = data, - linesize = linesize, - height = sourceSize.Height - }; - var convertedFrame = vfc.Convert(frame); - convertedFrame.pts = frameNumber * fps; - vse.Encode(convertedFrame); - } - - Console.WriteLine($"frame: {frameNumber}"); - frameNumber++; + data = data, + linesize = linesize, + height = sourceSize.Height + }; + var convertedFrame = vfc.Convert(frame); + convertedFrame.pts = frameNumber * fps; + vse.Encode(convertedFrame); } - vse.Drain(); + Console.WriteLine($"frame: {frameNumber}"); + frameNumber++; } - private static byte[] GetBitmapData(Bitmap frameBitmap) - { - var bitmapData = frameBitmap.LockBits(new Rectangle(Point.Empty, frameBitmap.Size), - ImageLockMode.ReadOnly, - PixelFormat.Format24bppRgb); + vse.Drain(); + } - try - { - var length = bitmapData.Stride * bitmapData.Height; - var data = new byte[length]; - Marshal.Copy(bitmapData.Scan0, data, 0, length); - return data; - } - finally - { - frameBitmap.UnlockBits(bitmapData); - } + private static byte[] GetBitmapData(Bitmap frameBitmap) + { + var bitmapData = frameBitmap.LockBits(new Rectangle(Point.Empty, frameBitmap.Size), + ImageLockMode.ReadOnly, + PixelFormat.Format24bppRgb); + + try + { + var length = bitmapData.Stride * bitmapData.Height; + var data = new byte[length]; + Marshal.Copy(bitmapData.Scan0, data, 0, length); + return data; + } + finally + { + frameBitmap.UnlockBits(bitmapData); } } } diff --git a/FFmpeg.AutoGen.Example/VideoFrameConverter.cs b/FFmpeg.AutoGen.Example/VideoFrameConverter.cs index 
68e7fb76..d301d4f8 100644 --- a/FFmpeg.AutoGen.Example/VideoFrameConverter.cs +++ b/FFmpeg.AutoGen.Example/VideoFrameConverter.cs @@ -1,80 +1,80 @@ using System; using System.Drawing; using System.Runtime.InteropServices; +using FFmpeg.AutoGen.Abstractions; -namespace FFmpeg.AutoGen.Example +namespace FFmpeg.AutoGen.Example; + +public sealed unsafe class VideoFrameConverter : IDisposable { - public sealed unsafe class VideoFrameConverter : IDisposable - { - private readonly IntPtr _convertedFrameBufferPtr; - private readonly Size _destinationSize; - private readonly byte_ptrArray4 _dstData; - private readonly int_array4 _dstLinesize; - private readonly SwsContext* _pConvertContext; + private readonly IntPtr _convertedFrameBufferPtr; + private readonly Size _destinationSize; + private readonly byte_ptr4 _dstData; + private readonly int4 _dstLinesize; + private readonly SwsContext* _pConvertContext; - public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat, - Size destinationSize, AVPixelFormat destinationPixelFormat) - { - _destinationSize = destinationSize; + public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat, + Size destinationSize, AVPixelFormat destinationPixelFormat) + { + _destinationSize = destinationSize; - _pConvertContext = ffmpeg.sws_getContext(sourceSize.Width, - sourceSize.Height, - sourcePixelFormat, - destinationSize.Width, - destinationSize.Height, - destinationPixelFormat, - ffmpeg.SWS_FAST_BILINEAR, - null, - null, - null); - if (_pConvertContext == null) - throw new ApplicationException("Could not initialize the conversion context."); + _pConvertContext = ffmpeg.sws_getContext(sourceSize.Width, + sourceSize.Height, + sourcePixelFormat, + destinationSize.Width, + destinationSize.Height, + destinationPixelFormat, + ffmpeg.SWS_FAST_BILINEAR, + null, + null, + null); + if (_pConvertContext == null) + throw new ApplicationException("Could not initialize the conversion context."); - var 
convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, - destinationSize.Width, - destinationSize.Height, - 1); - _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize); - _dstData = new byte_ptrArray4(); - _dstLinesize = new int_array4(); + var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, + destinationSize.Width, + destinationSize.Height, + 1); + _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize); + _dstData = new byte_ptr4(); + _dstLinesize = new int4(); - ffmpeg.av_image_fill_arrays(ref _dstData, - ref _dstLinesize, - (byte*) _convertedFrameBufferPtr, - destinationPixelFormat, - destinationSize.Width, - destinationSize.Height, - 1); - } + ffmpeg.av_image_fill_arrays(ref _dstData, + ref _dstLinesize, + (byte*)_convertedFrameBufferPtr, + destinationPixelFormat, + destinationSize.Width, + destinationSize.Height, + 1); + } - public void Dispose() - { - Marshal.FreeHGlobal(_convertedFrameBufferPtr); - ffmpeg.sws_freeContext(_pConvertContext); - } + public void Dispose() + { + Marshal.FreeHGlobal(_convertedFrameBufferPtr); + ffmpeg.sws_freeContext(_pConvertContext); + } - public AVFrame Convert(AVFrame sourceFrame) - { - ffmpeg.sws_scale(_pConvertContext, - sourceFrame.data, - sourceFrame.linesize, - 0, - sourceFrame.height, - _dstData, - _dstLinesize); + public AVFrame Convert(AVFrame sourceFrame) + { + ffmpeg.sws_scale(_pConvertContext, + sourceFrame.data, + sourceFrame.linesize, + 0, + sourceFrame.height, + _dstData, + _dstLinesize); - var data = new byte_ptrArray8(); - data.UpdateFrom(_dstData); - var linesize = new int_array8(); - linesize.UpdateFrom(_dstLinesize); + var data = new byte_ptr8(); + data.UpdateFrom(_dstData); + var linesize = new int8(); + linesize.UpdateFrom(_dstLinesize); - return new AVFrame - { - data = data, - linesize = linesize, - width = _destinationSize.Width, - height = _destinationSize.Height - }; - } + return new AVFrame + { + data 
= data, + linesize = linesize, + width = _destinationSize.Width, + height = _destinationSize.Height + }; } } diff --git a/FFmpeg.AutoGen.Example/VideoStreamDecoder.cs b/FFmpeg.AutoGen.Example/VideoStreamDecoder.cs index 4bf7cab3..a8a0fbbe 100644 --- a/FFmpeg.AutoGen.Example/VideoStreamDecoder.cs +++ b/FFmpeg.AutoGen.Example/VideoStreamDecoder.cs @@ -2,122 +2,126 @@ using System.Collections.Generic; using System.Drawing; using System.Runtime.InteropServices; +using FFmpeg.AutoGen.Abstractions; -namespace FFmpeg.AutoGen.Example +namespace FFmpeg.AutoGen.Example; + +public sealed unsafe class VideoStreamDecoder : IDisposable { - public sealed unsafe class VideoStreamDecoder : IDisposable + private readonly AVCodecContext* _pCodecContext; + private readonly AVFormatContext* _pFormatContext; + private readonly AVFrame* _pFrame; + private readonly AVPacket* _pPacket; + private readonly AVFrame* _receivedFrame; + private readonly int _streamIndex; + + public VideoStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE) { - private readonly AVCodecContext* _pCodecContext; - private readonly AVFormatContext* _pFormatContext; - private readonly AVFrame* _pFrame; - private readonly AVPacket* _pPacket; - private readonly AVFrame* _receivedFrame; - private readonly int _streamIndex; - - public VideoStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE) + _pFormatContext = ffmpeg.avformat_alloc_context(); + _receivedFrame = ffmpeg.av_frame_alloc(); + var pFormatContext = _pFormatContext; + ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError(); + ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError(); + AVCodec* codec = null; + _streamIndex = ffmpeg + .av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0) + .ThrowExceptionIfError(); + _pCodecContext = ffmpeg.avcodec_alloc_context3(codec); + + if (HWDeviceType != 
AVHWDeviceType.AV_HWDEVICE_TYPE_NONE) { - _pFormatContext = ffmpeg.avformat_alloc_context(); - _receivedFrame = ffmpeg.av_frame_alloc(); - var pFormatContext = _pFormatContext; - ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError(); - ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError(); - AVCodec* codec = null; - _streamIndex = ffmpeg - .av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0) - .ThrowExceptionIfError(); - _pCodecContext = ffmpeg.avcodec_alloc_context3(codec); - if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE) - ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0) - .ThrowExceptionIfError(); - ffmpeg.avcodec_parameters_to_context(_pCodecContext, _pFormatContext->streams[_streamIndex]->codecpar) + ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0) .ThrowExceptionIfError(); - ffmpeg.avcodec_open2(_pCodecContext, codec, null).ThrowExceptionIfError(); - - CodecName = ffmpeg.avcodec_get_name(codec->id); - FrameSize = new Size(_pCodecContext->width, _pCodecContext->height); - PixelFormat = _pCodecContext->pix_fmt; - - _pPacket = ffmpeg.av_packet_alloc(); - _pFrame = ffmpeg.av_frame_alloc(); } - public string CodecName { get; } - public Size FrameSize { get; } - public AVPixelFormat PixelFormat { get; } + ffmpeg.avcodec_parameters_to_context(_pCodecContext, _pFormatContext->streams[_streamIndex]->codecpar) + .ThrowExceptionIfError(); + ffmpeg.avcodec_open2(_pCodecContext, codec, null).ThrowExceptionIfError(); - public void Dispose() - { - var pFrame = _pFrame; - ffmpeg.av_frame_free(&pFrame); + CodecName = ffmpeg.avcodec_get_name(codec->id); + FrameSize = new Size(_pCodecContext->width, _pCodecContext->height); + PixelFormat = _pCodecContext->pix_fmt; - var pPacket = _pPacket; - ffmpeg.av_packet_free(&pPacket); + _pPacket = ffmpeg.av_packet_alloc(); + _pFrame = 
ffmpeg.av_frame_alloc(); + } - ffmpeg.avcodec_close(_pCodecContext); - var pFormatContext = _pFormatContext; - ffmpeg.avformat_close_input(&pFormatContext); - } + public string CodecName { get; } + public Size FrameSize { get; } + public AVPixelFormat PixelFormat { get; } - public bool TryDecodeNextFrame(out AVFrame frame) - { - ffmpeg.av_frame_unref(_pFrame); - ffmpeg.av_frame_unref(_receivedFrame); - int error; + public void Dispose() + { + var pFrame = _pFrame; + ffmpeg.av_frame_free(&pFrame); - do - { - try - { - do - { - ffmpeg.av_packet_unref(_pPacket); - error = ffmpeg.av_read_frame(_pFormatContext, _pPacket); + var pPacket = _pPacket; + ffmpeg.av_packet_free(&pPacket); - if (error == ffmpeg.AVERROR_EOF) - { - frame = *_pFrame; - return false; - } + ffmpeg.avcodec_close(_pCodecContext); + var pFormatContext = _pFormatContext; + ffmpeg.avformat_close_input(&pFormatContext); + } - error.ThrowExceptionIfError(); - } while (_pPacket->stream_index != _streamIndex); + public bool TryDecodeNextFrame(out AVFrame frame) + { + ffmpeg.av_frame_unref(_pFrame); + ffmpeg.av_frame_unref(_receivedFrame); + int error; - ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket).ThrowExceptionIfError(); - } - finally + do + { + try + { + do { ffmpeg.av_packet_unref(_pPacket); - } + error = ffmpeg.av_read_frame(_pFormatContext, _pPacket); - error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame); - } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN)); + if (error == ffmpeg.AVERROR_EOF) + { + frame = *_pFrame; + return false; + } - error.ThrowExceptionIfError(); + error.ThrowExceptionIfError(); + } while (_pPacket->stream_index != _streamIndex); - if (_pCodecContext->hw_device_ctx != null) + ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket).ThrowExceptionIfError(); + } + finally { - ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0).ThrowExceptionIfError(); - frame = *_receivedFrame; + ffmpeg.av_packet_unref(_pPacket); } - else - frame = *_pFrame; - return true; - } + 
error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame); + } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN)); + + error.ThrowExceptionIfError(); - public IReadOnlyDictionary GetContextInfo() + if (_pCodecContext->hw_device_ctx != null) { - AVDictionaryEntry* tag = null; - var result = new Dictionary(); + ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0).ThrowExceptionIfError(); + frame = *_receivedFrame; + } + else + frame = *_pFrame; - while ((tag = ffmpeg.av_dict_get(_pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null) - { - var key = Marshal.PtrToStringAnsi((IntPtr) tag->key); - var value = Marshal.PtrToStringAnsi((IntPtr) tag->value); - result.Add(key, value); - } + return true; + } - return result; + public IReadOnlyDictionary GetContextInfo() + { + AVDictionaryEntry* tag = null; + var result = new Dictionary(); + + while ((tag = ffmpeg.av_dict_get(_pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null) + { + var key = Marshal.PtrToStringAnsi((IntPtr)tag->key); + var value = Marshal.PtrToStringAnsi((IntPtr)tag->value); + result.Add(key, value); } + + return result; } } diff --git a/FFmpeg.AutoGen.Examples.Encode/FFmpeg.AutoGen.Examples.Encode.csproj b/FFmpeg.AutoGen.Examples.Encode/FFmpeg.AutoGen.Examples.Encode.csproj index 39652187..e4e5c1dc 100644 --- a/FFmpeg.AutoGen.Examples.Encode/FFmpeg.AutoGen.Examples.Encode.csproj +++ b/FFmpeg.AutoGen.Examples.Encode/FFmpeg.AutoGen.Examples.Encode.csproj @@ -12,7 +12,7 @@ - + diff --git a/FFmpeg.AutoGen.Examples.ResamplingAudio/FFmpeg.AutoGen.Examples.ResamplingAudio.csproj b/FFmpeg.AutoGen.Examples.ResamplingAudio/FFmpeg.AutoGen.Examples.ResamplingAudio.csproj index 39652187..e4e5c1dc 100644 --- a/FFmpeg.AutoGen.Examples.ResamplingAudio/FFmpeg.AutoGen.Examples.ResamplingAudio.csproj +++ b/FFmpeg.AutoGen.Examples.ResamplingAudio/FFmpeg.AutoGen.Examples.ResamplingAudio.csproj @@ -12,7 +12,7 @@ - + diff --git a/FFmpeg.AutoGen.sln b/FFmpeg.AutoGen.sln index 
254cf0bf..678b8e5a 100644 --- a/FFmpeg.AutoGen.sln +++ b/FFmpeg.AutoGen.sln @@ -7,7 +7,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFmpeg.AutoGen", "FFmpeg.Au EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFmpeg.AutoGen.Example", "FFmpeg.AutoGen.Example\FFmpeg.AutoGen.Example.csproj", "{5BD870D6-27B4-4208-ACBF-496F2809326A}" EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{CF48D45D-979F-4574-9E33-A3AC1CB542B5}" +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "0. Solution Items", "0. Solution Items", "{CF48D45D-979F-4574-9E33-A3AC1CB542B5}" ProjectSection(SolutionItems) = preProject .travis.yml = .travis.yml Directory.Build.props = Directory.Build.props @@ -24,6 +24,24 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFmpeg.AutoGen.ClangMacroPa EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFmpeg.AutoGen.ClangMacroParser.Test", "FFmpeg.AutoGen.ClangMacroParser.Test\FFmpeg.AutoGen.ClangMacroParser.Test.csproj", "{07F3A6D4-5599-4D77-9197-A666E8FC8EEC}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Bindings", "Bindings", "{F969EEDD-27A8-434B-A438-39C48BBAEFE6}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFmpeg.AutoGen.Abstractions", "FFmpeg.AutoGen.Abstractions\FFmpeg.AutoGen.Abstractions.csproj", "{1294EC3A-9667-40A2-8189-C9EC12ECFDD6}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFmpeg.AutoGen.Bindings.StaticallyLinked", "FFmpeg.AutoGen.Bindings.StaticallyLinked\FFmpeg.AutoGen.Bindings.StaticallyLinked.csproj", "{ED2E790B-9233-40BC-BA66-E3291AAA9878}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFmpeg.AutoGen.Bindings.DynamicallyLinked", "FFmpeg.AutoGen.Bindings.DynamicallyLinked\FFmpeg.AutoGen.Bindings.DynamicallyLinked.csproj", "{C272CD0A-E13B-4E80-8CA2-2BEC8396D8D0}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = 
"FFmpeg.AutoGen.Bindings.DynamicallyLoaded", "FFmpeg.AutoGen.Bindings.DynamicallyLoaded\FFmpeg.AutoGen.Bindings.DynamicallyLoaded.csproj", "{028113A5-39CE-40A2-9118-56E99F30B4F8}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "1. Generation", "1. Generation", "{E9CC1A7F-ADD1-486B-BEB3-B6E388B35731}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "4. Examples", "4. Examples", "{660FBA30-C9C0-44A6-B4C5-1C5030ED271D}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "3. Legacy", "3. Legacy", "{0771FEE5-9700-4F09-99C4-EC36FD5AFEA9}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "2. Abstractions", "2. Abstractions", "{1B9CC2D2-096F-4D35-8940-DA6344450430}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -58,10 +76,40 @@ Global {07F3A6D4-5599-4D77-9197-A666E8FC8EEC}.Debug|Any CPU.Build.0 = Debug|Any CPU {07F3A6D4-5599-4D77-9197-A666E8FC8EEC}.Release|Any CPU.ActiveCfg = Release|Any CPU {07F3A6D4-5599-4D77-9197-A666E8FC8EEC}.Release|Any CPU.Build.0 = Release|Any CPU + {1294EC3A-9667-40A2-8189-C9EC12ECFDD6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1294EC3A-9667-40A2-8189-C9EC12ECFDD6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1294EC3A-9667-40A2-8189-C9EC12ECFDD6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1294EC3A-9667-40A2-8189-C9EC12ECFDD6}.Release|Any CPU.Build.0 = Release|Any CPU + {ED2E790B-9233-40BC-BA66-E3291AAA9878}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {ED2E790B-9233-40BC-BA66-E3291AAA9878}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ED2E790B-9233-40BC-BA66-E3291AAA9878}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ED2E790B-9233-40BC-BA66-E3291AAA9878}.Release|Any CPU.Build.0 = Release|Any CPU + {C272CD0A-E13B-4E80-8CA2-2BEC8396D8D0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C272CD0A-E13B-4E80-8CA2-2BEC8396D8D0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C272CD0A-E13B-4E80-8CA2-2BEC8396D8D0}.Release|Any CPU.ActiveCfg = 
Release|Any CPU + {C272CD0A-E13B-4E80-8CA2-2BEC8396D8D0}.Release|Any CPU.Build.0 = Release|Any CPU + {028113A5-39CE-40A2-9118-56E99F30B4F8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {028113A5-39CE-40A2-9118-56E99F30B4F8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {028113A5-39CE-40A2-9118-56E99F30B4F8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {028113A5-39CE-40A2-9118-56E99F30B4F8}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {A536B02A-B0B1-4753-8328-17369EF09976} = {0771FEE5-9700-4F09-99C4-EC36FD5AFEA9} + {5BD870D6-27B4-4208-ACBF-496F2809326A} = {660FBA30-C9C0-44A6-B4C5-1C5030ED271D} + {2A8E06C6-5A68-4FB4-AE0C-F43B644E3737} = {E9CC1A7F-ADD1-486B-BEB3-B6E388B35731} + {136668DD-ECE0-4153-B21E-511882F358F9} = {660FBA30-C9C0-44A6-B4C5-1C5030ED271D} + {A192914F-DAA8-400B-B5CA-BA188AEBB42B} = {660FBA30-C9C0-44A6-B4C5-1C5030ED271D} + {4557AF4F-4680-4764-A7A0-F739664B4DA1} = {E9CC1A7F-ADD1-486B-BEB3-B6E388B35731} + {07F3A6D4-5599-4D77-9197-A666E8FC8EEC} = {E9CC1A7F-ADD1-486B-BEB3-B6E388B35731} + {F969EEDD-27A8-434B-A438-39C48BBAEFE6} = {1B9CC2D2-096F-4D35-8940-DA6344450430} + {1294EC3A-9667-40A2-8189-C9EC12ECFDD6} = {1B9CC2D2-096F-4D35-8940-DA6344450430} + {ED2E790B-9233-40BC-BA66-E3291AAA9878} = {F969EEDD-27A8-434B-A438-39C48BBAEFE6} + {C272CD0A-E13B-4E80-8CA2-2BEC8396D8D0} = {F969EEDD-27A8-434B-A438-39C48BBAEFE6} + {028113A5-39CE-40A2-9118-56E99F30B4F8} = {F969EEDD-27A8-434B-A438-39C48BBAEFE6} + EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {4BAFD2E8-8AAC-4483-B5FA-6E4C136A59F5} EndGlobalSection diff --git a/FFmpeg.AutoGen/ConstCharPtrMarshaler.cs b/FFmpeg.AutoGen/ConstCharPtrMarshaler.cs index 4d422cc5..3bcca9c6 100644 --- a/FFmpeg.AutoGen/ConstCharPtrMarshaler.cs +++ b/FFmpeg.AutoGen/ConstCharPtrMarshaler.cs @@ -1,26 +1,24 @@ using System; using System.Runtime.InteropServices; 
-namespace FFmpeg.AutoGen -{ - internal class ConstCharPtrMarshaler : ICustomMarshaler - { - public object MarshalNativeToManaged(IntPtr pNativeData) => Marshal.PtrToStringAnsi(pNativeData); +namespace FFmpeg.AutoGen; - public IntPtr MarshalManagedToNative(object managedObj) => IntPtr.Zero; +public class ConstCharPtrMarshaler : ICustomMarshaler +{ + private static readonly ConstCharPtrMarshaler Instance = new(); + public object MarshalNativeToManaged(IntPtr pNativeData) => Marshal.PtrToStringAnsi(pNativeData); - public void CleanUpNativeData(IntPtr pNativeData) - { - } + public IntPtr MarshalManagedToNative(object managedObj) => IntPtr.Zero; - public void CleanUpManagedData(object managedObj) - { - } + public void CleanUpNativeData(IntPtr pNativeData) + { + } - public int GetNativeDataSize() => IntPtr.Size; + public void CleanUpManagedData(object managedObj) + { + } - private static readonly ConstCharPtrMarshaler Instance = new(); + public int GetNativeDataSize() => IntPtr.Size; - public static ICustomMarshaler GetInstance(string cookie) => Instance; - } -} \ No newline at end of file + public static ICustomMarshaler GetInstance(string cookie) => Instance; +} diff --git a/FFmpeg.AutoGen/FFmpeg.AutoGen.csproj b/FFmpeg.AutoGen/FFmpeg.AutoGen.csproj index b4815a87..3094c627 100644 --- a/FFmpeg.AutoGen/FFmpeg.AutoGen.csproj +++ b/FFmpeg.AutoGen/FFmpeg.AutoGen.csproj @@ -1,14 +1,14 @@  - netstandard2.1;netstandard2.0;net472;net45 - true + netstandard2.1;netstandard2.0;net45 FFmpeg auto generated unsafe bindings for C#/.NET and Mono. 
+ true True - 1701;1702;1705;169;108;1591;1573;1591 + 108;169;618;1573;1591;1701;1702;1705 false bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml diff --git a/FFmpeg.AutoGen/FFmpeg.arrays.g.cs b/FFmpeg.AutoGen/FFmpeg.arrays.g.cs deleted file mode 100644 index 12ff6e28..00000000 --- a/FFmpeg.AutoGen/FFmpeg.arrays.g.cs +++ /dev/null @@ -1,554 +0,0 @@ -using System; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen -{ - #pragma warning disable 169 - - public unsafe struct AVRational_array2 - { - public static readonly int Size = 2; - AVRational _0; AVRational _1; - - public AVRational this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { *(p0 + i) = value; } } - } - public AVRational[] ToArray() - { - fixed (AVRational* p0 = &_0) { var a = new AVRational[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(AVRational[] array) - { - fixed (AVRational* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator AVRational[](AVRational_array2 @struct) => @struct.ToArray(); - } - - public unsafe struct short_array2 - { - public static readonly int Size = 2; - fixed short _[2]; - - public short this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (short_array2* p = &this) { return p->_[i]; } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (short_array2* p = &this) { p->_[i] = value; } } - } - public short[] ToArray() - { - fixed (short_array2* p = &this) { var a = new short[Size]; for (uint i = 0; i < Size; i++) a[i] = p->_[i]; return a; } - } - public void UpdateFrom(short[] array) - { - fixed (short_array2* p = &this) { uint i = 0; foreach(var value in array) { p->_[i++] = value; if (i >= 
Size) return; } } - } - public static implicit operator short[](short_array2 @struct) => @struct.ToArray(); - } - - public unsafe struct void_ptrArray2 - { - public static readonly int Size = 2; - void* _0; void* _1; - - public void* this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (void** p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (void** p0 = &_0) { *(p0 + i) = value; } } - } - public void*[] ToArray() - { - fixed (void** p0 = &_0) { var a = new void*[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(void*[] array) - { - fixed (void** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator void*[](void_ptrArray2 @struct) => @struct.ToArray(); - } - - public unsafe struct AVHDRPlusColorTransformParams_array3 - { - public static readonly int Size = 3; - AVHDRPlusColorTransformParams _0; AVHDRPlusColorTransformParams _1; AVHDRPlusColorTransformParams _2; - - public AVHDRPlusColorTransformParams this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVHDRPlusColorTransformParams* p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVHDRPlusColorTransformParams* p0 = &_0) { *(p0 + i) = value; } } - } - public AVHDRPlusColorTransformParams[] ToArray() - { - fixed (AVHDRPlusColorTransformParams* p0 = &_0) { var a = new AVHDRPlusColorTransformParams[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(AVHDRPlusColorTransformParams[] array) - { - fixed (AVHDRPlusColorTransformParams* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator AVHDRPlusColorTransformParams[](AVHDRPlusColorTransformParams_array3 @struct) => 
@struct.ToArray(); - } - - public unsafe struct AVRational_array3 - { - public static readonly int Size = 3; - AVRational _0; AVRational _1; AVRational _2; - - public AVRational this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { *(p0 + i) = value; } } - } - public AVRational[] ToArray() - { - fixed (AVRational* p0 = &_0) { var a = new AVRational[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(AVRational[] array) - { - fixed (AVRational* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator AVRational[](AVRational_array3 @struct) => @struct.ToArray(); - } - - public unsafe struct AVRational_array3x2 - { - public static readonly int Size = 3; - AVRational_array2 _0; AVRational_array2 _1; AVRational_array2 _2; - - public AVRational_array2 this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVRational_array2* p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVRational_array2* p0 = &_0) { *(p0 + i) = value; } } - } - public AVRational_array2[] ToArray() - { - fixed (AVRational_array2* p0 = &_0) { var a = new AVRational_array2[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(AVRational_array2[] array) - { - fixed (AVRational_array2* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator AVRational_array2[](AVRational_array3x2 @struct) => @struct.ToArray(); - } - - public unsafe struct byte_ptrArray3 - { - public static readonly int Size = 3; - byte* _0; byte* _1; byte* _2; - - public byte* this[uint i] - { - get { if (i >= Size) throw new 
ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { *(p0 + i) = value; } } - } - public byte*[] ToArray() - { - fixed (byte** p0 = &_0) { var a = new byte*[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(byte*[] array) - { - fixed (byte** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator byte*[](byte_ptrArray3 @struct) => @struct.ToArray(); - } - - public unsafe struct int_array3 - { - public static readonly int Size = 3; - fixed int _[3]; - - public int this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (int_array3* p = &this) { return p->_[i]; } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (int_array3* p = &this) { p->_[i] = value; } } - } - public int[] ToArray() - { - fixed (int_array3* p = &this) { var a = new int[Size]; for (uint i = 0; i < Size; i++) a[i] = p->_[i]; return a; } - } - public void UpdateFrom(int[] array) - { - fixed (int_array3* p = &this) { uint i = 0; foreach(var value in array) { p->_[i++] = value; if (i >= Size) return; } } - } - public static implicit operator int[](int_array3 @struct) => @struct.ToArray(); - } - - public unsafe struct short_array3x2 - { - public static readonly int Size = 3; - short_array2 _0; short_array2 _1; short_array2 _2; - - public short_array2 this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (short_array2* p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (short_array2* p0 = &_0) { *(p0 + i) = value; } } - } - public short_array2[] ToArray() - { - fixed (short_array2* p0 = &_0) { var a = new short_array2[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void 
UpdateFrom(short_array2[] array) - { - fixed (short_array2* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator short_array2[](short_array3x2 @struct) => @struct.ToArray(); - } - - public unsafe struct AVComponentDescriptor_array4 - { - public static readonly int Size = 4; - AVComponentDescriptor _0; AVComponentDescriptor _1; AVComponentDescriptor _2; AVComponentDescriptor _3; - - public AVComponentDescriptor this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVComponentDescriptor* p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVComponentDescriptor* p0 = &_0) { *(p0 + i) = value; } } - } - public AVComponentDescriptor[] ToArray() - { - fixed (AVComponentDescriptor* p0 = &_0) { var a = new AVComponentDescriptor[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(AVComponentDescriptor[] array) - { - fixed (AVComponentDescriptor* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator AVComponentDescriptor[](AVComponentDescriptor_array4 @struct) => @struct.ToArray(); - } - - public unsafe struct byte_ptrArray4 - { - public static readonly int Size = 4; - byte* _0; byte* _1; byte* _2; byte* _3; - - public byte* this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { *(p0 + i) = value; } } - } - public byte*[] ToArray() - { - fixed (byte** p0 = &_0) { var a = new byte*[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(byte*[] array) - { - fixed (byte** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - 
} - public static implicit operator byte*[](byte_ptrArray4 @struct) => @struct.ToArray(); - } - - public unsafe struct int_array4 - { - public static readonly int Size = 4; - fixed int _[4]; - - public int this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (int_array4* p = &this) { return p->_[i]; } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (int_array4* p = &this) { p->_[i] = value; } } - } - public int[] ToArray() - { - fixed (int_array4* p = &this) { var a = new int[Size]; for (uint i = 0; i < Size; i++) a[i] = p->_[i]; return a; } - } - public void UpdateFrom(int[] array) - { - fixed (int_array4* p = &this) { uint i = 0; foreach(var value in array) { p->_[i++] = value; if (i >= Size) return; } } - } - public static implicit operator int[](int_array4 @struct) => @struct.ToArray(); - } - - public unsafe struct long_array4 - { - public static readonly int Size = 4; - fixed long _[4]; - - public long this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (long_array4* p = &this) { return p->_[i]; } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (long_array4* p = &this) { p->_[i] = value; } } - } - public long[] ToArray() - { - fixed (long_array4* p = &this) { var a = new long[Size]; for (uint i = 0; i < Size; i++) a[i] = p->_[i]; return a; } - } - public void UpdateFrom(long[] array) - { - fixed (long_array4* p = &this) { uint i = 0; foreach(var value in array) { p->_[i++] = value; if (i >= Size) return; } } - } - public static implicit operator long[](long_array4 @struct) => @struct.ToArray(); - } - - public unsafe struct ulong_array4 - { - public static readonly int Size = 4; - fixed ulong _[4]; - - public ulong this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (ulong_array4* p = &this) { return p->_[i]; } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (ulong_array4* p = &this) { 
p->_[i] = value; } } - } - public ulong[] ToArray() - { - fixed (ulong_array4* p = &this) { var a = new ulong[Size]; for (uint i = 0; i < Size; i++) a[i] = p->_[i]; return a; } - } - public void UpdateFrom(ulong[] array) - { - fixed (ulong_array4* p = &this) { uint i = 0; foreach(var value in array) { p->_[i++] = value; if (i >= Size) return; } } - } - public static implicit operator ulong[](ulong_array4 @struct) => @struct.ToArray(); - } - - public unsafe struct int_array7 - { - public static readonly int Size = 7; - fixed int _[7]; - - public int this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (int_array7* p = &this) { return p->_[i]; } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (int_array7* p = &this) { p->_[i] = value; } } - } - public int[] ToArray() - { - fixed (int_array7* p = &this) { var a = new int[Size]; for (uint i = 0; i < Size; i++) a[i] = p->_[i]; return a; } - } - public void UpdateFrom(int[] array) - { - fixed (int_array7* p = &this) { uint i = 0; foreach(var value in array) { p->_[i++] = value; if (i >= Size) return; } } - } - public static implicit operator int[](int_array7 @struct) => @struct.ToArray(); - } - - public unsafe struct AVBufferRef_ptrArray8 - { - public static readonly int Size = 8; - AVBufferRef* _0; AVBufferRef* _1; AVBufferRef* _2; AVBufferRef* _3; AVBufferRef* _4; AVBufferRef* _5; AVBufferRef* _6; AVBufferRef* _7; - - public AVBufferRef* this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVBufferRef** p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVBufferRef** p0 = &_0) { *(p0 + i) = value; } } - } - public AVBufferRef*[] ToArray() - { - fixed (AVBufferRef** p0 = &_0) { var a = new AVBufferRef*[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(AVBufferRef*[] array) - { - fixed (AVBufferRef** p0 = &_0) { uint i = 0; 
foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator AVBufferRef*[](AVBufferRef_ptrArray8 @struct) => @struct.ToArray(); - } - - public unsafe struct byte_array8 - { - public static readonly int Size = 8; - fixed byte _[8]; - - public byte this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (byte_array8* p = &this) { return p->_[i]; } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (byte_array8* p = &this) { p->_[i] = value; } } - } - public byte[] ToArray() - { - fixed (byte_array8* p = &this) { var a = new byte[Size]; for (uint i = 0; i < Size; i++) a[i] = p->_[i]; return a; } - } - public void UpdateFrom(byte[] array) - { - fixed (byte_array8* p = &this) { uint i = 0; foreach(var value in array) { p->_[i++] = value; if (i >= Size) return; } } - } - public static implicit operator byte[](byte_array8 @struct) => @struct.ToArray(); - } - - public unsafe struct byte_ptrArray8 - { - public static readonly int Size = 8; - byte* _0; byte* _1; byte* _2; byte* _3; byte* _4; byte* _5; byte* _6; byte* _7; - - public byte* this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { *(p0 + i) = value; } } - } - public byte*[] ToArray() - { - fixed (byte** p0 = &_0) { var a = new byte*[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(byte*[] array) - { - fixed (byte** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator byte*[](byte_ptrArray8 @struct) => @struct.ToArray(); - } - - public unsafe struct int_array8 - { - public static readonly int Size = 8; - fixed int _[8]; - - public int this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); 
fixed (int_array8* p = &this) { return p->_[i]; } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (int_array8* p = &this) { p->_[i] = value; } } - } - public int[] ToArray() - { - fixed (int_array8* p = &this) { var a = new int[Size]; for (uint i = 0; i < Size; i++) a[i] = p->_[i]; return a; } - } - public void UpdateFrom(int[] array) - { - fixed (int_array8* p = &this) { uint i = 0; foreach(var value in array) { p->_[i++] = value; if (i >= Size) return; } } - } - public static implicit operator int[](int_array8 @struct) => @struct.ToArray(); - } - - public unsafe struct ulong_array8 - { - public static readonly int Size = 8; - fixed ulong _[8]; - - public ulong this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (ulong_array8* p = &this) { return p->_[i]; } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (ulong_array8* p = &this) { p->_[i] = value; } } - } - public ulong[] ToArray() - { - fixed (ulong_array8* p = &this) { var a = new ulong[Size]; for (uint i = 0; i < Size; i++) a[i] = p->_[i]; return a; } - } - public void UpdateFrom(ulong[] array) - { - fixed (ulong_array8* p = &this) { uint i = 0; foreach(var value in array) { p->_[i++] = value; if (i >= Size) return; } } - } - public static implicit operator ulong[](ulong_array8 @struct) => @struct.ToArray(); - } - - public unsafe struct AVHDRPlusPercentile_array15 - { - public static readonly int Size = 15; - AVHDRPlusPercentile _0; AVHDRPlusPercentile _1; AVHDRPlusPercentile _2; AVHDRPlusPercentile _3; AVHDRPlusPercentile _4; AVHDRPlusPercentile _5; AVHDRPlusPercentile _6; AVHDRPlusPercentile _7; AVHDRPlusPercentile _8; AVHDRPlusPercentile _9; AVHDRPlusPercentile _10; AVHDRPlusPercentile _11; AVHDRPlusPercentile _12; AVHDRPlusPercentile _13; AVHDRPlusPercentile _14; - - public AVHDRPlusPercentile this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVHDRPlusPercentile* p0 = &_0) { return *(p0 
+ i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVHDRPlusPercentile* p0 = &_0) { *(p0 + i) = value; } } - } - public AVHDRPlusPercentile[] ToArray() - { - fixed (AVHDRPlusPercentile* p0 = &_0) { var a = new AVHDRPlusPercentile[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(AVHDRPlusPercentile[] array) - { - fixed (AVHDRPlusPercentile* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator AVHDRPlusPercentile[](AVHDRPlusPercentile_array15 @struct) => @struct.ToArray(); - } - - public unsafe struct AVRational_array15 - { - public static readonly int Size = 15; - AVRational _0; AVRational _1; AVRational _2; AVRational _3; AVRational _4; AVRational _5; AVRational _6; AVRational _7; AVRational _8; AVRational _9; AVRational _10; AVRational _11; AVRational _12; AVRational _13; AVRational _14; - - public AVRational this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { *(p0 + i) = value; } } - } - public AVRational[] ToArray() - { - fixed (AVRational* p0 = &_0) { var a = new AVRational[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(AVRational[] array) - { - fixed (AVRational* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator AVRational[](AVRational_array15 @struct) => @struct.ToArray(); - } - - public unsafe struct byte_array16 - { - public static readonly int Size = 16; - fixed byte _[16]; - - public byte this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (byte_array16* p = &this) { return p->_[i]; } } - set { if (i >= Size) throw new 
ArgumentOutOfRangeException(); fixed (byte_array16* p = &this) { p->_[i] = value; } } - } - public byte[] ToArray() - { - fixed (byte_array16* p = &this) { var a = new byte[Size]; for (uint i = 0; i < Size; i++) a[i] = p->_[i]; return a; } - } - public void UpdateFrom(byte[] array) - { - fixed (byte_array16* p = &this) { uint i = 0; foreach(var value in array) { p->_[i++] = value; if (i >= Size) return; } } - } - public static implicit operator byte[](byte_array16 @struct) => @struct.ToArray(); - } - - public unsafe struct AVRational_array25 - { - public static readonly int Size = 25; - AVRational _0; AVRational _1; AVRational _2; AVRational _3; AVRational _4; AVRational _5; AVRational _6; AVRational _7; AVRational _8; AVRational _9; AVRational _10; AVRational _11; AVRational _12; AVRational _13; AVRational _14; AVRational _15; AVRational _16; AVRational _17; AVRational _18; AVRational _19; AVRational _20; AVRational _21; AVRational _22; AVRational _23; AVRational _24; - - public AVRational this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { *(p0 + i) = value; } } - } - public AVRational[] ToArray() - { - fixed (AVRational* p0 = &_0) { var a = new AVRational[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(AVRational[] array) - { - fixed (AVRational* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator AVRational[](AVRational_array25 @struct) => @struct.ToArray(); - } - - public unsafe struct AVRational_array25x25 - { - public static readonly int Size = 25; - AVRational_array25 _0; AVRational_array25 _1; AVRational_array25 _2; AVRational_array25 _3; AVRational_array25 _4; AVRational_array25 _5; AVRational_array25 _6; AVRational_array25 _7; AVRational_array25 
_8; AVRational_array25 _9; AVRational_array25 _10; AVRational_array25 _11; AVRational_array25 _12; AVRational_array25 _13; AVRational_array25 _14; AVRational_array25 _15; AVRational_array25 _16; AVRational_array25 _17; AVRational_array25 _18; AVRational_array25 _19; AVRational_array25 _20; AVRational_array25 _21; AVRational_array25 _22; AVRational_array25 _23; AVRational_array25 _24; - - public AVRational_array25 this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVRational_array25* p0 = &_0) { return *(p0 + i); } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (AVRational_array25* p0 = &_0) { *(p0 + i) = value; } } - } - public AVRational_array25[] ToArray() - { - fixed (AVRational_array25* p0 = &_0) { var a = new AVRational_array25[Size]; for (uint i = 0; i < Size; i++) a[i] = *(p0 + i); return a; } - } - public void UpdateFrom(AVRational_array25[] array) - { - fixed (AVRational_array25* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= Size) return; } } - } - public static implicit operator AVRational_array25[](AVRational_array25x25 @struct) => @struct.ToArray(); - } - - public unsafe struct byte_array61440 - { - public static readonly int Size = 61440; - fixed byte _[61440]; - - public byte this[uint i] - { - get { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (byte_array61440* p = &this) { return p->_[i]; } } - set { if (i >= Size) throw new ArgumentOutOfRangeException(); fixed (byte_array61440* p = &this) { p->_[i] = value; } } - } - public byte[] ToArray() - { - fixed (byte_array61440* p = &this) { var a = new byte[Size]; for (uint i = 0; i < Size; i++) a[i] = p->_[i]; return a; } - } - public void UpdateFrom(byte[] array) - { - fixed (byte_array61440* p = &this) { uint i = 0; foreach(var value in array) { p->_[i++] = value; if (i >= Size) return; } } - } - public static implicit operator byte[](byte_array61440 @struct) => @struct.ToArray(); - } - 
-} diff --git a/FFmpeg.AutoGen/FFmpeg.cs b/FFmpeg.AutoGen/FFmpeg.cs index eeacd104..f89152f2 100644 --- a/FFmpeg.AutoGen/FFmpeg.cs +++ b/FFmpeg.AutoGen/FFmpeg.cs @@ -1,109 +1,55 @@ using System; -using System.Collections.Generic; -using System.Linq; -using FFmpeg.AutoGen.Native; -namespace FFmpeg.AutoGen -{ - public delegate IntPtr GetOrLoadLibrary(string libraryName); - - public static partial class ffmpeg - { - public static readonly int EAGAIN; - - public static readonly int ENOMEM = 12; - - public static readonly int EINVAL = 22; - - public static readonly int EPIPE = 32; - - private static readonly object SyncRoot = new(); - - public static readonly Dictionary LibraryDependenciesMap = - new() - { - { "avcodec", new[] { "avutil", "swresample" } }, - { "avdevice", new[] { "avcodec", "avfilter", "avformat", "avutil" } }, - { "avfilter", new[] { "avcodec", "avformat", "avutil", "postproc", "swresample", "swscale" } }, - { "avformat", new[] { "avcodec", "avutil" } }, - { "avutil", new string[0] }, - { "postproc", new[] { "avutil" } }, - { "swresample", new[] { "avutil" } }, - { "swscale", new[] { "avutil" } } - }; - - public static readonly Dictionary LoadedLibraries = new(); +namespace FFmpeg.AutoGen; - static ffmpeg() - { - GetOrLoadLibrary = libraryName => LoadLibrary(libraryName, true); - - EAGAIN = LibraryLoader.GetPlatformId() switch - { - PlatformID.MacOSX => 35, - _ => 11 - }; - } +public static partial class ffmpeg +{ + public static readonly int EAGAIN; - /// - /// Gets or sets the root path for loading libraries. - /// Work out of box with companion ffmpeg distribution package like FFmpeg.AutoGen.Redist.windows.x64 - /// - /// The root path. 
- public static string RootPath { get; set; } = AppDomain.CurrentDomain.BaseDirectory; + public static readonly int ENOMEM = 12; - public static GetOrLoadLibrary GetOrLoadLibrary { get; set; } + public static readonly int EINVAL = 22; - private static IntPtr LoadLibrary(string libraryName, bool throwException) + public static readonly int EPIPE = 32; + + static ffmpeg() + { + EAGAIN = FunctionResolverFactory.GetPlatformId() switch { - if (LoadedLibraries.TryGetValue(libraryName, out var ptr)) return ptr; - - lock (SyncRoot) - { - if (LoadedLibraries.TryGetValue(libraryName, out ptr)) return ptr; + PlatformID.MacOSX => 35, + _ => 11 + }; - var dependencies = LibraryDependenciesMap[libraryName]; - dependencies.Where(n => !LoadedLibraries.ContainsKey(n) && !n.Equals(libraryName)) - .ToList() - .ForEach(n => LoadLibrary(n, false)); - - var version = LibraryVersionMap[libraryName]; - ptr = LibraryLoader.LoadNativeLibrary(RootPath, libraryName, version); - - if (ptr != IntPtr.Zero) LoadedLibraries.Add(libraryName, ptr); - else if (throwException) - { - throw new DllNotFoundException( - $"Unable to load DLL '{libraryName}.{version} under {RootPath}': The specified module could not be found."); - } + DynamicallyLoadedBindings.Initialize(); + } - return ptr; - } - } - public static T GetFunctionDelegate(IntPtr libraryHandle, string functionName) - => FunctionLoader.GetFunctionDelegate(libraryHandle, functionName); + /// + /// Gets or sets the root path for loading libraries. + /// Work out of box with companion ffmpeg distribution package like FFmpeg.AutoGen.Redist.windows.x64 + /// + /// The root path. 
+ public static string RootPath { get; set; } = AppDomain.CurrentDomain.BaseDirectory; - public static ulong UINT64_C(T a) - => Convert.ToUInt64(a); + public static ulong UINT64_C(T a) + => Convert.ToUInt64(a); - public static int AVERROR(T1 a) - => -Convert.ToInt32(a); + public static int AVERROR(T1 a) + => -Convert.ToInt32(a); - public static int MKTAG(T1 a, T2 b, T3 c, T4 d) - => (int)(Convert.ToUInt32(a) | (Convert.ToUInt32(b) << 8) | (Convert.ToUInt32(c) << 16) | - (Convert.ToUInt32(d) << 24)); + public static int MKTAG(T1 a, T2 b, T3 c, T4 d) + => (int)(Convert.ToUInt32(a) | (Convert.ToUInt32(b) << 8) | (Convert.ToUInt32(c) << 16) | + (Convert.ToUInt32(d) << 24)); - public static int FFERRTAG(T1 a, T2 b, T3 c, T4 d) - => -MKTAG(a, b, c, d); + public static int FFERRTAG(T1 a, T2 b, T3 c, T4 d) + => -MKTAG(a, b, c, d); - public static int AV_VERSION_INT(T1 a, T2 b, T3 c) => - (Convert.ToInt32(a) << 16) | (Convert.ToInt32(b) << 8) | Convert.ToInt32(c); + public static int AV_VERSION_INT(T1 a, T2 b, T3 c) => + (Convert.ToInt32(a) << 16) | (Convert.ToInt32(b) << 8) | Convert.ToInt32(c); - public static string AV_VERSION_DOT(T1 a, T2 b, T3 c) - => $"{a}.{b}.{c}"; + public static string AV_VERSION_DOT(T1 a, T2 b, T3 c) + => $"{a}.{b}.{c}"; - public static string AV_VERSION(T1 a, T2 b, T3 c) - => AV_VERSION_DOT(a, b, c); - } -} \ No newline at end of file + public static string AV_VERSION(T1 a, T2 b, T3 c) + => AV_VERSION_DOT(a, b, c); +} diff --git a/FFmpeg.AutoGen/FFmpeg.delegates.g.cs b/FFmpeg.AutoGen/FFmpeg.delegates.g.cs deleted file mode 100644 index b7d7f70c..00000000 --- a/FFmpeg.AutoGen/FFmpeg.delegates.g.cs +++ /dev/null @@ -1,708 +0,0 @@ -using System; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen -{ - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate string AVClass_item_name (void* @ctx); - public unsafe struct AVClass_item_name_func - { - public IntPtr Pointer; - public static implicit operator 
AVClass_item_name_func(AVClass_item_name func) => new AVClass_item_name_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate AVClassCategory AVClass_get_category (void* @ctx); - public unsafe struct AVClass_get_category_func - { - public IntPtr Pointer; - public static implicit operator AVClass_get_category_func(AVClass_get_category func) => new AVClass_get_category_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVClass_query_ranges (AVOptionRanges** @p0, void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key, int @flags); - public unsafe struct AVClass_query_ranges_func - { - public IntPtr Pointer; - public static implicit operator AVClass_query_ranges_func(AVClass_query_ranges func) => new AVClass_query_ranges_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void* AVClass_child_next (void* @obj, void* @prev); - public unsafe struct AVClass_child_next_func - { - public IntPtr Pointer; - public static implicit operator AVClass_child_next_func(AVClass_child_next func) => new AVClass_child_next_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate AVClass* AVClass_child_class_iterate (void** @iter); - public unsafe struct AVClass_child_class_iterate_func - { - public IntPtr Pointer; - public static implicit operator AVClass_child_class_iterate_func(AVClass_child_class_iterate func) => new AVClass_child_class_iterate_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int av_tree_find_cmp (void* @key, void* @b); - public unsafe struct av_tree_find_cmp_func - { - public IntPtr Pointer; - public static implicit operator av_tree_find_cmp_func(av_tree_find_cmp func) => new av_tree_find_cmp_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int av_tree_insert_cmp (void* @key, void* @b); - public unsafe struct av_tree_insert_cmp_func - { - public IntPtr Pointer; - public static implicit operator av_tree_insert_cmp_func(av_tree_insert_cmp func) => new av_tree_insert_cmp_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int av_tree_enumerate_cmp (void* @opaque, void* @elem); - public unsafe struct av_tree_enumerate_cmp_func - { - public IntPtr Pointer; - public static implicit operator av_tree_enumerate_cmp_func(av_tree_enumerate_cmp func) => new av_tree_enumerate_cmp_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int av_tree_enumerate_enu (void* @opaque, void* @elem); - public unsafe struct av_tree_enumerate_enu_func - { - public IntPtr Pointer; - public static implicit operator av_tree_enumerate_enu_func(av_tree_enumerate_enu func) => new av_tree_enumerate_enu_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void AVHWDeviceContext_free (AVHWDeviceContext* @ctx); - public unsafe struct AVHWDeviceContext_free_func - { - public IntPtr Pointer; - public static implicit operator AVHWDeviceContext_free_func(AVHWDeviceContext_free func) => new AVHWDeviceContext_free_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void AVHWFramesContext_free (AVHWFramesContext* @ctx); - public unsafe struct AVHWFramesContext_free_func - { - public IntPtr Pointer; - public static implicit operator AVHWFramesContext_free_func(AVHWFramesContext_free func) => new AVHWFramesContext_free_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void AVD3D11VADeviceContext_lock (void* @lock_ctx); - public unsafe struct AVD3D11VADeviceContext_lock_func - { - public IntPtr Pointer; - public static implicit operator AVD3D11VADeviceContext_lock_func(AVD3D11VADeviceContext_lock func) => new AVD3D11VADeviceContext_lock_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void AVD3D11VADeviceContext_unlock (void* @lock_ctx); - public unsafe struct AVD3D11VADeviceContext_unlock_func - { - public IntPtr Pointer; - public static implicit operator AVD3D11VADeviceContext_unlock_func(AVD3D11VADeviceContext_unlock func) => new AVD3D11VADeviceContext_unlock_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void AVCodecContext_draw_horiz_band (AVCodecContext* @s, AVFrame* @src, ref int_array8 @offset, int @y, int @type, int @height); - public unsafe struct AVCodecContext_draw_horiz_band_func - { - public IntPtr Pointer; - public static implicit operator AVCodecContext_draw_horiz_band_func(AVCodecContext_draw_horiz_band func) => new AVCodecContext_draw_horiz_band_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate AVPixelFormat AVCodecContext_get_format (AVCodecContext* @s, AVPixelFormat* @fmt); - public unsafe struct AVCodecContext_get_format_func - { - public IntPtr Pointer; - public static implicit operator AVCodecContext_get_format_func(AVCodecContext_get_format func) => new AVCodecContext_get_format_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVCodecContext_get_buffer2 (AVCodecContext* @s, AVFrame* @frame, int @flags); - public unsafe struct AVCodecContext_get_buffer2_func - { - public IntPtr Pointer; - public static implicit operator AVCodecContext_get_buffer2_func(AVCodecContext_get_buffer2 func) => new AVCodecContext_get_buffer2_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVHWAccel_alloc_frame (AVCodecContext* @avctx, AVFrame* @frame); - public unsafe struct AVHWAccel_alloc_frame_func - { - public IntPtr Pointer; - public static implicit operator AVHWAccel_alloc_frame_func(AVHWAccel_alloc_frame func) => new AVHWAccel_alloc_frame_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVHWAccel_start_frame (AVCodecContext* @avctx, byte* @buf, uint @buf_size); - public unsafe struct AVHWAccel_start_frame_func - { - public IntPtr Pointer; - public static implicit operator AVHWAccel_start_frame_func(AVHWAccel_start_frame func) => new AVHWAccel_start_frame_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVHWAccel_decode_params (AVCodecContext* @avctx, int @type, byte* @buf, uint @buf_size); - public unsafe struct AVHWAccel_decode_params_func - { - public IntPtr Pointer; - public static implicit operator AVHWAccel_decode_params_func(AVHWAccel_decode_params func) => new AVHWAccel_decode_params_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVHWAccel_decode_slice (AVCodecContext* @avctx, byte* @buf, uint @buf_size); - public unsafe struct AVHWAccel_decode_slice_func - { - public IntPtr Pointer; - public static implicit operator AVHWAccel_decode_slice_func(AVHWAccel_decode_slice func) => new AVHWAccel_decode_slice_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVHWAccel_end_frame (AVCodecContext* @avctx); - public unsafe struct AVHWAccel_end_frame_func - { - public IntPtr Pointer; - public static implicit operator AVHWAccel_end_frame_func(AVHWAccel_end_frame func) => new AVHWAccel_end_frame_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVHWAccel_init (AVCodecContext* @avctx); - public unsafe struct AVHWAccel_init_func - { - public IntPtr Pointer; - public static implicit operator AVHWAccel_init_func(AVHWAccel_init func) => new AVHWAccel_init_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVHWAccel_uninit (AVCodecContext* @avctx); - public unsafe struct AVHWAccel_uninit_func - { - public IntPtr Pointer; - public static implicit operator AVHWAccel_uninit_func(AVHWAccel_uninit func) => new AVHWAccel_uninit_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVHWAccel_frame_params (AVCodecContext* @avctx, AVBufferRef* @hw_frames_ctx); - public unsafe struct AVHWAccel_frame_params_func - { - public IntPtr Pointer; - public static implicit operator AVHWAccel_frame_params_func(AVHWAccel_frame_params func) => new AVHWAccel_frame_params_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVCodecContext_execute (AVCodecContext* @c, func_func @func, void* @arg2, int* @ret, int @count, int @size); - public unsafe struct AVCodecContext_execute_func - { - public IntPtr Pointer; - public static implicit operator AVCodecContext_execute_func(AVCodecContext_execute func) => new AVCodecContext_execute_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVCodecContext_execute2 (AVCodecContext* @c, func_func @func, void* @arg2, int* @ret, int @count); - public unsafe struct AVCodecContext_execute2_func - { - public IntPtr Pointer; - public static implicit operator AVCodecContext_execute2_func(AVCodecContext_execute2 func) => new AVCodecContext_execute2_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVCodecContext_get_encode_buffer (AVCodecContext* @s, AVPacket* @pkt, int @flags); - public unsafe struct AVCodecContext_get_encode_buffer_func - { - public IntPtr Pointer; - public static implicit operator AVCodecContext_get_encode_buffer_func(AVCodecContext_get_encode_buffer func) => new AVCodecContext_get_encode_buffer_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVCodecParser_parser_init (AVCodecParserContext* @s); - public unsafe struct AVCodecParser_parser_init_func - { - public IntPtr Pointer; - public static implicit operator AVCodecParser_parser_init_func(AVCodecParser_parser_init func) => new AVCodecParser_parser_init_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVCodecParser_parser_parse (AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size); - public unsafe struct AVCodecParser_parser_parse_func - { - public IntPtr Pointer; - public static implicit operator AVCodecParser_parser_parse_func(AVCodecParser_parser_parse func) => new AVCodecParser_parser_parse_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void AVCodecParser_parser_close (AVCodecParserContext* @s); - public unsafe struct AVCodecParser_parser_close_func - { - public IntPtr Pointer; - public static implicit operator AVCodecParser_parser_close_func(AVCodecParser_parser_close func) => new AVCodecParser_parser_close_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVCodecParser_split (AVCodecContext* @avctx, byte* @buf, int @buf_size); - public unsafe struct AVCodecParser_split_func - { - public IntPtr Pointer; - public static implicit operator AVCodecParser_split_func(AVCodecParser_split func) => new AVCodecParser_split_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int avcodec_default_execute_func (AVCodecContext* @c2, void* @arg2); - public unsafe struct avcodec_default_execute_func_func - { - public IntPtr Pointer; - public static implicit operator avcodec_default_execute_func_func(avcodec_default_execute_func func) => new avcodec_default_execute_func_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int avcodec_default_execute2_func (AVCodecContext* @c2, void* @arg2, int @p2, int @p3); - public unsafe struct avcodec_default_execute2_func_func - { - public IntPtr Pointer; - public static implicit operator avcodec_default_execute2_func_func(avcodec_default_execute2_func func) => new avcodec_default_execute2_func_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVInputFormat_read_probe (AVProbeData* @p0); - public unsafe struct AVInputFormat_read_probe_func - { - public IntPtr Pointer; - public static implicit operator AVInputFormat_read_probe_func(AVInputFormat_read_probe func) => new AVInputFormat_read_probe_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVInputFormat_read_header (AVFormatContext* @p0); - public unsafe struct AVInputFormat_read_header_func - { - public IntPtr Pointer; - public static implicit operator AVInputFormat_read_header_func(AVInputFormat_read_header func) => new AVInputFormat_read_header_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVInputFormat_read_packet (AVFormatContext* @p0, AVPacket* @pkt); - public unsafe struct AVInputFormat_read_packet_func - { - public IntPtr Pointer; - public static implicit operator AVInputFormat_read_packet_func(AVInputFormat_read_packet func) => new AVInputFormat_read_packet_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVInputFormat_read_close (AVFormatContext* @p0); - public unsafe struct AVInputFormat_read_close_func - { - public IntPtr Pointer; - public static implicit operator AVInputFormat_read_close_func(AVInputFormat_read_close func) => new AVInputFormat_read_close_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVInputFormat_read_seek (AVFormatContext* @p0, int @stream_index, long @timestamp, int @flags); - public unsafe struct AVInputFormat_read_seek_func - { - public IntPtr Pointer; - public static implicit operator AVInputFormat_read_seek_func(AVInputFormat_read_seek func) => new AVInputFormat_read_seek_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate long AVInputFormat_read_timestamp (AVFormatContext* @s, int @stream_index, long* @pos, long @pos_limit); - public unsafe struct AVInputFormat_read_timestamp_func - { - public IntPtr Pointer; - public static implicit operator AVInputFormat_read_timestamp_func(AVInputFormat_read_timestamp func) => new AVInputFormat_read_timestamp_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVInputFormat_read_play (AVFormatContext* @p0); - public unsafe struct AVInputFormat_read_play_func - { - public IntPtr Pointer; - public static implicit operator AVInputFormat_read_play_func(AVInputFormat_read_play func) => new AVInputFormat_read_play_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVInputFormat_read_pause (AVFormatContext* @p0); - public unsafe struct AVInputFormat_read_pause_func - { - public IntPtr Pointer; - public static implicit operator AVInputFormat_read_pause_func(AVInputFormat_read_pause func) => new AVInputFormat_read_pause_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVInputFormat_read_seek2 (AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags); - public unsafe struct AVInputFormat_read_seek2_func - { - public IntPtr Pointer; - public static implicit operator AVInputFormat_read_seek2_func(AVInputFormat_read_seek2 func) => new AVInputFormat_read_seek2_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVInputFormat_get_device_list (AVFormatContext* @s, AVDeviceInfoList* @device_list); - public unsafe struct AVInputFormat_get_device_list_func - { - public IntPtr Pointer; - public static implicit operator AVInputFormat_get_device_list_func(AVInputFormat_get_device_list func) => new AVInputFormat_get_device_list_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVIOContext_read_packet (void* @opaque, byte* @buf, int @buf_size); - public unsafe struct AVIOContext_read_packet_func - { - public IntPtr Pointer; - public static implicit operator AVIOContext_read_packet_func(AVIOContext_read_packet func) => new AVIOContext_read_packet_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVIOContext_write_packet (void* @opaque, byte* @buf, int @buf_size); - public unsafe struct AVIOContext_write_packet_func - { - public IntPtr Pointer; - public static implicit operator AVIOContext_write_packet_func(AVIOContext_write_packet func) => new AVIOContext_write_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate long AVIOContext_seek (void* @opaque, long @offset, int @whence); - public unsafe struct AVIOContext_seek_func - { - public IntPtr Pointer; - public static implicit operator AVIOContext_seek_func(AVIOContext_seek func) => new AVIOContext_seek_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate ulong AVIOContext_update_checksum (ulong @checksum, byte* @buf, uint @size); - public unsafe struct AVIOContext_update_checksum_func - { - public IntPtr Pointer; - public static implicit operator AVIOContext_update_checksum_func(AVIOContext_update_checksum func) => new AVIOContext_update_checksum_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVIOContext_read_pause (void* @opaque, int @pause); - public unsafe struct AVIOContext_read_pause_func - { - public IntPtr Pointer; - public static implicit operator AVIOContext_read_pause_func(AVIOContext_read_pause func) => new AVIOContext_read_pause_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate long AVIOContext_read_seek (void* @opaque, int @stream_index, long @timestamp, int @flags); - public unsafe struct AVIOContext_read_seek_func - { - public IntPtr Pointer; - public static implicit operator AVIOContext_read_seek_func(AVIOContext_read_seek func) => new AVIOContext_read_seek_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVIOContext_write_data_type (void* @opaque, byte* @buf, int @buf_size, AVIODataMarkerType @type, long @time); - public unsafe struct AVIOContext_write_data_type_func - { - public IntPtr Pointer; - public static implicit operator AVIOContext_write_data_type_func(AVIOContext_write_data_type func) => new AVIOContext_write_data_type_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVIOInterruptCB_callback (void* @p0); - public unsafe struct AVIOInterruptCB_callback_func - { - public IntPtr Pointer; - public static implicit operator AVIOInterruptCB_callback_func(AVIOInterruptCB_callback func) => new AVIOInterruptCB_callback_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVFormatContext_control_message_cb (AVFormatContext* @s, int @type, void* @data, ulong @data_size); - public unsafe struct AVFormatContext_control_message_cb_func - { - public IntPtr Pointer; - public static implicit operator AVFormatContext_control_message_cb_func(AVFormatContext_control_message_cb func) => new AVFormatContext_control_message_cb_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVFormatContext_io_open (AVFormatContext* @s, AVIOContext** @pb, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, int @flags, AVDictionary** @options); - public unsafe struct AVFormatContext_io_open_func - { - public IntPtr Pointer; - public static implicit operator AVFormatContext_io_open_func(AVFormatContext_io_open func) => new AVFormatContext_io_open_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void AVFormatContext_io_close (AVFormatContext* @s, AVIOContext* @pb); - public unsafe struct AVFormatContext_io_close_func - { - public IntPtr Pointer; - public static implicit operator AVFormatContext_io_close_func(AVFormatContext_io_close func) => new AVFormatContext_io_close_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVFormatContext_io_close2 (AVFormatContext* @s, AVIOContext* @pb); - public unsafe struct AVFormatContext_io_close2_func - { - public IntPtr Pointer; - public static implicit operator AVFormatContext_io_close2_func(AVFormatContext_io_close2 func) => new AVFormatContext_io_close2_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVOutputFormat_write_header (AVFormatContext* @p0); - public unsafe struct AVOutputFormat_write_header_func - { - public IntPtr Pointer; - public static implicit operator AVOutputFormat_write_header_func(AVOutputFormat_write_header func) => new AVOutputFormat_write_header_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVOutputFormat_write_packet (AVFormatContext* @p0, AVPacket* @pkt); - public unsafe struct AVOutputFormat_write_packet_func - { - public IntPtr Pointer; - public static implicit operator AVOutputFormat_write_packet_func(AVOutputFormat_write_packet func) => new AVOutputFormat_write_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVOutputFormat_write_trailer (AVFormatContext* @p0); - public unsafe struct AVOutputFormat_write_trailer_func - { - public IntPtr Pointer; - public static implicit operator AVOutputFormat_write_trailer_func(AVOutputFormat_write_trailer func) => new AVOutputFormat_write_trailer_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVOutputFormat_interleave_packet (AVFormatContext* @s, AVPacket* @pkt, int @flush, int @has_packet); - public unsafe struct AVOutputFormat_interleave_packet_func - { - public IntPtr Pointer; - public static implicit operator AVOutputFormat_interleave_packet_func(AVOutputFormat_interleave_packet func) => new AVOutputFormat_interleave_packet_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVOutputFormat_query_codec (AVCodecID @id, int @std_compliance); - public unsafe struct AVOutputFormat_query_codec_func - { - public IntPtr Pointer; - public static implicit operator AVOutputFormat_query_codec_func(AVOutputFormat_query_codec func) => new AVOutputFormat_query_codec_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void AVOutputFormat_get_output_timestamp (AVFormatContext* @s, int @stream, long* @dts, long* @wall); - public unsafe struct AVOutputFormat_get_output_timestamp_func - { - public IntPtr Pointer; - public static implicit operator AVOutputFormat_get_output_timestamp_func(AVOutputFormat_get_output_timestamp func) => new AVOutputFormat_get_output_timestamp_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVOutputFormat_control_message (AVFormatContext* @s, int @type, void* @data, ulong @data_size); - public unsafe struct AVOutputFormat_control_message_func - { - public IntPtr Pointer; - public static implicit operator AVOutputFormat_control_message_func(AVOutputFormat_control_message func) => new AVOutputFormat_control_message_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVOutputFormat_write_uncoded_frame (AVFormatContext* @p0, int @stream_index, AVFrame** @frame, uint @flags); - public unsafe struct AVOutputFormat_write_uncoded_frame_func - { - public IntPtr Pointer; - public static implicit operator AVOutputFormat_write_uncoded_frame_func(AVOutputFormat_write_uncoded_frame func) => new AVOutputFormat_write_uncoded_frame_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVOutputFormat_get_device_list (AVFormatContext* @s, AVDeviceInfoList* @device_list); - public unsafe struct AVOutputFormat_get_device_list_func - { - public IntPtr Pointer; - public static implicit operator AVOutputFormat_get_device_list_func(AVOutputFormat_get_device_list func) => new AVOutputFormat_get_device_list_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVOutputFormat_init (AVFormatContext* @p0); - public unsafe struct AVOutputFormat_init_func - { - public IntPtr Pointer; - public static implicit operator AVOutputFormat_init_func(AVOutputFormat_init func) => new AVOutputFormat_init_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void AVOutputFormat_deinit (AVFormatContext* @p0); - public unsafe struct AVOutputFormat_deinit_func - { - public IntPtr Pointer; - public static implicit operator AVOutputFormat_deinit_func(AVOutputFormat_deinit func) => new AVOutputFormat_deinit_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVOutputFormat_check_bitstream (AVFormatContext* @s, AVStream* @st, AVPacket* @pkt); - public unsafe struct AVOutputFormat_check_bitstream_func - { - public IntPtr Pointer; - public static implicit operator AVOutputFormat_check_bitstream_func(AVOutputFormat_check_bitstream func) => new AVOutputFormat_check_bitstream_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVFilter_preinit (AVFilterContext* @ctx); - public unsafe struct AVFilter_preinit_func - { - public IntPtr Pointer; - public static implicit operator AVFilter_preinit_func(AVFilter_preinit func) => new AVFilter_preinit_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVFilter_init (AVFilterContext* @ctx); - public unsafe struct AVFilter_init_func - { - public IntPtr Pointer; - public static implicit operator AVFilter_init_func(AVFilter_init func) => new AVFilter_init_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVFilter_init_dict (AVFilterContext* @ctx, AVDictionary** @options); - public unsafe struct AVFilter_init_dict_func - { - public IntPtr Pointer; - public static implicit operator AVFilter_init_dict_func(AVFilter_init_dict func) => new AVFilter_init_dict_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void AVFilter_uninit (AVFilterContext* @ctx); - public unsafe struct AVFilter_uninit_func - { - public IntPtr Pointer; - public static implicit operator AVFilter_uninit_func(AVFilter_uninit func) => new AVFilter_uninit_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int _query_func (AVFilterContext* @p0); - public unsafe struct _query_func_func - { - public IntPtr Pointer; - public static implicit operator _query_func_func(_query_func func) => new _query_func_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVFilter_process_command (AVFilterContext* @p0, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @cmd, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @arg, byte* @res, int @res_len, int @flags); - public unsafe struct AVFilter_process_command_func - { - public IntPtr Pointer; - public static implicit operator AVFilter_process_command_func(AVFilter_process_command func) => new AVFilter_process_command_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVFilter_activate (AVFilterContext* @ctx); - public unsafe struct AVFilter_activate_func - { - public IntPtr Pointer; - public static implicit operator AVFilter_activate_func(AVFilter_activate func) => new AVFilter_activate_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int func (AVFilterContext* @ctx, void* @arg, int @jobnr, int @nb_jobs); - public unsafe struct func_func - { - public IntPtr Pointer; - public static implicit operator func_func(func func) => new func_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int AVFilterGraph_execute (AVFilterContext* @ctx, func_func @func, void* @arg, int* @ret, int @nb_jobs); - public unsafe struct AVFilterGraph_execute_func - { - public IntPtr Pointer; - public static implicit operator AVFilterGraph_execute_func(AVFilterGraph_execute func) => new AVFilterGraph_execute_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void av_log_set_callback_callback (void* @p0, int @p1, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @p2, byte* @p3); - public unsafe struct av_log_set_callback_callback_func - { - public IntPtr Pointer; - public static implicit operator av_log_set_callback_callback_func(av_log_set_callback_callback func) => new av_log_set_callback_callback_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void av_buffer_create_free (void* @opaque, byte* @data); - public unsafe struct av_buffer_create_free_func - { - public IntPtr Pointer; - public static implicit operator av_buffer_create_free_func(av_buffer_create_free func) => new av_buffer_create_free_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate AVBufferRef* av_buffer_pool_init_alloc (ulong @size); - public unsafe struct av_buffer_pool_init_alloc_func - { - public IntPtr Pointer; - public static implicit operator av_buffer_pool_init_alloc_func(av_buffer_pool_init_alloc func) => new av_buffer_pool_init_alloc_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate AVBufferRef* av_buffer_pool_init2_alloc (void* @opaque, ulong @size); - public unsafe struct av_buffer_pool_init2_alloc_func - { - public IntPtr Pointer; - public static implicit operator av_buffer_pool_init2_alloc_func(av_buffer_pool_init2_alloc func) => new av_buffer_pool_init2_alloc_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate void av_buffer_pool_init2_pool_free (void* @opaque); - public unsafe struct av_buffer_pool_init2_pool_free_func - { - public IntPtr Pointer; - public static implicit operator av_buffer_pool_init2_pool_free_func(av_buffer_pool_init2_pool_free func) => new av_buffer_pool_init2_pool_free_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int avio_alloc_context_read_packet (void* @opaque, byte* @buf, int @buf_size); - public unsafe struct avio_alloc_context_read_packet_func - { - public IntPtr Pointer; - public static implicit operator avio_alloc_context_read_packet_func(avio_alloc_context_read_packet func) => new avio_alloc_context_read_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate int avio_alloc_context_write_packet (void* @opaque, byte* @buf, int @buf_size); - public unsafe struct avio_alloc_context_write_packet_func - { - public IntPtr Pointer; - public static implicit operator avio_alloc_context_write_packet_func(avio_alloc_context_write_packet func) => new avio_alloc_context_write_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - public unsafe delegate long avio_alloc_context_seek (void* @opaque, long @offset, int @whence); - public unsafe struct avio_alloc_context_seek_func - { - public IntPtr Pointer; - public static implicit operator avio_alloc_context_seek_func(avio_alloc_context_seek func) => new avio_alloc_context_seek_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; - } - -} diff --git a/FFmpeg.AutoGen/FFmpeg.enums.g.cs b/FFmpeg.AutoGen/FFmpeg.enums.g.cs deleted file mode 100644 index bf12a3c6..00000000 --- a/FFmpeg.AutoGen/FFmpeg.enums.g.cs +++ /dev/null @@ -1,1740 +0,0 @@ -using System; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen -{ - public enum AVActiveFormatDescription : int - { - @AV_AFD_SAME = 8, - @AV_AFD_4_3 = 9, - @AV_AFD_16_9 = 10, - @AV_AFD_14_9 = 11, - @AV_AFD_4_3_SP_14_9 = 13, - @AV_AFD_16_9_SP_14_9 = 14, - @AV_AFD_SP_4_3 = 15, - } - - /// Message types used by avdevice_app_to_dev_control_message(). - public enum AVAppToDevMessageType : int - { - /// Dummy message. - @AV_APP_TO_DEV_NONE = 1313820229, - /// Window size change message. - @AV_APP_TO_DEV_WINDOW_SIZE = 1195724621, - /// Repaint request message. - @AV_APP_TO_DEV_WINDOW_REPAINT = 1380274241, - /// Request pause/play. - @AV_APP_TO_DEV_PAUSE = 1346458912, - /// Request pause/play. - @AV_APP_TO_DEV_PLAY = 1347174745, - /// Request pause/play. - @AV_APP_TO_DEV_TOGGLE_PAUSE = 1346458964, - /// Volume control message. - @AV_APP_TO_DEV_SET_VOLUME = 1398165324, - /// Mute control messages. - @AV_APP_TO_DEV_MUTE = 541939028, - /// Mute control messages. - @AV_APP_TO_DEV_UNMUTE = 1431131476, - /// Mute control messages. - @AV_APP_TO_DEV_TOGGLE_MUTE = 1414354260, - /// Get volume/mute messages. - @AV_APP_TO_DEV_GET_VOLUME = 1196838732, - /// Get volume/mute messages. 
- @AV_APP_TO_DEV_GET_MUTE = 1196250452, - } - - public enum AVAudioServiceType : int - { - @AV_AUDIO_SERVICE_TYPE_MAIN = 0, - @AV_AUDIO_SERVICE_TYPE_EFFECTS = 1, - @AV_AUDIO_SERVICE_TYPE_VISUALLY_IMPAIRED = 2, - @AV_AUDIO_SERVICE_TYPE_HEARING_IMPAIRED = 3, - @AV_AUDIO_SERVICE_TYPE_DIALOGUE = 4, - @AV_AUDIO_SERVICE_TYPE_COMMENTARY = 5, - @AV_AUDIO_SERVICE_TYPE_EMERGENCY = 6, - @AV_AUDIO_SERVICE_TYPE_VOICE_OVER = 7, - @AV_AUDIO_SERVICE_TYPE_KARAOKE = 8, - /// Not part of ABI - @AV_AUDIO_SERVICE_TYPE_NB = 9, - } - - /// @{ - public enum AVChannel : int - { - @AV_CHAN_NONE = -1, - @AV_CHAN_FRONT_LEFT = 0, - @AV_CHAN_FRONT_RIGHT = 1, - @AV_CHAN_FRONT_CENTER = 2, - @AV_CHAN_LOW_FREQUENCY = 3, - @AV_CHAN_BACK_LEFT = 4, - @AV_CHAN_BACK_RIGHT = 5, - @AV_CHAN_FRONT_LEFT_OF_CENTER = 6, - @AV_CHAN_FRONT_RIGHT_OF_CENTER = 7, - @AV_CHAN_BACK_CENTER = 8, - @AV_CHAN_SIDE_LEFT = 9, - @AV_CHAN_SIDE_RIGHT = 10, - @AV_CHAN_TOP_CENTER = 11, - @AV_CHAN_TOP_FRONT_LEFT = 12, - @AV_CHAN_TOP_FRONT_CENTER = 13, - @AV_CHAN_TOP_FRONT_RIGHT = 14, - @AV_CHAN_TOP_BACK_LEFT = 15, - @AV_CHAN_TOP_BACK_CENTER = 16, - @AV_CHAN_TOP_BACK_RIGHT = 17, - /// Stereo downmix. - @AV_CHAN_STEREO_LEFT = 29, - /// See above. - @AV_CHAN_STEREO_RIGHT = 30, - /// See above. - @AV_CHAN_WIDE_LEFT = 31, - /// See above. - @AV_CHAN_WIDE_RIGHT = 32, - /// See above. - @AV_CHAN_SURROUND_DIRECT_LEFT = 33, - /// See above. - @AV_CHAN_SURROUND_DIRECT_RIGHT = 34, - /// See above. - @AV_CHAN_LOW_FREQUENCY_2 = 35, - /// See above. - @AV_CHAN_TOP_SIDE_LEFT = 36, - /// See above. - @AV_CHAN_TOP_SIDE_RIGHT = 37, - /// See above. - @AV_CHAN_BOTTOM_FRONT_CENTER = 38, - /// See above. - @AV_CHAN_BOTTOM_FRONT_LEFT = 39, - /// See above. - @AV_CHAN_BOTTOM_FRONT_RIGHT = 40, - /// Channel is empty can be safely skipped. - @AV_CHAN_UNUSED = 512, - /// Channel contains data, but its position is unknown. 
- @AV_CHAN_UNKNOWN = 768, - /// Range of channels between AV_CHAN_AMBISONIC_BASE and AV_CHAN_AMBISONIC_END represent Ambisonic components using the ACN system. - @AV_CHAN_AMBISONIC_BASE = 1024, - /// Range of channels between AV_CHAN_AMBISONIC_BASE and AV_CHAN_AMBISONIC_END represent Ambisonic components using the ACN system. - @AV_CHAN_AMBISONIC_END = 2047, - } - - public enum AVChannelOrder : int - { - /// Only the channel count is specified, without any further information about the channel order. - @AV_CHANNEL_ORDER_UNSPEC = 0, - /// The native channel order, i.e. the channels are in the same order in which they are defined in the AVChannel enum. This supports up to 63 different channels. - @AV_CHANNEL_ORDER_NATIVE = 1, - /// The channel order does not correspond to any other predefined order and is stored as an explicit map. For example, this could be used to support layouts with 64 or more channels, or with empty/skipped (AV_CHAN_SILENCE) channels at arbitrary positions. - @AV_CHANNEL_ORDER_CUSTOM = 2, - /// The audio is represented as the decomposition of the sound field into spherical harmonics. Each channel corresponds to a single expansion component. Channels are ordered according to ACN (Ambisonic Channel Number). - @AV_CHANNEL_ORDER_AMBISONIC = 3, - } - - /// Location of chroma samples. 
- public enum AVChromaLocation : int - { - @AVCHROMA_LOC_UNSPECIFIED = 0, - /// MPEG-2/4 4:2:0, H.264 default for 4:2:0 - @AVCHROMA_LOC_LEFT = 1, - /// MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0 - @AVCHROMA_LOC_CENTER = 2, - /// ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2 - @AVCHROMA_LOC_TOPLEFT = 3, - @AVCHROMA_LOC_TOP = 4, - @AVCHROMA_LOC_BOTTOMLEFT = 5, - @AVCHROMA_LOC_BOTTOM = 6, - /// Not part of ABI - @AVCHROMA_LOC_NB = 7, - } - - public enum AVClassCategory : int - { - @AV_CLASS_CATEGORY_NA = 0, - @AV_CLASS_CATEGORY_INPUT = 1, - @AV_CLASS_CATEGORY_OUTPUT = 2, - @AV_CLASS_CATEGORY_MUXER = 3, - @AV_CLASS_CATEGORY_DEMUXER = 4, - @AV_CLASS_CATEGORY_ENCODER = 5, - @AV_CLASS_CATEGORY_DECODER = 6, - @AV_CLASS_CATEGORY_FILTER = 7, - @AV_CLASS_CATEGORY_BITSTREAM_FILTER = 8, - @AV_CLASS_CATEGORY_SWSCALER = 9, - @AV_CLASS_CATEGORY_SWRESAMPLER = 10, - @AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT = 40, - @AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT = 41, - @AV_CLASS_CATEGORY_DEVICE_AUDIO_OUTPUT = 42, - @AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT = 43, - @AV_CLASS_CATEGORY_DEVICE_OUTPUT = 44, - @AV_CLASS_CATEGORY_DEVICE_INPUT = 45, - /// not part of ABI/API - @AV_CLASS_CATEGORY_NB = 46, - } - - /// Identify the syntax and semantics of the bitstream. The principle is roughly: Two decoders with the same ID can decode the same streams. Two encoders with the same ID can encode compatible streams. There may be slight deviations from the principle due to implementation details. 
- public enum AVCodecID : int - { - @AV_CODEC_ID_NONE = 0, - @AV_CODEC_ID_MPEG1VIDEO = 1, - /// preferred ID for MPEG-1/2 video decoding - @AV_CODEC_ID_MPEG2VIDEO = 2, - @AV_CODEC_ID_H261 = 3, - @AV_CODEC_ID_H263 = 4, - @AV_CODEC_ID_RV10 = 5, - @AV_CODEC_ID_RV20 = 6, - @AV_CODEC_ID_MJPEG = 7, - @AV_CODEC_ID_MJPEGB = 8, - @AV_CODEC_ID_LJPEG = 9, - @AV_CODEC_ID_SP5X = 10, - @AV_CODEC_ID_JPEGLS = 11, - @AV_CODEC_ID_MPEG4 = 12, - @AV_CODEC_ID_RAWVIDEO = 13, - @AV_CODEC_ID_MSMPEG4V1 = 14, - @AV_CODEC_ID_MSMPEG4V2 = 15, - @AV_CODEC_ID_MSMPEG4V3 = 16, - @AV_CODEC_ID_WMV1 = 17, - @AV_CODEC_ID_WMV2 = 18, - @AV_CODEC_ID_H263P = 19, - @AV_CODEC_ID_H263I = 20, - @AV_CODEC_ID_FLV1 = 21, - @AV_CODEC_ID_SVQ1 = 22, - @AV_CODEC_ID_SVQ3 = 23, - @AV_CODEC_ID_DVVIDEO = 24, - @AV_CODEC_ID_HUFFYUV = 25, - @AV_CODEC_ID_CYUV = 26, - @AV_CODEC_ID_H264 = 27, - @AV_CODEC_ID_INDEO3 = 28, - @AV_CODEC_ID_VP3 = 29, - @AV_CODEC_ID_THEORA = 30, - @AV_CODEC_ID_ASV1 = 31, - @AV_CODEC_ID_ASV2 = 32, - @AV_CODEC_ID_FFV1 = 33, - @AV_CODEC_ID_4XM = 34, - @AV_CODEC_ID_VCR1 = 35, - @AV_CODEC_ID_CLJR = 36, - @AV_CODEC_ID_MDEC = 37, - @AV_CODEC_ID_ROQ = 38, - @AV_CODEC_ID_INTERPLAY_VIDEO = 39, - @AV_CODEC_ID_XAN_WC3 = 40, - @AV_CODEC_ID_XAN_WC4 = 41, - @AV_CODEC_ID_RPZA = 42, - @AV_CODEC_ID_CINEPAK = 43, - @AV_CODEC_ID_WS_VQA = 44, - @AV_CODEC_ID_MSRLE = 45, - @AV_CODEC_ID_MSVIDEO1 = 46, - @AV_CODEC_ID_IDCIN = 47, - @AV_CODEC_ID_8BPS = 48, - @AV_CODEC_ID_SMC = 49, - @AV_CODEC_ID_FLIC = 50, - @AV_CODEC_ID_TRUEMOTION1 = 51, - @AV_CODEC_ID_VMDVIDEO = 52, - @AV_CODEC_ID_MSZH = 53, - @AV_CODEC_ID_ZLIB = 54, - @AV_CODEC_ID_QTRLE = 55, - @AV_CODEC_ID_TSCC = 56, - @AV_CODEC_ID_ULTI = 57, - @AV_CODEC_ID_QDRAW = 58, - @AV_CODEC_ID_VIXL = 59, - @AV_CODEC_ID_QPEG = 60, - @AV_CODEC_ID_PNG = 61, - @AV_CODEC_ID_PPM = 62, - @AV_CODEC_ID_PBM = 63, - @AV_CODEC_ID_PGM = 64, - @AV_CODEC_ID_PGMYUV = 65, - @AV_CODEC_ID_PAM = 66, - @AV_CODEC_ID_FFVHUFF = 67, - @AV_CODEC_ID_RV30 = 68, - @AV_CODEC_ID_RV40 = 69, - @AV_CODEC_ID_VC1 = 
70, - @AV_CODEC_ID_WMV3 = 71, - @AV_CODEC_ID_LOCO = 72, - @AV_CODEC_ID_WNV1 = 73, - @AV_CODEC_ID_AASC = 74, - @AV_CODEC_ID_INDEO2 = 75, - @AV_CODEC_ID_FRAPS = 76, - @AV_CODEC_ID_TRUEMOTION2 = 77, - @AV_CODEC_ID_BMP = 78, - @AV_CODEC_ID_CSCD = 79, - @AV_CODEC_ID_MMVIDEO = 80, - @AV_CODEC_ID_ZMBV = 81, - @AV_CODEC_ID_AVS = 82, - @AV_CODEC_ID_SMACKVIDEO = 83, - @AV_CODEC_ID_NUV = 84, - @AV_CODEC_ID_KMVC = 85, - @AV_CODEC_ID_FLASHSV = 86, - @AV_CODEC_ID_CAVS = 87, - @AV_CODEC_ID_JPEG2000 = 88, - @AV_CODEC_ID_VMNC = 89, - @AV_CODEC_ID_VP5 = 90, - @AV_CODEC_ID_VP6 = 91, - @AV_CODEC_ID_VP6F = 92, - @AV_CODEC_ID_TARGA = 93, - @AV_CODEC_ID_DSICINVIDEO = 94, - @AV_CODEC_ID_TIERTEXSEQVIDEO = 95, - @AV_CODEC_ID_TIFF = 96, - @AV_CODEC_ID_GIF = 97, - @AV_CODEC_ID_DXA = 98, - @AV_CODEC_ID_DNXHD = 99, - @AV_CODEC_ID_THP = 100, - @AV_CODEC_ID_SGI = 101, - @AV_CODEC_ID_C93 = 102, - @AV_CODEC_ID_BETHSOFTVID = 103, - @AV_CODEC_ID_PTX = 104, - @AV_CODEC_ID_TXD = 105, - @AV_CODEC_ID_VP6A = 106, - @AV_CODEC_ID_AMV = 107, - @AV_CODEC_ID_VB = 108, - @AV_CODEC_ID_PCX = 109, - @AV_CODEC_ID_SUNRAST = 110, - @AV_CODEC_ID_INDEO4 = 111, - @AV_CODEC_ID_INDEO5 = 112, - @AV_CODEC_ID_MIMIC = 113, - @AV_CODEC_ID_RL2 = 114, - @AV_CODEC_ID_ESCAPE124 = 115, - @AV_CODEC_ID_DIRAC = 116, - @AV_CODEC_ID_BFI = 117, - @AV_CODEC_ID_CMV = 118, - @AV_CODEC_ID_MOTIONPIXELS = 119, - @AV_CODEC_ID_TGV = 120, - @AV_CODEC_ID_TGQ = 121, - @AV_CODEC_ID_TQI = 122, - @AV_CODEC_ID_AURA = 123, - @AV_CODEC_ID_AURA2 = 124, - @AV_CODEC_ID_V210X = 125, - @AV_CODEC_ID_TMV = 126, - @AV_CODEC_ID_V210 = 127, - @AV_CODEC_ID_DPX = 128, - @AV_CODEC_ID_MAD = 129, - @AV_CODEC_ID_FRWU = 130, - @AV_CODEC_ID_FLASHSV2 = 131, - @AV_CODEC_ID_CDGRAPHICS = 132, - @AV_CODEC_ID_R210 = 133, - @AV_CODEC_ID_ANM = 134, - @AV_CODEC_ID_BINKVIDEO = 135, - @AV_CODEC_ID_IFF_ILBM = 136, - @AV_CODEC_ID_KGV1 = 137, - @AV_CODEC_ID_YOP = 138, - @AV_CODEC_ID_VP8 = 139, - @AV_CODEC_ID_PICTOR = 140, - @AV_CODEC_ID_ANSI = 141, - @AV_CODEC_ID_A64_MULTI = 142, - 
@AV_CODEC_ID_A64_MULTI5 = 143, - @AV_CODEC_ID_R10K = 144, - @AV_CODEC_ID_MXPEG = 145, - @AV_CODEC_ID_LAGARITH = 146, - @AV_CODEC_ID_PRORES = 147, - @AV_CODEC_ID_JV = 148, - @AV_CODEC_ID_DFA = 149, - @AV_CODEC_ID_WMV3IMAGE = 150, - @AV_CODEC_ID_VC1IMAGE = 151, - @AV_CODEC_ID_UTVIDEO = 152, - @AV_CODEC_ID_BMV_VIDEO = 153, - @AV_CODEC_ID_VBLE = 154, - @AV_CODEC_ID_DXTORY = 155, - @AV_CODEC_ID_V410 = 156, - @AV_CODEC_ID_XWD = 157, - @AV_CODEC_ID_CDXL = 158, - @AV_CODEC_ID_XBM = 159, - @AV_CODEC_ID_ZEROCODEC = 160, - @AV_CODEC_ID_MSS1 = 161, - @AV_CODEC_ID_MSA1 = 162, - @AV_CODEC_ID_TSCC2 = 163, - @AV_CODEC_ID_MTS2 = 164, - @AV_CODEC_ID_CLLC = 165, - @AV_CODEC_ID_MSS2 = 166, - @AV_CODEC_ID_VP9 = 167, - @AV_CODEC_ID_AIC = 168, - @AV_CODEC_ID_ESCAPE130 = 169, - @AV_CODEC_ID_G2M = 170, - @AV_CODEC_ID_WEBP = 171, - @AV_CODEC_ID_HNM4_VIDEO = 172, - @AV_CODEC_ID_HEVC = 173, - @AV_CODEC_ID_FIC = 174, - @AV_CODEC_ID_ALIAS_PIX = 175, - @AV_CODEC_ID_BRENDER_PIX = 176, - @AV_CODEC_ID_PAF_VIDEO = 177, - @AV_CODEC_ID_EXR = 178, - @AV_CODEC_ID_VP7 = 179, - @AV_CODEC_ID_SANM = 180, - @AV_CODEC_ID_SGIRLE = 181, - @AV_CODEC_ID_MVC1 = 182, - @AV_CODEC_ID_MVC2 = 183, - @AV_CODEC_ID_HQX = 184, - @AV_CODEC_ID_TDSC = 185, - @AV_CODEC_ID_HQ_HQA = 186, - @AV_CODEC_ID_HAP = 187, - @AV_CODEC_ID_DDS = 188, - @AV_CODEC_ID_DXV = 189, - @AV_CODEC_ID_SCREENPRESSO = 190, - @AV_CODEC_ID_RSCC = 191, - @AV_CODEC_ID_AVS2 = 192, - @AV_CODEC_ID_PGX = 193, - @AV_CODEC_ID_AVS3 = 194, - @AV_CODEC_ID_MSP2 = 195, - @AV_CODEC_ID_VVC = 196, - @AV_CODEC_ID_Y41P = 197, - @AV_CODEC_ID_AVRP = 198, - @AV_CODEC_ID_012V = 199, - @AV_CODEC_ID_AVUI = 200, - @AV_CODEC_ID_AYUV = 201, - @AV_CODEC_ID_TARGA_Y216 = 202, - @AV_CODEC_ID_V308 = 203, - @AV_CODEC_ID_V408 = 204, - @AV_CODEC_ID_YUV4 = 205, - @AV_CODEC_ID_AVRN = 206, - @AV_CODEC_ID_CPIA = 207, - @AV_CODEC_ID_XFACE = 208, - @AV_CODEC_ID_SNOW = 209, - @AV_CODEC_ID_SMVJPEG = 210, - @AV_CODEC_ID_APNG = 211, - @AV_CODEC_ID_DAALA = 212, - @AV_CODEC_ID_CFHD = 213, - 
@AV_CODEC_ID_TRUEMOTION2RT = 214, - @AV_CODEC_ID_M101 = 215, - @AV_CODEC_ID_MAGICYUV = 216, - @AV_CODEC_ID_SHEERVIDEO = 217, - @AV_CODEC_ID_YLC = 218, - @AV_CODEC_ID_PSD = 219, - @AV_CODEC_ID_PIXLET = 220, - @AV_CODEC_ID_SPEEDHQ = 221, - @AV_CODEC_ID_FMVC = 222, - @AV_CODEC_ID_SCPR = 223, - @AV_CODEC_ID_CLEARVIDEO = 224, - @AV_CODEC_ID_XPM = 225, - @AV_CODEC_ID_AV1 = 226, - @AV_CODEC_ID_BITPACKED = 227, - @AV_CODEC_ID_MSCC = 228, - @AV_CODEC_ID_SRGC = 229, - @AV_CODEC_ID_SVG = 230, - @AV_CODEC_ID_GDV = 231, - @AV_CODEC_ID_FITS = 232, - @AV_CODEC_ID_IMM4 = 233, - @AV_CODEC_ID_PROSUMER = 234, - @AV_CODEC_ID_MWSC = 235, - @AV_CODEC_ID_WCMV = 236, - @AV_CODEC_ID_RASC = 237, - @AV_CODEC_ID_HYMT = 238, - @AV_CODEC_ID_ARBC = 239, - @AV_CODEC_ID_AGM = 240, - @AV_CODEC_ID_LSCR = 241, - @AV_CODEC_ID_VP4 = 242, - @AV_CODEC_ID_IMM5 = 243, - @AV_CODEC_ID_MVDV = 244, - @AV_CODEC_ID_MVHA = 245, - @AV_CODEC_ID_CDTOONS = 246, - @AV_CODEC_ID_MV30 = 247, - @AV_CODEC_ID_NOTCHLC = 248, - @AV_CODEC_ID_PFM = 249, - @AV_CODEC_ID_MOBICLIP = 250, - @AV_CODEC_ID_PHOTOCD = 251, - @AV_CODEC_ID_IPU = 252, - @AV_CODEC_ID_ARGO = 253, - @AV_CODEC_ID_CRI = 254, - @AV_CODEC_ID_SIMBIOSIS_IMX = 255, - @AV_CODEC_ID_SGA_VIDEO = 256, - @AV_CODEC_ID_GEM = 257, - @AV_CODEC_ID_VBN = 258, - @AV_CODEC_ID_JPEGXL = 259, - @AV_CODEC_ID_QOI = 260, - @AV_CODEC_ID_PHM = 261, - /// A dummy id pointing at the start of audio codecs - @AV_CODEC_ID_FIRST_AUDIO = 65536, - @AV_CODEC_ID_PCM_S16LE = 65536, - @AV_CODEC_ID_PCM_S16BE = 65537, - @AV_CODEC_ID_PCM_U16LE = 65538, - @AV_CODEC_ID_PCM_U16BE = 65539, - @AV_CODEC_ID_PCM_S8 = 65540, - @AV_CODEC_ID_PCM_U8 = 65541, - @AV_CODEC_ID_PCM_MULAW = 65542, - @AV_CODEC_ID_PCM_ALAW = 65543, - @AV_CODEC_ID_PCM_S32LE = 65544, - @AV_CODEC_ID_PCM_S32BE = 65545, - @AV_CODEC_ID_PCM_U32LE = 65546, - @AV_CODEC_ID_PCM_U32BE = 65547, - @AV_CODEC_ID_PCM_S24LE = 65548, - @AV_CODEC_ID_PCM_S24BE = 65549, - @AV_CODEC_ID_PCM_U24LE = 65550, - @AV_CODEC_ID_PCM_U24BE = 65551, - 
@AV_CODEC_ID_PCM_S24DAUD = 65552, - @AV_CODEC_ID_PCM_ZORK = 65553, - @AV_CODEC_ID_PCM_S16LE_PLANAR = 65554, - @AV_CODEC_ID_PCM_DVD = 65555, - @AV_CODEC_ID_PCM_F32BE = 65556, - @AV_CODEC_ID_PCM_F32LE = 65557, - @AV_CODEC_ID_PCM_F64BE = 65558, - @AV_CODEC_ID_PCM_F64LE = 65559, - @AV_CODEC_ID_PCM_BLURAY = 65560, - @AV_CODEC_ID_PCM_LXF = 65561, - @AV_CODEC_ID_S302M = 65562, - @AV_CODEC_ID_PCM_S8_PLANAR = 65563, - @AV_CODEC_ID_PCM_S24LE_PLANAR = 65564, - @AV_CODEC_ID_PCM_S32LE_PLANAR = 65565, - @AV_CODEC_ID_PCM_S16BE_PLANAR = 65566, - @AV_CODEC_ID_PCM_S64LE = 65567, - @AV_CODEC_ID_PCM_S64BE = 65568, - @AV_CODEC_ID_PCM_F16LE = 65569, - @AV_CODEC_ID_PCM_F24LE = 65570, - @AV_CODEC_ID_PCM_VIDC = 65571, - @AV_CODEC_ID_PCM_SGA = 65572, - @AV_CODEC_ID_ADPCM_IMA_QT = 69632, - @AV_CODEC_ID_ADPCM_IMA_WAV = 69633, - @AV_CODEC_ID_ADPCM_IMA_DK3 = 69634, - @AV_CODEC_ID_ADPCM_IMA_DK4 = 69635, - @AV_CODEC_ID_ADPCM_IMA_WS = 69636, - @AV_CODEC_ID_ADPCM_IMA_SMJPEG = 69637, - @AV_CODEC_ID_ADPCM_MS = 69638, - @AV_CODEC_ID_ADPCM_4XM = 69639, - @AV_CODEC_ID_ADPCM_XA = 69640, - @AV_CODEC_ID_ADPCM_ADX = 69641, - @AV_CODEC_ID_ADPCM_EA = 69642, - @AV_CODEC_ID_ADPCM_G726 = 69643, - @AV_CODEC_ID_ADPCM_CT = 69644, - @AV_CODEC_ID_ADPCM_SWF = 69645, - @AV_CODEC_ID_ADPCM_YAMAHA = 69646, - @AV_CODEC_ID_ADPCM_SBPRO_4 = 69647, - @AV_CODEC_ID_ADPCM_SBPRO_3 = 69648, - @AV_CODEC_ID_ADPCM_SBPRO_2 = 69649, - @AV_CODEC_ID_ADPCM_THP = 69650, - @AV_CODEC_ID_ADPCM_IMA_AMV = 69651, - @AV_CODEC_ID_ADPCM_EA_R1 = 69652, - @AV_CODEC_ID_ADPCM_EA_R3 = 69653, - @AV_CODEC_ID_ADPCM_EA_R2 = 69654, - @AV_CODEC_ID_ADPCM_IMA_EA_SEAD = 69655, - @AV_CODEC_ID_ADPCM_IMA_EA_EACS = 69656, - @AV_CODEC_ID_ADPCM_EA_XAS = 69657, - @AV_CODEC_ID_ADPCM_EA_MAXIS_XA = 69658, - @AV_CODEC_ID_ADPCM_IMA_ISS = 69659, - @AV_CODEC_ID_ADPCM_G722 = 69660, - @AV_CODEC_ID_ADPCM_IMA_APC = 69661, - @AV_CODEC_ID_ADPCM_VIMA = 69662, - @AV_CODEC_ID_ADPCM_AFC = 69663, - @AV_CODEC_ID_ADPCM_IMA_OKI = 69664, - @AV_CODEC_ID_ADPCM_DTK = 69665, - 
@AV_CODEC_ID_ADPCM_IMA_RAD = 69666, - @AV_CODEC_ID_ADPCM_G726LE = 69667, - @AV_CODEC_ID_ADPCM_THP_LE = 69668, - @AV_CODEC_ID_ADPCM_PSX = 69669, - @AV_CODEC_ID_ADPCM_AICA = 69670, - @AV_CODEC_ID_ADPCM_IMA_DAT4 = 69671, - @AV_CODEC_ID_ADPCM_MTAF = 69672, - @AV_CODEC_ID_ADPCM_AGM = 69673, - @AV_CODEC_ID_ADPCM_ARGO = 69674, - @AV_CODEC_ID_ADPCM_IMA_SSI = 69675, - @AV_CODEC_ID_ADPCM_ZORK = 69676, - @AV_CODEC_ID_ADPCM_IMA_APM = 69677, - @AV_CODEC_ID_ADPCM_IMA_ALP = 69678, - @AV_CODEC_ID_ADPCM_IMA_MTF = 69679, - @AV_CODEC_ID_ADPCM_IMA_CUNNING = 69680, - @AV_CODEC_ID_ADPCM_IMA_MOFLEX = 69681, - @AV_CODEC_ID_ADPCM_IMA_ACORN = 69682, - @AV_CODEC_ID_AMR_NB = 73728, - @AV_CODEC_ID_AMR_WB = 73729, - @AV_CODEC_ID_RA_144 = 77824, - @AV_CODEC_ID_RA_288 = 77825, - @AV_CODEC_ID_ROQ_DPCM = 81920, - @AV_CODEC_ID_INTERPLAY_DPCM = 81921, - @AV_CODEC_ID_XAN_DPCM = 81922, - @AV_CODEC_ID_SOL_DPCM = 81923, - @AV_CODEC_ID_SDX2_DPCM = 81924, - @AV_CODEC_ID_GREMLIN_DPCM = 81925, - @AV_CODEC_ID_DERF_DPCM = 81926, - @AV_CODEC_ID_MP2 = 86016, - /// preferred ID for decoding MPEG audio layer 1, 2 or 3 - @AV_CODEC_ID_MP3 = 86017, - @AV_CODEC_ID_AAC = 86018, - @AV_CODEC_ID_AC3 = 86019, - @AV_CODEC_ID_DTS = 86020, - @AV_CODEC_ID_VORBIS = 86021, - @AV_CODEC_ID_DVAUDIO = 86022, - @AV_CODEC_ID_WMAV1 = 86023, - @AV_CODEC_ID_WMAV2 = 86024, - @AV_CODEC_ID_MACE3 = 86025, - @AV_CODEC_ID_MACE6 = 86026, - @AV_CODEC_ID_VMDAUDIO = 86027, - @AV_CODEC_ID_FLAC = 86028, - @AV_CODEC_ID_MP3ADU = 86029, - @AV_CODEC_ID_MP3ON4 = 86030, - @AV_CODEC_ID_SHORTEN = 86031, - @AV_CODEC_ID_ALAC = 86032, - @AV_CODEC_ID_WESTWOOD_SND1 = 86033, - /// as in Berlin toast format - @AV_CODEC_ID_GSM = 86034, - @AV_CODEC_ID_QDM2 = 86035, - @AV_CODEC_ID_COOK = 86036, - @AV_CODEC_ID_TRUESPEECH = 86037, - @AV_CODEC_ID_TTA = 86038, - @AV_CODEC_ID_SMACKAUDIO = 86039, - @AV_CODEC_ID_QCELP = 86040, - @AV_CODEC_ID_WAVPACK = 86041, - @AV_CODEC_ID_DSICINAUDIO = 86042, - @AV_CODEC_ID_IMC = 86043, - @AV_CODEC_ID_MUSEPACK7 = 86044, - @AV_CODEC_ID_MLP 
= 86045, - @AV_CODEC_ID_GSM_MS = 86046, - @AV_CODEC_ID_ATRAC3 = 86047, - @AV_CODEC_ID_APE = 86048, - @AV_CODEC_ID_NELLYMOSER = 86049, - @AV_CODEC_ID_MUSEPACK8 = 86050, - @AV_CODEC_ID_SPEEX = 86051, - @AV_CODEC_ID_WMAVOICE = 86052, - @AV_CODEC_ID_WMAPRO = 86053, - @AV_CODEC_ID_WMALOSSLESS = 86054, - @AV_CODEC_ID_ATRAC3P = 86055, - @AV_CODEC_ID_EAC3 = 86056, - @AV_CODEC_ID_SIPR = 86057, - @AV_CODEC_ID_MP1 = 86058, - @AV_CODEC_ID_TWINVQ = 86059, - @AV_CODEC_ID_TRUEHD = 86060, - @AV_CODEC_ID_MP4ALS = 86061, - @AV_CODEC_ID_ATRAC1 = 86062, - @AV_CODEC_ID_BINKAUDIO_RDFT = 86063, - @AV_CODEC_ID_BINKAUDIO_DCT = 86064, - @AV_CODEC_ID_AAC_LATM = 86065, - @AV_CODEC_ID_QDMC = 86066, - @AV_CODEC_ID_CELT = 86067, - @AV_CODEC_ID_G723_1 = 86068, - @AV_CODEC_ID_G729 = 86069, - @AV_CODEC_ID_8SVX_EXP = 86070, - @AV_CODEC_ID_8SVX_FIB = 86071, - @AV_CODEC_ID_BMV_AUDIO = 86072, - @AV_CODEC_ID_RALF = 86073, - @AV_CODEC_ID_IAC = 86074, - @AV_CODEC_ID_ILBC = 86075, - @AV_CODEC_ID_OPUS = 86076, - @AV_CODEC_ID_COMFORT_NOISE = 86077, - @AV_CODEC_ID_TAK = 86078, - @AV_CODEC_ID_METASOUND = 86079, - @AV_CODEC_ID_PAF_AUDIO = 86080, - @AV_CODEC_ID_ON2AVC = 86081, - @AV_CODEC_ID_DSS_SP = 86082, - @AV_CODEC_ID_CODEC2 = 86083, - @AV_CODEC_ID_FFWAVESYNTH = 86084, - @AV_CODEC_ID_SONIC = 86085, - @AV_CODEC_ID_SONIC_LS = 86086, - @AV_CODEC_ID_EVRC = 86087, - @AV_CODEC_ID_SMV = 86088, - @AV_CODEC_ID_DSD_LSBF = 86089, - @AV_CODEC_ID_DSD_MSBF = 86090, - @AV_CODEC_ID_DSD_LSBF_PLANAR = 86091, - @AV_CODEC_ID_DSD_MSBF_PLANAR = 86092, - @AV_CODEC_ID_4GV = 86093, - @AV_CODEC_ID_INTERPLAY_ACM = 86094, - @AV_CODEC_ID_XMA1 = 86095, - @AV_CODEC_ID_XMA2 = 86096, - @AV_CODEC_ID_DST = 86097, - @AV_CODEC_ID_ATRAC3AL = 86098, - @AV_CODEC_ID_ATRAC3PAL = 86099, - @AV_CODEC_ID_DOLBY_E = 86100, - @AV_CODEC_ID_APTX = 86101, - @AV_CODEC_ID_APTX_HD = 86102, - @AV_CODEC_ID_SBC = 86103, - @AV_CODEC_ID_ATRAC9 = 86104, - @AV_CODEC_ID_HCOM = 86105, - @AV_CODEC_ID_ACELP_KELVIN = 86106, - @AV_CODEC_ID_MPEGH_3D_AUDIO = 86107, - 
@AV_CODEC_ID_SIREN = 86108, - @AV_CODEC_ID_HCA = 86109, - @AV_CODEC_ID_FASTAUDIO = 86110, - @AV_CODEC_ID_MSNSIREN = 86111, - @AV_CODEC_ID_DFPWM = 86112, - /// A dummy ID pointing at the start of subtitle codecs. - @AV_CODEC_ID_FIRST_SUBTITLE = 94208, - @AV_CODEC_ID_DVD_SUBTITLE = 94208, - @AV_CODEC_ID_DVB_SUBTITLE = 94209, - /// raw UTF-8 text - @AV_CODEC_ID_TEXT = 94210, - @AV_CODEC_ID_XSUB = 94211, - @AV_CODEC_ID_SSA = 94212, - @AV_CODEC_ID_MOV_TEXT = 94213, - @AV_CODEC_ID_HDMV_PGS_SUBTITLE = 94214, - @AV_CODEC_ID_DVB_TELETEXT = 94215, - @AV_CODEC_ID_SRT = 94216, - @AV_CODEC_ID_MICRODVD = 94217, - @AV_CODEC_ID_EIA_608 = 94218, - @AV_CODEC_ID_JACOSUB = 94219, - @AV_CODEC_ID_SAMI = 94220, - @AV_CODEC_ID_REALTEXT = 94221, - @AV_CODEC_ID_STL = 94222, - @AV_CODEC_ID_SUBVIEWER1 = 94223, - @AV_CODEC_ID_SUBVIEWER = 94224, - @AV_CODEC_ID_SUBRIP = 94225, - @AV_CODEC_ID_WEBVTT = 94226, - @AV_CODEC_ID_MPL2 = 94227, - @AV_CODEC_ID_VPLAYER = 94228, - @AV_CODEC_ID_PJS = 94229, - @AV_CODEC_ID_ASS = 94230, - @AV_CODEC_ID_HDMV_TEXT_SUBTITLE = 94231, - @AV_CODEC_ID_TTML = 94232, - @AV_CODEC_ID_ARIB_CAPTION = 94233, - /// A dummy ID pointing at the start of various fake codecs. - @AV_CODEC_ID_FIRST_UNKNOWN = 98304, - @AV_CODEC_ID_TTF = 98304, - /// Contain timestamp estimated through PCR of program stream. 
- @AV_CODEC_ID_SCTE_35 = 98305, - @AV_CODEC_ID_EPG = 98306, - @AV_CODEC_ID_BINTEXT = 98307, - @AV_CODEC_ID_XBIN = 98308, - @AV_CODEC_ID_IDF = 98309, - @AV_CODEC_ID_OTF = 98310, - @AV_CODEC_ID_SMPTE_KLV = 98311, - @AV_CODEC_ID_DVD_NAV = 98312, - @AV_CODEC_ID_TIMED_ID3 = 98313, - @AV_CODEC_ID_BIN_DATA = 98314, - /// codec_id is not known (like AV_CODEC_ID_NONE) but lavf should attempt to identify it - @AV_CODEC_ID_PROBE = 102400, - /// _FAKE_ codec to indicate a raw MPEG-2 TS stream (only used by libavformat) - @AV_CODEC_ID_MPEG2TS = 131072, - /// _FAKE_ codec to indicate a MPEG-4 Systems stream (only used by libavformat) - @AV_CODEC_ID_MPEG4SYSTEMS = 131073, - /// Dummy codec for streams containing only metadata information. - @AV_CODEC_ID_FFMETADATA = 135168, - /// Passthrough codec, AVFrames wrapped in AVPacket - @AV_CODEC_ID_WRAPPED_AVFRAME = 135169, - } - - /// Chromaticity coordinates of the source primaries. These values match the ones defined by ISO/IEC 23091-2_2019 subclause 8.1 and ITU-T H.273. - public enum AVColorPrimaries : int - { - @AVCOL_PRI_RESERVED0 = 0, - /// also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B - @AVCOL_PRI_BT709 = 1, - @AVCOL_PRI_UNSPECIFIED = 2, - @AVCOL_PRI_RESERVED = 3, - /// also FCC Title 47 Code of Federal Regulations 73.682 (a)(20) - @AVCOL_PRI_BT470M = 4, - /// also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM - @AVCOL_PRI_BT470BG = 5, - /// also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC - @AVCOL_PRI_SMPTE170M = 6, - /// identical to above, also called "SMPTE C" even though it uses D65 - @AVCOL_PRI_SMPTE240M = 7, - /// colour filters using Illuminant C - @AVCOL_PRI_FILM = 8, - /// ITU-R BT2020 - @AVCOL_PRI_BT2020 = 9, - /// SMPTE ST 428-1 (CIE 1931 XYZ) - @AVCOL_PRI_SMPTE428 = 10, - @AVCOL_PRI_SMPTEST428_1 = 10, - /// SMPTE ST 431-2 (2011) / DCI P3 - @AVCOL_PRI_SMPTE431 = 11, - /// SMPTE ST 432-1 (2010) / P3 D65 / Display P3 - @AVCOL_PRI_SMPTE432 = 12, - /// EBU Tech. 
3213-E (nothing there) / one of JEDEC P22 group phosphors - @AVCOL_PRI_EBU3213 = 22, - @AVCOL_PRI_JEDEC_P22 = 22, - /// Not part of ABI - @AVCOL_PRI_NB = 23, - } - - /// Visual content value range. - public enum AVColorRange : int - { - @AVCOL_RANGE_UNSPECIFIED = 0, - /// Narrow or limited range content. - @AVCOL_RANGE_MPEG = 1, - /// Full range content. - @AVCOL_RANGE_JPEG = 2, - /// Not part of ABI - @AVCOL_RANGE_NB = 3, - } - - /// YUV colorspace type. These values match the ones defined by ISO/IEC 23091-2_2019 subclause 8.3. - public enum AVColorSpace : int - { - /// order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1 - @AVCOL_SPC_RGB = 0, - /// also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B - @AVCOL_SPC_BT709 = 1, - @AVCOL_SPC_UNSPECIFIED = 2, - /// reserved for future use by ITU-T and ISO/IEC just like 15-255 are - @AVCOL_SPC_RESERVED = 3, - /// FCC Title 47 Code of Federal Regulations 73.682 (a)(20) - @AVCOL_SPC_FCC = 4, - /// also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601 - @AVCOL_SPC_BT470BG = 5, - /// also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above - @AVCOL_SPC_SMPTE170M = 6, - /// derived from 170M primaries and D65 white point, 170M is derived from BT470 System M's primaries - @AVCOL_SPC_SMPTE240M = 7, - /// used by Dirac / VC-2 and H.264 FRext, see ITU-T SG16 - @AVCOL_SPC_YCGCO = 8, - @AVCOL_SPC_YCOCG = 8, - /// ITU-R BT2020 non-constant luminance system - @AVCOL_SPC_BT2020_NCL = 9, - /// ITU-R BT2020 constant luminance system - @AVCOL_SPC_BT2020_CL = 10, - /// SMPTE 2085, Y'D'zD'x - @AVCOL_SPC_SMPTE2085 = 11, - /// Chromaticity-derived non-constant luminance system - @AVCOL_SPC_CHROMA_DERIVED_NCL = 12, - /// Chromaticity-derived constant luminance system - @AVCOL_SPC_CHROMA_DERIVED_CL = 13, - /// ITU-R BT.2100-0, ICtCp - @AVCOL_SPC_ICTCP = 14, - /// Not part of ABI - @AVCOL_SPC_NB = 15, - } - - 
/// Color Transfer Characteristic. These values match the ones defined by ISO/IEC 23091-2_2019 subclause 8.2. - public enum AVColorTransferCharacteristic : int - { - @AVCOL_TRC_RESERVED0 = 0, - /// also ITU-R BT1361 - @AVCOL_TRC_BT709 = 1, - @AVCOL_TRC_UNSPECIFIED = 2, - @AVCOL_TRC_RESERVED = 3, - /// also ITU-R BT470M / ITU-R BT1700 625 PAL & SECAM - @AVCOL_TRC_GAMMA22 = 4, - /// also ITU-R BT470BG - @AVCOL_TRC_GAMMA28 = 5, - /// also ITU-R BT601-6 525 or 625 / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC - @AVCOL_TRC_SMPTE170M = 6, - @AVCOL_TRC_SMPTE240M = 7, - /// "Linear transfer characteristics" - @AVCOL_TRC_LINEAR = 8, - /// "Logarithmic transfer characteristic (100:1 range)" - @AVCOL_TRC_LOG = 9, - /// "Logarithmic transfer characteristic (100 * Sqrt(10) : 1 range)" - @AVCOL_TRC_LOG_SQRT = 10, - /// IEC 61966-2-4 - @AVCOL_TRC_IEC61966_2_4 = 11, - /// ITU-R BT1361 Extended Colour Gamut - @AVCOL_TRC_BT1361_ECG = 12, - /// IEC 61966-2-1 (sRGB or sYCC) - @AVCOL_TRC_IEC61966_2_1 = 13, - /// ITU-R BT2020 for 10-bit system - @AVCOL_TRC_BT2020_10 = 14, - /// ITU-R BT2020 for 12-bit system - @AVCOL_TRC_BT2020_12 = 15, - /// SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems - @AVCOL_TRC_SMPTE2084 = 16, - @AVCOL_TRC_SMPTEST2084 = 16, - /// SMPTE ST 428-1 - @AVCOL_TRC_SMPTE428 = 17, - @AVCOL_TRC_SMPTEST428_1 = 17, - /// ARIB STD-B67, known as "Hybrid log-gamma" - @AVCOL_TRC_ARIB_STD_B67 = 18, - /// Not part of ABI - @AVCOL_TRC_NB = 19, - } - - /// Message types used by avdevice_dev_to_app_control_message(). - public enum AVDevToAppMessageType : int - { - /// Dummy message. - @AV_DEV_TO_APP_NONE = 1313820229, - /// Create window buffer message. - @AV_DEV_TO_APP_CREATE_WINDOW_BUFFER = 1111708229, - /// Prepare window buffer message. - @AV_DEV_TO_APP_PREPARE_WINDOW_BUFFER = 1112560197, - /// Display window buffer message. - @AV_DEV_TO_APP_DISPLAY_WINDOW_BUFFER = 1111771475, - /// Destroy window buffer message. 
- @AV_DEV_TO_APP_DESTROY_WINDOW_BUFFER = 1111770451, - /// Buffer fullness status messages. - @AV_DEV_TO_APP_BUFFER_OVERFLOW = 1112491596, - /// Buffer fullness status messages. - @AV_DEV_TO_APP_BUFFER_UNDERFLOW = 1112884812, - /// Buffer readable/writable. - @AV_DEV_TO_APP_BUFFER_READABLE = 1112687648, - /// Buffer readable/writable. - @AV_DEV_TO_APP_BUFFER_WRITABLE = 1113018912, - /// Mute state change message. - @AV_DEV_TO_APP_MUTE_STATE_CHANGED = 1129141588, - /// Volume level change message. - @AV_DEV_TO_APP_VOLUME_LEVEL_CHANGED = 1129729868, - } - - public enum AVDiscard : int - { - /// discard nothing - @AVDISCARD_NONE = -16, - /// discard useless packets like 0 size packets in avi - @AVDISCARD_DEFAULT = 0, - /// discard all non reference - @AVDISCARD_NONREF = 8, - /// discard all bidirectional frames - @AVDISCARD_BIDIR = 16, - /// discard all non intra frames - @AVDISCARD_NONINTRA = 24, - /// discard all frames except keyframes - @AVDISCARD_NONKEY = 32, - /// discard all - @AVDISCARD_ALL = 48, - } - - /// The duration of a video can be estimated through various ways, and this enum can be used to know how the duration was estimated. - public enum AVDurationEstimationMethod : int - { - /// Duration accurately estimated from PTSes - @AVFMT_DURATION_FROM_PTS = 0, - /// Duration estimated from a stream with a known duration - @AVFMT_DURATION_FROM_STREAM = 1, - /// Duration estimated from bitrate (less accurate) - @AVFMT_DURATION_FROM_BITRATE = 2, - } - - public enum AVFieldOrder : int - { - @AV_FIELD_UNKNOWN = 0, - @AV_FIELD_PROGRESSIVE = 1, - @AV_FIELD_TT = 2, - @AV_FIELD_BB = 3, - @AV_FIELD_TB = 4, - @AV_FIELD_BT = 5, - } - - /// stage of the initialization of the link properties (dimensions, etc) - public enum AVFilterLink_init_state : int - { - /// not started - @AVLINK_UNINIT = 0, - /// started, but incomplete - @AVLINK_STARTINIT = 1, - /// complete - @AVLINK_INIT = 2, - } - - /// @{ AVFrame is an abstraction for reference-counted raw multimedia data. 
- public enum AVFrameSideDataType : int - { - /// The data is the AVPanScan struct defined in libavcodec. - @AV_FRAME_DATA_PANSCAN = 0, - /// ATSC A53 Part 4 Closed Captions. A53 CC bitstream is stored as uint8_t in AVFrameSideData.data. The number of bytes of CC data is AVFrameSideData.size. - @AV_FRAME_DATA_A53_CC = 1, - /// Stereoscopic 3d metadata. The data is the AVStereo3D struct defined in libavutil/stereo3d.h. - @AV_FRAME_DATA_STEREO3D = 2, - /// The data is the AVMatrixEncoding enum defined in libavutil/channel_layout.h. - @AV_FRAME_DATA_MATRIXENCODING = 3, - /// Metadata relevant to a downmix procedure. The data is the AVDownmixInfo struct defined in libavutil/downmix_info.h. - @AV_FRAME_DATA_DOWNMIX_INFO = 4, - /// ReplayGain information in the form of the AVReplayGain struct. - @AV_FRAME_DATA_REPLAYGAIN = 5, - /// This side data contains a 3x3 transformation matrix describing an affine transformation that needs to be applied to the frame for correct presentation. - @AV_FRAME_DATA_DISPLAYMATRIX = 6, - /// Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVActiveFormatDescription enum. - @AV_FRAME_DATA_AFD = 7, - /// Motion vectors exported by some codecs (on demand through the export_mvs flag set in the libavcodec AVCodecContext flags2 option). The data is the AVMotionVector struct defined in libavutil/motion_vector.h. - @AV_FRAME_DATA_MOTION_VECTORS = 8, - /// Recommmends skipping the specified number of samples. This is exported only if the "skip_manual" AVOption is set in libavcodec. This has the same format as AV_PKT_DATA_SKIP_SAMPLES. - @AV_FRAME_DATA_SKIP_SAMPLES = 9, - /// This side data must be associated with an audio frame and corresponds to enum AVAudioServiceType defined in avcodec.h. - @AV_FRAME_DATA_AUDIO_SERVICE_TYPE = 10, - /// Mastering display metadata associated with a video frame. 
The payload is an AVMasteringDisplayMetadata type and contains information about the mastering display color volume. - @AV_FRAME_DATA_MASTERING_DISPLAY_METADATA = 11, - /// The GOP timecode in 25 bit timecode format. Data format is 64-bit integer. This is set on the first frame of a GOP that has a temporal reference of 0. - @AV_FRAME_DATA_GOP_TIMECODE = 12, - /// The data represents the AVSphericalMapping structure defined in libavutil/spherical.h. - @AV_FRAME_DATA_SPHERICAL = 13, - /// Content light level (based on CTA-861.3). This payload contains data in the form of the AVContentLightMetadata struct. - @AV_FRAME_DATA_CONTENT_LIGHT_LEVEL = 14, - /// The data contains an ICC profile as an opaque octet buffer following the format described by ISO 15076-1 with an optional name defined in the metadata key entry "name". - @AV_FRAME_DATA_ICC_PROFILE = 15, - /// Timecode which conforms to SMPTE ST 12-1. The data is an array of 4 uint32_t where the first uint32_t describes how many (1-3) of the other timecodes are used. The timecode format is described in the documentation of av_timecode_get_smpte_from_framenum() function in libavutil/timecode.h. - @AV_FRAME_DATA_S12M_TIMECODE = 16, - /// HDR dynamic metadata associated with a video frame. The payload is an AVDynamicHDRPlus type and contains information for color volume transform - application 4 of SMPTE 2094-40:2016 standard. - @AV_FRAME_DATA_DYNAMIC_HDR_PLUS = 17, - /// Regions Of Interest, the data is an array of AVRegionOfInterest type, the number of array element is implied by AVFrameSideData.size / AVRegionOfInterest.self_size. - @AV_FRAME_DATA_REGIONS_OF_INTEREST = 18, - /// Encoding parameters for a video frame, as described by AVVideoEncParams. - @AV_FRAME_DATA_VIDEO_ENC_PARAMS = 19, - /// User data unregistered metadata associated with a video frame. 
This is the H.26[45] UDU SEI message, and shouldn't be used for any other purpose The data is stored as uint8_t in AVFrameSideData.data which is 16 bytes of uuid_iso_iec_11578 followed by AVFrameSideData.size - 16 bytes of user_data_payload_byte. - @AV_FRAME_DATA_SEI_UNREGISTERED = 20, - /// Film grain parameters for a frame, described by AVFilmGrainParams. Must be present for every frame which should have film grain applied. - @AV_FRAME_DATA_FILM_GRAIN_PARAMS = 21, - /// Bounding boxes for object detection and classification, as described by AVDetectionBBoxHeader. - @AV_FRAME_DATA_DETECTION_BBOXES = 22, - /// Dolby Vision RPU raw data, suitable for passing to x265 or other libraries. Array of uint8_t, with NAL emulation bytes intact. - @AV_FRAME_DATA_DOVI_RPU_BUFFER = 23, - /// Parsed Dolby Vision metadata, suitable for passing to a software implementation. The payload is the AVDOVIMetadata struct defined in libavutil/dovi_meta.h. - @AV_FRAME_DATA_DOVI_METADATA = 24, - /// HDR Vivid dynamic metadata associated with a video frame. The payload is an AVDynamicHDRVivid type and contains information for color volume transform - CUVA 005.1-2021. - @AV_FRAME_DATA_DYNAMIC_HDR_VIVID = 25, - } - - /// Option for overlapping elliptical pixel selectors in an image. - public enum AVHDRPlusOverlapProcessOption : int - { - @AV_HDR_PLUS_OVERLAP_PROCESS_WEIGHTED_AVERAGING = 0, - @AV_HDR_PLUS_OVERLAP_PROCESS_LAYERING = 1, - } - - public enum AVHWDeviceType : int - { - @AV_HWDEVICE_TYPE_NONE = 0, - @AV_HWDEVICE_TYPE_VDPAU = 1, - @AV_HWDEVICE_TYPE_CUDA = 2, - @AV_HWDEVICE_TYPE_VAAPI = 3, - @AV_HWDEVICE_TYPE_DXVA2 = 4, - @AV_HWDEVICE_TYPE_QSV = 5, - @AV_HWDEVICE_TYPE_VIDEOTOOLBOX = 6, - @AV_HWDEVICE_TYPE_D3D11VA = 7, - @AV_HWDEVICE_TYPE_DRM = 8, - @AV_HWDEVICE_TYPE_OPENCL = 9, - @AV_HWDEVICE_TYPE_MEDIACODEC = 10, - @AV_HWDEVICE_TYPE_VULKAN = 11, - } - - public enum AVHWFrameTransferDirection : int - { - /// Transfer the data from the queried hw frame. 
- @AV_HWFRAME_TRANSFER_DIRECTION_FROM = 0, - /// Transfer the data to the queried hw frame. - @AV_HWFRAME_TRANSFER_DIRECTION_TO = 1, - } - - /// Different data types that can be returned via the AVIO write_data_type callback. - public enum AVIODataMarkerType : int - { - /// Header data; this needs to be present for the stream to be decodeable. - @AVIO_DATA_MARKER_HEADER = 0, - /// A point in the output bytestream where a decoder can start decoding (i.e. a keyframe). A demuxer/decoder given the data flagged with AVIO_DATA_MARKER_HEADER, followed by any AVIO_DATA_MARKER_SYNC_POINT, should give decodeable results. - @AVIO_DATA_MARKER_SYNC_POINT = 1, - /// A point in the output bytestream where a demuxer can start parsing (for non self synchronizing bytestream formats). That is, any non-keyframe packet start point. - @AVIO_DATA_MARKER_BOUNDARY_POINT = 2, - /// This is any, unlabelled data. It can either be a muxer not marking any positions at all, it can be an actual boundary/sync point that the muxer chooses not to mark, or a later part of a packet/fragment that is cut into multiple write callbacks due to limited IO buffer size. - @AVIO_DATA_MARKER_UNKNOWN = 3, - /// Trailer data, which doesn't contain actual content, but only for finalizing the output file. - @AVIO_DATA_MARKER_TRAILER = 4, - /// A point in the output bytestream where the underlying AVIOContext might flush the buffer depending on latency or buffering requirements. Typically means the end of a packet. - @AVIO_DATA_MARKER_FLUSH_POINT = 5, - } - - /// Directory entry types. 
- public enum AVIODirEntryType : int - { - @AVIO_ENTRY_UNKNOWN = 0, - @AVIO_ENTRY_BLOCK_DEVICE = 1, - @AVIO_ENTRY_CHARACTER_DEVICE = 2, - @AVIO_ENTRY_DIRECTORY = 3, - @AVIO_ENTRY_NAMED_PIPE = 4, - @AVIO_ENTRY_SYMBOLIC_LINK = 5, - @AVIO_ENTRY_SOCKET = 6, - @AVIO_ENTRY_FILE = 7, - @AVIO_ENTRY_SERVER = 8, - @AVIO_ENTRY_SHARE = 9, - @AVIO_ENTRY_WORKGROUP = 10, - } - - public enum AVMatrixEncoding : int - { - @AV_MATRIX_ENCODING_NONE = 0, - @AV_MATRIX_ENCODING_DOLBY = 1, - @AV_MATRIX_ENCODING_DPLII = 2, - @AV_MATRIX_ENCODING_DPLIIX = 3, - @AV_MATRIX_ENCODING_DPLIIZ = 4, - @AV_MATRIX_ENCODING_DOLBYEX = 5, - @AV_MATRIX_ENCODING_DOLBYHEADPHONE = 6, - @AV_MATRIX_ENCODING_NB = 7, - } - - /// Media Type - public enum AVMediaType : int - { - /// Usually treated as AVMEDIA_TYPE_DATA - @AVMEDIA_TYPE_UNKNOWN = -1, - @AVMEDIA_TYPE_VIDEO = 0, - @AVMEDIA_TYPE_AUDIO = 1, - /// Opaque data information usually continuous - @AVMEDIA_TYPE_DATA = 2, - @AVMEDIA_TYPE_SUBTITLE = 3, - /// Opaque data information usually sparse - @AVMEDIA_TYPE_ATTACHMENT = 4, - @AVMEDIA_TYPE_NB = 5, - } - - /// @{ AVOptions provide a generic system to declare options on arbitrary structs ("objects"). An option can have a help text, a type and a range of possible values. Options may then be enumerated, read and written to. 
- public enum AVOptionType : int - { - @AV_OPT_TYPE_FLAGS = 0, - @AV_OPT_TYPE_INT = 1, - @AV_OPT_TYPE_INT64 = 2, - @AV_OPT_TYPE_DOUBLE = 3, - @AV_OPT_TYPE_FLOAT = 4, - @AV_OPT_TYPE_STRING = 5, - @AV_OPT_TYPE_RATIONAL = 6, - /// offset must point to a pointer immediately followed by an int for the length - @AV_OPT_TYPE_BINARY = 7, - @AV_OPT_TYPE_DICT = 8, - @AV_OPT_TYPE_UINT64 = 9, - @AV_OPT_TYPE_CONST = 10, - /// offset must point to two consecutive integers - @AV_OPT_TYPE_IMAGE_SIZE = 11, - @AV_OPT_TYPE_PIXEL_FMT = 12, - @AV_OPT_TYPE_SAMPLE_FMT = 13, - /// offset must point to AVRational - @AV_OPT_TYPE_VIDEO_RATE = 14, - @AV_OPT_TYPE_DURATION = 15, - @AV_OPT_TYPE_COLOR = 16, - @AV_OPT_TYPE_CHANNEL_LAYOUT = 17, - @AV_OPT_TYPE_BOOL = 18, - @AV_OPT_TYPE_CHLAYOUT = 19, - } - - /// Types and functions for working with AVPacket. @{ - public enum AVPacketSideDataType : int - { - /// An AV_PKT_DATA_PALETTE side data packet contains exactly AVPALETTE_SIZE bytes worth of palette. This side data signals that a new palette is present. - @AV_PKT_DATA_PALETTE = 0, - /// The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format that the extradata buffer was changed and the receiving side should act upon it appropriately. The new extradata is embedded in the side data buffer and should be immediately used for processing the current frame or packet. - @AV_PKT_DATA_NEW_EXTRADATA = 1, - /// An AV_PKT_DATA_PARAM_CHANGE side data packet is laid out as follows: - @AV_PKT_DATA_PARAM_CHANGE = 2, - /// An AV_PKT_DATA_H263_MB_INFO side data packet contains a number of structures with info about macroblocks relevant to splitting the packet into smaller packets on macroblock edges (e.g. as for RFC 2190). That is, it does not necessarily contain info about all macroblocks, as long as the distance between macroblocks in the info is smaller than the target payload size. 
Each MB info structure is 12 bytes, and is laid out as follows: - @AV_PKT_DATA_H263_MB_INFO = 3, - /// This side data should be associated with an audio stream and contains ReplayGain information in form of the AVReplayGain struct. - @AV_PKT_DATA_REPLAYGAIN = 4, - /// This side data contains a 3x3 transformation matrix describing an affine transformation that needs to be applied to the decoded video frames for correct presentation. - @AV_PKT_DATA_DISPLAYMATRIX = 5, - /// This side data should be associated with a video stream and contains Stereoscopic 3D information in form of the AVStereo3D struct. - @AV_PKT_DATA_STEREO3D = 6, - /// This side data should be associated with an audio stream and corresponds to enum AVAudioServiceType. - @AV_PKT_DATA_AUDIO_SERVICE_TYPE = 7, - /// This side data contains quality related information from the encoder. - @AV_PKT_DATA_QUALITY_STATS = 8, - /// This side data contains an integer value representing the stream index of a "fallback" track. A fallback track indicates an alternate track to use when the current track can not be decoded for some reason. e.g. no decoder available for codec. - @AV_PKT_DATA_FALLBACK_TRACK = 9, - /// This side data corresponds to the AVCPBProperties struct. - @AV_PKT_DATA_CPB_PROPERTIES = 10, - /// Recommmends skipping the specified number of samples - @AV_PKT_DATA_SKIP_SAMPLES = 11, - /// An AV_PKT_DATA_JP_DUALMONO side data packet indicates that the packet may contain "dual mono" audio specific to Japanese DTV and if it is true, recommends only the selected channel to be used. - @AV_PKT_DATA_JP_DUALMONO = 12, - /// A list of zero terminated key/value strings. There is no end marker for the list, so it is required to rely on the side data size to stop. - @AV_PKT_DATA_STRINGS_METADATA = 13, - /// Subtitle event position - @AV_PKT_DATA_SUBTITLE_POSITION = 14, - /// Data found in BlockAdditional element of matroska container. 
There is no end marker for the data, so it is required to rely on the side data size to recognize the end. 8 byte id (as found in BlockAddId) followed by data. - @AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL = 15, - /// The optional first identifier line of a WebVTT cue. - @AV_PKT_DATA_WEBVTT_IDENTIFIER = 16, - /// The optional settings (rendering instructions) that immediately follow the timestamp specifier of a WebVTT cue. - @AV_PKT_DATA_WEBVTT_SETTINGS = 17, - /// A list of zero terminated key/value strings. There is no end marker for the list, so it is required to rely on the side data size to stop. This side data includes updated metadata which appeared in the stream. - @AV_PKT_DATA_METADATA_UPDATE = 18, - /// MPEGTS stream ID as uint8_t, this is required to pass the stream ID information from the demuxer to the corresponding muxer. - @AV_PKT_DATA_MPEGTS_STREAM_ID = 19, - /// Mastering display metadata (based on SMPTE-2086:2014). This metadata should be associated with a video stream and contains data in the form of the AVMasteringDisplayMetadata struct. - @AV_PKT_DATA_MASTERING_DISPLAY_METADATA = 20, - /// This side data should be associated with a video stream and corresponds to the AVSphericalMapping structure. - @AV_PKT_DATA_SPHERICAL = 21, - /// Content light level (based on CTA-861.3). This metadata should be associated with a video stream and contains data in the form of the AVContentLightMetadata struct. - @AV_PKT_DATA_CONTENT_LIGHT_LEVEL = 22, - /// ATSC A53 Part 4 Closed Captions. This metadata should be associated with a video stream. A53 CC bitstream is stored as uint8_t in AVPacketSideData.data. The number of bytes of CC data is AVPacketSideData.size. - @AV_PKT_DATA_A53_CC = 23, - /// This side data is encryption initialization data. The format is not part of ABI, use av_encryption_init_info_* methods to access. - @AV_PKT_DATA_ENCRYPTION_INIT_INFO = 24, - /// This side data contains encryption info for how to decrypt the packet. 
The format is not part of ABI, use av_encryption_info_* methods to access. - @AV_PKT_DATA_ENCRYPTION_INFO = 25, - /// Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVActiveFormatDescription enum. - @AV_PKT_DATA_AFD = 26, - /// Producer Reference Time data corresponding to the AVProducerReferenceTime struct, usually exported by some encoders (on demand through the prft flag set in the AVCodecContext export_side_data field). - @AV_PKT_DATA_PRFT = 27, - /// ICC profile data consisting of an opaque octet buffer following the format described by ISO 15076-1. - @AV_PKT_DATA_ICC_PROFILE = 28, - /// DOVI configuration ref: dolby-vision-bitstreams-within-the-iso-base-media-file-format-v2.1.2, section 2.2 dolby-vision-bitstreams-in-mpeg-2-transport-stream-multiplex-v1.2, section 3.3 Tags are stored in struct AVDOVIDecoderConfigurationRecord. - @AV_PKT_DATA_DOVI_CONF = 29, - /// Timecode which conforms to SMPTE ST 12-1:2014. The data is an array of 4 uint32_t where the first uint32_t describes how many (1-3) of the other timecodes are used. The timecode format is described in the documentation of av_timecode_get_smpte_from_framenum() function in libavutil/timecode.h. - @AV_PKT_DATA_S12M_TIMECODE = 30, - /// HDR10+ dynamic metadata associated with a video frame. The metadata is in the form of the AVDynamicHDRPlus struct and contains information for color volume transform - application 4 of SMPTE 2094-40:2016 standard. - @AV_PKT_DATA_DYNAMIC_HDR10_PLUS = 31, - /// The number of side data types. This is not part of the public API/ABI in the sense that it may change when new side data types are added. This must stay the last enum value. If its value becomes huge, some code using it needs to be updated as it assumes it to be smaller than other limits. 
- @AV_PKT_DATA_NB = 32, - } - - /// @{ - public enum AVPictureStructure : int - { - @AV_PICTURE_STRUCTURE_UNKNOWN = 0, - @AV_PICTURE_STRUCTURE_TOP_FIELD = 1, - @AV_PICTURE_STRUCTURE_BOTTOM_FIELD = 2, - @AV_PICTURE_STRUCTURE_FRAME = 3, - } - - /// @} @} - public enum AVPictureType : int - { - /// Undefined - @AV_PICTURE_TYPE_NONE = 0, - /// Intra - @AV_PICTURE_TYPE_I = 1, - /// Predicted - @AV_PICTURE_TYPE_P = 2, - /// Bi-dir predicted - @AV_PICTURE_TYPE_B = 3, - /// S(GMC)-VOP MPEG-4 - @AV_PICTURE_TYPE_S = 4, - /// Switching Intra - @AV_PICTURE_TYPE_SI = 5, - /// Switching Predicted - @AV_PICTURE_TYPE_SP = 6, - /// BI type - @AV_PICTURE_TYPE_BI = 7, - } - - /// Pixel format. - public enum AVPixelFormat : int - { - @AV_PIX_FMT_NONE = -1, - /// planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples) - @AV_PIX_FMT_YUV420P = 0, - /// packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr - @AV_PIX_FMT_YUYV422 = 1, - /// packed RGB 8:8:8, 24bpp, RGBRGB... - @AV_PIX_FMT_RGB24 = 2, - /// packed RGB 8:8:8, 24bpp, BGRBGR... 
- @AV_PIX_FMT_BGR24 = 3, - /// planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples) - @AV_PIX_FMT_YUV422P = 4, - /// planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples) - @AV_PIX_FMT_YUV444P = 5, - /// planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples) - @AV_PIX_FMT_YUV410P = 6, - /// planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) - @AV_PIX_FMT_YUV411P = 7, - /// Y , 8bpp - @AV_PIX_FMT_GRAY8 = 8, - /// Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb - @AV_PIX_FMT_MONOWHITE = 9, - /// Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb - @AV_PIX_FMT_MONOBLACK = 10, - /// 8 bits with AV_PIX_FMT_RGB32 palette - @AV_PIX_FMT_PAL8 = 11, - /// planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting color_range - @AV_PIX_FMT_YUVJ420P = 12, - /// planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting color_range - @AV_PIX_FMT_YUVJ422P = 13, - /// planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting color_range - @AV_PIX_FMT_YUVJ444P = 14, - /// packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1 - @AV_PIX_FMT_UYVY422 = 15, - /// packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3 - @AV_PIX_FMT_UYYVYY411 = 16, - /// packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb) - @AV_PIX_FMT_BGR8 = 17, - /// packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits - @AV_PIX_FMT_BGR4 = 18, - /// packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb) - @AV_PIX_FMT_BGR4_BYTE = 19, - /// packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb) - @AV_PIX_FMT_RGB8 = 20, - /// packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits - @AV_PIX_FMT_RGB4 = 21, - /// packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb) - 
@AV_PIX_FMT_RGB4_BYTE = 22, - /// planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V) - @AV_PIX_FMT_NV12 = 23, - /// as above, but U and V bytes are swapped - @AV_PIX_FMT_NV21 = 24, - /// packed ARGB 8:8:8:8, 32bpp, ARGBARGB... - @AV_PIX_FMT_ARGB = 25, - /// packed RGBA 8:8:8:8, 32bpp, RGBARGBA... - @AV_PIX_FMT_RGBA = 26, - /// packed ABGR 8:8:8:8, 32bpp, ABGRABGR... - @AV_PIX_FMT_ABGR = 27, - /// packed BGRA 8:8:8:8, 32bpp, BGRABGRA... - @AV_PIX_FMT_BGRA = 28, - /// Y , 16bpp, big-endian - @AV_PIX_FMT_GRAY16BE = 29, - /// Y , 16bpp, little-endian - @AV_PIX_FMT_GRAY16LE = 30, - /// planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples) - @AV_PIX_FMT_YUV440P = 31, - /// planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range - @AV_PIX_FMT_YUVJ440P = 32, - /// planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples) - @AV_PIX_FMT_YUVA420P = 33, - /// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian - @AV_PIX_FMT_RGB48BE = 34, - /// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian - @AV_PIX_FMT_RGB48LE = 35, - /// packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian - @AV_PIX_FMT_RGB565BE = 36, - /// packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian - @AV_PIX_FMT_RGB565LE = 37, - /// packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian , X=unused/undefined - @AV_PIX_FMT_RGB555BE = 38, - /// packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined - @AV_PIX_FMT_RGB555LE = 39, - /// packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian - @AV_PIX_FMT_BGR565BE = 40, - /// packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian - @AV_PIX_FMT_BGR565LE = 41, - /// packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), big-endian , X=unused/undefined - @AV_PIX_FMT_BGR555BE = 
42, - /// packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), little-endian, X=unused/undefined - @AV_PIX_FMT_BGR555LE = 43, - /// Hardware acceleration through VA-API, data[3] contains a VASurfaceID. - @AV_PIX_FMT_VAAPI = 44, - /// planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian - @AV_PIX_FMT_YUV420P16LE = 45, - /// planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian - @AV_PIX_FMT_YUV420P16BE = 46, - /// planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian - @AV_PIX_FMT_YUV422P16LE = 47, - /// planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian - @AV_PIX_FMT_YUV422P16BE = 48, - /// planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian - @AV_PIX_FMT_YUV444P16LE = 49, - /// planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian - @AV_PIX_FMT_YUV444P16BE = 50, - /// HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer - @AV_PIX_FMT_DXVA2_VLD = 51, - /// packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), little-endian, X=unused/undefined - @AV_PIX_FMT_RGB444LE = 52, - /// packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), big-endian, X=unused/undefined - @AV_PIX_FMT_RGB444BE = 53, - /// packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), little-endian, X=unused/undefined - @AV_PIX_FMT_BGR444LE = 54, - /// packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), big-endian, X=unused/undefined - @AV_PIX_FMT_BGR444BE = 55, - /// 8 bits gray, 8 bits alpha - @AV_PIX_FMT_YA8 = 56, - /// alias for AV_PIX_FMT_YA8 - @AV_PIX_FMT_Y400A = 56, - /// alias for AV_PIX_FMT_YA8 - @AV_PIX_FMT_GRAY8A = 56, - /// packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian - @AV_PIX_FMT_BGR48BE = 57, - /// packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian - @AV_PIX_FMT_BGR48LE = 58, - /// planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 
2x2 Y samples), big-endian - @AV_PIX_FMT_YUV420P9BE = 59, - /// planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian - @AV_PIX_FMT_YUV420P9LE = 60, - /// planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian - @AV_PIX_FMT_YUV420P10BE = 61, - /// planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian - @AV_PIX_FMT_YUV420P10LE = 62, - /// planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian - @AV_PIX_FMT_YUV422P10BE = 63, - /// planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian - @AV_PIX_FMT_YUV422P10LE = 64, - /// planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian - @AV_PIX_FMT_YUV444P9BE = 65, - /// planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian - @AV_PIX_FMT_YUV444P9LE = 66, - /// planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian - @AV_PIX_FMT_YUV444P10BE = 67, - /// planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian - @AV_PIX_FMT_YUV444P10LE = 68, - /// planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian - @AV_PIX_FMT_YUV422P9BE = 69, - /// planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian - @AV_PIX_FMT_YUV422P9LE = 70, - /// planar GBR 4:4:4 24bpp - @AV_PIX_FMT_GBRP = 71, - @AV_PIX_FMT_GBR24P = 71, - /// planar GBR 4:4:4 27bpp, big-endian - @AV_PIX_FMT_GBRP9BE = 72, - /// planar GBR 4:4:4 27bpp, little-endian - @AV_PIX_FMT_GBRP9LE = 73, - /// planar GBR 4:4:4 30bpp, big-endian - @AV_PIX_FMT_GBRP10BE = 74, - /// planar GBR 4:4:4 30bpp, little-endian - @AV_PIX_FMT_GBRP10LE = 75, - /// planar GBR 4:4:4 48bpp, big-endian - @AV_PIX_FMT_GBRP16BE = 76, - /// planar GBR 4:4:4 48bpp, little-endian - @AV_PIX_FMT_GBRP16LE = 77, - /// planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples) - @AV_PIX_FMT_YUVA422P = 78, - /// planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples) - 
@AV_PIX_FMT_YUVA444P = 79, - /// planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), big-endian - @AV_PIX_FMT_YUVA420P9BE = 80, - /// planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), little-endian - @AV_PIX_FMT_YUVA420P9LE = 81, - /// planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), big-endian - @AV_PIX_FMT_YUVA422P9BE = 82, - /// planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), little-endian - @AV_PIX_FMT_YUVA422P9LE = 83, - /// planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), big-endian - @AV_PIX_FMT_YUVA444P9BE = 84, - /// planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), little-endian - @AV_PIX_FMT_YUVA444P9LE = 85, - /// planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian) - @AV_PIX_FMT_YUVA420P10BE = 86, - /// planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian) - @AV_PIX_FMT_YUVA420P10LE = 87, - /// planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian) - @AV_PIX_FMT_YUVA422P10BE = 88, - /// planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian) - @AV_PIX_FMT_YUVA422P10LE = 89, - /// planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian) - @AV_PIX_FMT_YUVA444P10BE = 90, - /// planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian) - @AV_PIX_FMT_YUVA444P10LE = 91, - /// planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian) - @AV_PIX_FMT_YUVA420P16BE = 92, - /// planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian) - @AV_PIX_FMT_YUVA420P16LE = 93, - /// planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian) - @AV_PIX_FMT_YUVA422P16BE = 94, - /// planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian) - @AV_PIX_FMT_YUVA422P16LE = 95, - /// planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian) - 
@AV_PIX_FMT_YUVA444P16BE = 96, - /// planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian) - @AV_PIX_FMT_YUVA444P16LE = 97, - /// HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface - @AV_PIX_FMT_VDPAU = 98, - /// packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as little-endian, the 4 lower bits are set to 0 - @AV_PIX_FMT_XYZ12LE = 99, - /// packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as big-endian, the 4 lower bits are set to 0 - @AV_PIX_FMT_XYZ12BE = 100, - /// interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples) - @AV_PIX_FMT_NV16 = 101, - /// interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian - @AV_PIX_FMT_NV20LE = 102, - /// interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian - @AV_PIX_FMT_NV20BE = 103, - /// packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian - @AV_PIX_FMT_RGBA64BE = 104, - /// packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian - @AV_PIX_FMT_RGBA64LE = 105, - /// packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian - @AV_PIX_FMT_BGRA64BE = 106, - /// packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian - @AV_PIX_FMT_BGRA64LE = 107, - /// packed YUV 4:2:2, 16bpp, Y0 Cr Y1 Cb - @AV_PIX_FMT_YVYU422 = 108, - /// 16 bits gray, 16 bits alpha (big-endian) - @AV_PIX_FMT_YA16BE = 109, - /// 16 bits gray, 16 bits alpha (little-endian) - @AV_PIX_FMT_YA16LE = 110, - /// planar GBRA 4:4:4:4 32bpp - @AV_PIX_FMT_GBRAP = 111, - /// planar GBRA 4:4:4:4 64bpp, big-endian - @AV_PIX_FMT_GBRAP16BE = 112, - /// planar GBRA 4:4:4:4 64bpp, little-endian - 
@AV_PIX_FMT_GBRAP16LE = 113, - /// HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure. - @AV_PIX_FMT_QSV = 114, - /// HW acceleration though MMAL, data[3] contains a pointer to the MMAL_BUFFER_HEADER_T structure. - @AV_PIX_FMT_MMAL = 115, - /// HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView pointer - @AV_PIX_FMT_D3D11VA_VLD = 116, - /// HW acceleration through CUDA. data[i] contain CUdeviceptr pointers exactly as for system memory frames. - @AV_PIX_FMT_CUDA = 117, - /// packed RGB 8:8:8, 32bpp, XRGBXRGB... X=unused/undefined - @AV_PIX_FMT_0RGB = 118, - /// packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined - @AV_PIX_FMT_RGB0 = 119, - /// packed BGR 8:8:8, 32bpp, XBGRXBGR... X=unused/undefined - @AV_PIX_FMT_0BGR = 120, - /// packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined - @AV_PIX_FMT_BGR0 = 121, - /// planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian - @AV_PIX_FMT_YUV420P12BE = 122, - /// planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian - @AV_PIX_FMT_YUV420P12LE = 123, - /// planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian - @AV_PIX_FMT_YUV420P14BE = 124, - /// planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian - @AV_PIX_FMT_YUV420P14LE = 125, - /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian - @AV_PIX_FMT_YUV422P12BE = 126, - /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian - @AV_PIX_FMT_YUV422P12LE = 127, - /// planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian - @AV_PIX_FMT_YUV422P14BE = 128, - /// planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian - @AV_PIX_FMT_YUV422P14LE = 129, - /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian - @AV_PIX_FMT_YUV444P12BE = 130, - /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y 
samples), little-endian - @AV_PIX_FMT_YUV444P12LE = 131, - /// planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian - @AV_PIX_FMT_YUV444P14BE = 132, - /// planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian - @AV_PIX_FMT_YUV444P14LE = 133, - /// planar GBR 4:4:4 36bpp, big-endian - @AV_PIX_FMT_GBRP12BE = 134, - /// planar GBR 4:4:4 36bpp, little-endian - @AV_PIX_FMT_GBRP12LE = 135, - /// planar GBR 4:4:4 42bpp, big-endian - @AV_PIX_FMT_GBRP14BE = 136, - /// planar GBR 4:4:4 42bpp, little-endian - @AV_PIX_FMT_GBRP14LE = 137, - /// planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV411P and setting color_range - @AV_PIX_FMT_YUVJ411P = 138, - /// bayer, BGBG..(odd line), GRGR..(even line), 8-bit samples - @AV_PIX_FMT_BAYER_BGGR8 = 139, - /// bayer, RGRG..(odd line), GBGB..(even line), 8-bit samples - @AV_PIX_FMT_BAYER_RGGB8 = 140, - /// bayer, GBGB..(odd line), RGRG..(even line), 8-bit samples - @AV_PIX_FMT_BAYER_GBRG8 = 141, - /// bayer, GRGR..(odd line), BGBG..(even line), 8-bit samples - @AV_PIX_FMT_BAYER_GRBG8 = 142, - /// bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, little-endian - @AV_PIX_FMT_BAYER_BGGR16LE = 143, - /// bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, big-endian - @AV_PIX_FMT_BAYER_BGGR16BE = 144, - /// bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, little-endian - @AV_PIX_FMT_BAYER_RGGB16LE = 145, - /// bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, big-endian - @AV_PIX_FMT_BAYER_RGGB16BE = 146, - /// bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, little-endian - @AV_PIX_FMT_BAYER_GBRG16LE = 147, - /// bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, big-endian - @AV_PIX_FMT_BAYER_GBRG16BE = 148, - /// bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, little-endian - @AV_PIX_FMT_BAYER_GRBG16LE = 149, - /// bayer, GRGR..(odd line), BGBG..(even line), 
16-bit samples, big-endian - @AV_PIX_FMT_BAYER_GRBG16BE = 150, - /// XVideo Motion Acceleration via common packet passing - @AV_PIX_FMT_XVMC = 151, - /// planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian - @AV_PIX_FMT_YUV440P10LE = 152, - /// planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian - @AV_PIX_FMT_YUV440P10BE = 153, - /// planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian - @AV_PIX_FMT_YUV440P12LE = 154, - /// planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian - @AV_PIX_FMT_YUV440P12BE = 155, - /// packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), little-endian - @AV_PIX_FMT_AYUV64LE = 156, - /// packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), big-endian - @AV_PIX_FMT_AYUV64BE = 157, - /// hardware decoding through Videotoolbox - @AV_PIX_FMT_VIDEOTOOLBOX = 158, - /// like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, little-endian - @AV_PIX_FMT_P010LE = 159, - /// like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, big-endian - @AV_PIX_FMT_P010BE = 160, - /// planar GBR 4:4:4:4 48bpp, big-endian - @AV_PIX_FMT_GBRAP12BE = 161, - /// planar GBR 4:4:4:4 48bpp, little-endian - @AV_PIX_FMT_GBRAP12LE = 162, - /// planar GBR 4:4:4:4 40bpp, big-endian - @AV_PIX_FMT_GBRAP10BE = 163, - /// planar GBR 4:4:4:4 40bpp, little-endian - @AV_PIX_FMT_GBRAP10LE = 164, - /// hardware decoding through MediaCodec - @AV_PIX_FMT_MEDIACODEC = 165, - /// Y , 12bpp, big-endian - @AV_PIX_FMT_GRAY12BE = 166, - /// Y , 12bpp, little-endian - @AV_PIX_FMT_GRAY12LE = 167, - /// Y , 10bpp, big-endian - @AV_PIX_FMT_GRAY10BE = 168, - /// Y , 10bpp, little-endian - @AV_PIX_FMT_GRAY10LE = 169, - /// like NV12, with 16bpp per component, little-endian - @AV_PIX_FMT_P016LE = 170, - /// like NV12, with 16bpp per component, big-endian - @AV_PIX_FMT_P016BE = 171, - /// Hardware surfaces for Direct3D11. 
- @AV_PIX_FMT_D3D11 = 172, - /// Y , 9bpp, big-endian - @AV_PIX_FMT_GRAY9BE = 173, - /// Y , 9bpp, little-endian - @AV_PIX_FMT_GRAY9LE = 174, - /// IEEE-754 single precision planar GBR 4:4:4, 96bpp, big-endian - @AV_PIX_FMT_GBRPF32BE = 175, - /// IEEE-754 single precision planar GBR 4:4:4, 96bpp, little-endian - @AV_PIX_FMT_GBRPF32LE = 176, - /// IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, big-endian - @AV_PIX_FMT_GBRAPF32BE = 177, - /// IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, little-endian - @AV_PIX_FMT_GBRAPF32LE = 178, - /// DRM-managed buffers exposed through PRIME buffer sharing. - @AV_PIX_FMT_DRM_PRIME = 179, - /// Hardware surfaces for OpenCL. - @AV_PIX_FMT_OPENCL = 180, - /// Y , 14bpp, big-endian - @AV_PIX_FMT_GRAY14BE = 181, - /// Y , 14bpp, little-endian - @AV_PIX_FMT_GRAY14LE = 182, - /// IEEE-754 single precision Y, 32bpp, big-endian - @AV_PIX_FMT_GRAYF32BE = 183, - /// IEEE-754 single precision Y, 32bpp, little-endian - @AV_PIX_FMT_GRAYF32LE = 184, - /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, big-endian - @AV_PIX_FMT_YUVA422P12BE = 185, - /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, little-endian - @AV_PIX_FMT_YUVA422P12LE = 186, - /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, big-endian - @AV_PIX_FMT_YUVA444P12BE = 187, - /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, little-endian - @AV_PIX_FMT_YUVA444P12LE = 188, - /// planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V) - @AV_PIX_FMT_NV24 = 189, - /// as above, but U and V bytes are swapped - @AV_PIX_FMT_NV42 = 190, - /// Vulkan hardware images. 
- @AV_PIX_FMT_VULKAN = 191, - /// packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, big-endian - @AV_PIX_FMT_Y210BE = 192, - /// packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, little-endian - @AV_PIX_FMT_Y210LE = 193, - /// packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), little-endian, X=unused/undefined - @AV_PIX_FMT_X2RGB10LE = 194, - /// packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), big-endian, X=unused/undefined - @AV_PIX_FMT_X2RGB10BE = 195, - /// packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), little-endian, X=unused/undefined - @AV_PIX_FMT_X2BGR10LE = 196, - /// packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), big-endian, X=unused/undefined - @AV_PIX_FMT_X2BGR10BE = 197, - /// interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, big-endian - @AV_PIX_FMT_P210BE = 198, - /// interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, little-endian - @AV_PIX_FMT_P210LE = 199, - /// interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, big-endian - @AV_PIX_FMT_P410BE = 200, - /// interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, little-endian - @AV_PIX_FMT_P410LE = 201, - /// interleaved chroma YUV 4:2:2, 32bpp, big-endian - @AV_PIX_FMT_P216BE = 202, - /// interleaved chroma YUV 4:2:2, 32bpp, little-endian - @AV_PIX_FMT_P216LE = 203, - /// interleaved chroma YUV 4:4:4, 48bpp, big-endian - @AV_PIX_FMT_P416BE = 204, - /// interleaved chroma YUV 4:4:4, 48bpp, little-endian - @AV_PIX_FMT_P416LE = 205, - /// number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions - @AV_PIX_FMT_NB = 206, - } - - /// Rounding methods. - public enum AVRounding : int - { - /// Round toward zero. - @AV_ROUND_ZERO = 0, - /// Round away from zero. - @AV_ROUND_INF = 1, - /// Round toward -infinity. - @AV_ROUND_DOWN = 2, - /// Round toward +infinity. - @AV_ROUND_UP = 3, - /// Round to nearest and halfway cases away from zero. 
- @AV_ROUND_NEAR_INF = 5, - /// Flag telling rescaling functions to pass `INT64_MIN`/`MAX` through unchanged, avoiding special cases for #AV_NOPTS_VALUE. - @AV_ROUND_PASS_MINMAX = 8192, - } - - /// Audio sample formats - public enum AVSampleFormat : int - { - @AV_SAMPLE_FMT_NONE = -1, - /// unsigned 8 bits - @AV_SAMPLE_FMT_U8 = 0, - /// signed 16 bits - @AV_SAMPLE_FMT_S16 = 1, - /// signed 32 bits - @AV_SAMPLE_FMT_S32 = 2, - /// float - @AV_SAMPLE_FMT_FLT = 3, - /// double - @AV_SAMPLE_FMT_DBL = 4, - /// unsigned 8 bits, planar - @AV_SAMPLE_FMT_U8P = 5, - /// signed 16 bits, planar - @AV_SAMPLE_FMT_S16P = 6, - /// signed 32 bits, planar - @AV_SAMPLE_FMT_S32P = 7, - /// float, planar - @AV_SAMPLE_FMT_FLTP = 8, - /// double, planar - @AV_SAMPLE_FMT_DBLP = 9, - /// signed 64 bits - @AV_SAMPLE_FMT_S64 = 10, - /// signed 64 bits, planar - @AV_SAMPLE_FMT_S64P = 11, - /// Number of sample formats. DO NOT USE if linking dynamically - @AV_SAMPLE_FMT_NB = 12, - } - - public enum AVSideDataParamChangeFlags : int - { - @AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_COUNT = 1, - @AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_LAYOUT = 2, - @AV_SIDE_DATA_PARAM_CHANGE_SAMPLE_RATE = 4, - @AV_SIDE_DATA_PARAM_CHANGE_DIMENSIONS = 8, - } - - /// @} - public enum AVStreamParseType : int - { - @AVSTREAM_PARSE_NONE = 0, - /// full parsing and repack - @AVSTREAM_PARSE_FULL = 1, - /// Only parse headers, do not repack. 
- @AVSTREAM_PARSE_HEADERS = 2, - /// full parsing and interpolation of timestamps for frames not starting on a packet boundary - @AVSTREAM_PARSE_TIMESTAMPS = 3, - /// full parsing and repack of the first frame only, only implemented for H.264 currently - @AVSTREAM_PARSE_FULL_ONCE = 4, - /// full parsing and repack with timestamp and position generation by parser for raw this assumes that each packet in the file contains no demuxer level headers and just codec level data, otherwise position generation would fail - @AVSTREAM_PARSE_FULL_RAW = 5, - } - - /// @} - public enum AVSubtitleType : int - { - @SUBTITLE_NONE = 0, - /// A bitmap, pict will be set - @SUBTITLE_BITMAP = 1, - /// Plain text, the text field must be set by the decoder and is authoritative. ass and pict fields may contain approximations. - @SUBTITLE_TEXT = 2, - /// Formatted text, the ass field must be set by the decoder and is authoritative. pict and text fields may contain approximations. - @SUBTITLE_ASS = 3, - } - - public enum AVTimebaseSource : int - { - @AVFMT_TBCF_AUTO = -1, - @AVFMT_TBCF_DECODER = 0, - @AVFMT_TBCF_DEMUXER = 1, - @AVFMT_TBCF_R_FRAMERATE = 2, - } - - public enum AVTimecodeFlag : int - { - /// timecode is drop frame - @AV_TIMECODE_FLAG_DROPFRAME = 1, - /// timecode wraps after 24 hours - @AV_TIMECODE_FLAG_24HOURSMAX = 2, - /// negative time values are allowed - @AV_TIMECODE_FLAG_ALLOWNEGATIVE = 4, - } - - /// Dithering algorithms - public enum SwrDitherType : int - { - @SWR_DITHER_NONE = 0, - @SWR_DITHER_RECTANGULAR = 1, - @SWR_DITHER_TRIANGULAR = 2, - @SWR_DITHER_TRIANGULAR_HIGHPASS = 3, - /// not part of API/ABI - @SWR_DITHER_NS = 64, - @SWR_DITHER_NS_LIPSHITZ = 65, - @SWR_DITHER_NS_F_WEIGHTED = 66, - @SWR_DITHER_NS_MODIFIED_E_WEIGHTED = 67, - @SWR_DITHER_NS_IMPROVED_E_WEIGHTED = 68, - @SWR_DITHER_NS_SHIBATA = 69, - @SWR_DITHER_NS_LOW_SHIBATA = 70, - @SWR_DITHER_NS_HIGH_SHIBATA = 71, - /// not part of API/ABI - @SWR_DITHER_NB = 72, - } - - /// Resampling Engines - public enum 
SwrEngine : int - { - /// SW Resampler - @SWR_ENGINE_SWR = 0, - /// SoX Resampler - @SWR_ENGINE_SOXR = 1, - /// not part of API/ABI - @SWR_ENGINE_NB = 2, - } - - /// Resampling Filter Types - public enum SwrFilterType : int - { - /// Cubic - @SWR_FILTER_TYPE_CUBIC = 0, - /// Blackman Nuttall windowed sinc - @SWR_FILTER_TYPE_BLACKMAN_NUTTALL = 1, - /// Kaiser windowed sinc - @SWR_FILTER_TYPE_KAISER = 2, - } - -} diff --git a/FFmpeg.AutoGen/FFmpeg.functions.export.g.cs b/FFmpeg.AutoGen/FFmpeg.functions.export.g.cs deleted file mode 100644 index 57c292e3..00000000 --- a/FFmpeg.AutoGen/FFmpeg.functions.export.g.cs +++ /dev/null @@ -1,17513 +0,0 @@ -using System; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen -{ - public unsafe static partial class ffmpeg - { - private const string PlatformNotSupportedMessageFormat = "{0} is not supported on this platform."; - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_codec_is_decoder_delegate(AVCodec* @codec); - private static av_codec_is_decoder_delegate av_codec_is_decoder_fptr = (AVCodec* @codec) => - { - av_codec_is_decoder_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_codec_is_decoder"); - if (av_codec_is_decoder_fptr == null) - { - av_codec_is_decoder_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_codec_is_decoder")); - }; - } - return av_codec_is_decoder_fptr(@codec); - }; - /// Returns a non-zero number if codec is a decoder, zero otherwise - /// a non-zero number if codec is a decoder, zero otherwise - public static int av_codec_is_decoder(AVCodec* @codec) - { - return av_codec_is_decoder_fptr(@codec); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_codec_is_encoder_delegate(AVCodec* @codec); - private static av_codec_is_encoder_delegate av_codec_is_encoder_fptr = (AVCodec* @codec) => - { - 
av_codec_is_encoder_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_codec_is_encoder"); - if (av_codec_is_encoder_fptr == null) - { - av_codec_is_encoder_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_codec_is_encoder")); - }; - } - return av_codec_is_encoder_fptr(@codec); - }; - /// Returns a non-zero number if codec is an encoder, zero otherwise - /// a non-zero number if codec is an encoder, zero otherwise - public static int av_codec_is_encoder(AVCodec* @codec) - { - return av_codec_is_encoder_fptr(@codec); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodec* av_codec_iterate_delegate(void** @opaque); - private static av_codec_iterate_delegate av_codec_iterate_fptr = (void** @opaque) => - { - av_codec_iterate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_codec_iterate"); - if (av_codec_iterate_fptr == null) - { - av_codec_iterate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_codec_iterate")); - }; - } - return av_codec_iterate_fptr(@opaque); - }; - /// Iterate over all registered codecs. - /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. 
- /// the next registered codec or NULL when the iteration is finished - public static AVCodec* av_codec_iterate(void** @opaque) - { - return av_codec_iterate_fptr(@opaque); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCPBProperties* av_cpb_properties_alloc_delegate(ulong* @size); - private static av_cpb_properties_alloc_delegate av_cpb_properties_alloc_fptr = (ulong* @size) => - { - av_cpb_properties_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_cpb_properties_alloc"); - if (av_cpb_properties_alloc_fptr == null) - { - av_cpb_properties_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_cpb_properties_alloc")); - }; - } - return av_cpb_properties_alloc_fptr(@size); - }; - /// Allocate a CPB properties structure and initialize its fields to default values. - /// if non-NULL, the size of the allocated struct will be written here. This is useful for embedding it in side data. - /// the newly allocated struct or NULL on failure - public static AVCPBProperties* av_cpb_properties_alloc(ulong* @size) - { - return av_cpb_properties_alloc_fptr(@size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVD3D11VAContext* av_d3d11va_alloc_context_delegate(); - private static av_d3d11va_alloc_context_delegate av_d3d11va_alloc_context_fptr = () => - { - av_d3d11va_alloc_context_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_d3d11va_alloc_context"); - if (av_d3d11va_alloc_context_fptr == null) - { - av_d3d11va_alloc_context_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_d3d11va_alloc_context")); - }; - } - return av_d3d11va_alloc_context_fptr(); - }; - /// Allocate an AVD3D11VAContext. - /// Newly-allocated AVD3D11VAContext or NULL on failure. 
- public static AVD3D11VAContext* av_d3d11va_alloc_context() - { - return av_d3d11va_alloc_context_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_fast_padded_malloc_delegate(void* @ptr, uint* @size, ulong @min_size); - private static av_fast_padded_malloc_delegate av_fast_padded_malloc_fptr = (void* @ptr, uint* @size, ulong @min_size) => - { - av_fast_padded_malloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_fast_padded_malloc"); - if (av_fast_padded_malloc_fptr == null) - { - av_fast_padded_malloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_fast_padded_malloc")); - }; - } - av_fast_padded_malloc_fptr(@ptr, @size, @min_size); - }; - /// Same behaviour av_fast_malloc but the buffer has additional AV_INPUT_BUFFER_PADDING_SIZE at the end which will always be 0. - public static void av_fast_padded_malloc(void* @ptr, uint* @size, ulong @min_size) - { - av_fast_padded_malloc_fptr(@ptr, @size, @min_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_fast_padded_mallocz_delegate(void* @ptr, uint* @size, ulong @min_size); - private static av_fast_padded_mallocz_delegate av_fast_padded_mallocz_fptr = (void* @ptr, uint* @size, ulong @min_size) => - { - av_fast_padded_mallocz_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_fast_padded_mallocz"); - if (av_fast_padded_mallocz_fptr == null) - { - av_fast_padded_mallocz_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_fast_padded_mallocz")); - }; - } - av_fast_padded_mallocz_fptr(@ptr, @size, @min_size); - }; - /// Same behaviour av_fast_padded_malloc except that buffer will always be 0-initialized after call. 
- public static void av_fast_padded_mallocz(void* @ptr, uint* @size, ulong @min_size) - { - av_fast_padded_mallocz_fptr(@ptr, @size, @min_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_audio_frame_duration_delegate(AVCodecContext* @avctx, int @frame_bytes); - private static av_get_audio_frame_duration_delegate av_get_audio_frame_duration_fptr = (AVCodecContext* @avctx, int @frame_bytes) => - { - av_get_audio_frame_duration_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_get_audio_frame_duration"); - if (av_get_audio_frame_duration_fptr == null) - { - av_get_audio_frame_duration_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_audio_frame_duration")); - }; - } - return av_get_audio_frame_duration_fptr(@avctx, @frame_bytes); - }; - /// Return audio frame duration. - /// codec context - /// size of the frame, or 0 if unknown - /// frame duration, in samples, if known. 0 if not able to determine. 
- public static int av_get_audio_frame_duration(AVCodecContext* @avctx, int @frame_bytes) - { - return av_get_audio_frame_duration_fptr(@avctx, @frame_bytes); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_audio_frame_duration2_delegate(AVCodecParameters* @par, int @frame_bytes); - private static av_get_audio_frame_duration2_delegate av_get_audio_frame_duration2_fptr = (AVCodecParameters* @par, int @frame_bytes) => - { - av_get_audio_frame_duration2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_get_audio_frame_duration2"); - if (av_get_audio_frame_duration2_fptr == null) - { - av_get_audio_frame_duration2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_audio_frame_duration2")); - }; - } - return av_get_audio_frame_duration2_fptr(@par, @frame_bytes); - }; - /// This function is the same as av_get_audio_frame_duration(), except it works with AVCodecParameters instead of an AVCodecContext. - public static int av_get_audio_frame_duration2(AVCodecParameters* @par, int @frame_bytes) - { - return av_get_audio_frame_duration2_fptr(@par, @frame_bytes); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_bits_per_sample_delegate(AVCodecID @codec_id); - private static av_get_bits_per_sample_delegate av_get_bits_per_sample_fptr = (AVCodecID @codec_id) => - { - av_get_bits_per_sample_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_get_bits_per_sample"); - if (av_get_bits_per_sample_fptr == null) - { - av_get_bits_per_sample_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_bits_per_sample")); - }; - } - return av_get_bits_per_sample_fptr(@codec_id); - }; - /// Return codec bits per sample. - /// the codec - /// Number of bits per sample or zero if unknown for the given codec. 
- public static int av_get_bits_per_sample(AVCodecID @codec_id) - { - return av_get_bits_per_sample_fptr(@codec_id); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_exact_bits_per_sample_delegate(AVCodecID @codec_id); - private static av_get_exact_bits_per_sample_delegate av_get_exact_bits_per_sample_fptr = (AVCodecID @codec_id) => - { - av_get_exact_bits_per_sample_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_get_exact_bits_per_sample"); - if (av_get_exact_bits_per_sample_fptr == null) - { - av_get_exact_bits_per_sample_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_exact_bits_per_sample")); - }; - } - return av_get_exact_bits_per_sample_fptr(@codec_id); - }; - /// Return codec bits per sample. Only return non-zero if the bits per sample is exactly correct, not an approximation. - /// the codec - /// Number of bits per sample or zero if unknown for the given codec. - public static int av_get_exact_bits_per_sample(AVCodecID @codec_id) - { - return av_get_exact_bits_per_sample_fptr(@codec_id); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecID av_get_pcm_codec_delegate(AVSampleFormat @fmt, int @be); - private static av_get_pcm_codec_delegate av_get_pcm_codec_fptr = (AVSampleFormat @fmt, int @be) => - { - av_get_pcm_codec_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_get_pcm_codec"); - if (av_get_pcm_codec_fptr == null) - { - av_get_pcm_codec_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_pcm_codec")); - }; - } - return av_get_pcm_codec_fptr(@fmt, @be); - }; - /// Return the PCM codec associated with a sample format. 
- /// endianness, 0 for little, 1 for big, -1 (or anything else) for native - /// AV_CODEC_ID_PCM_* or AV_CODEC_ID_NONE - public static AVCodecID av_get_pcm_codec(AVSampleFormat @fmt, int @be) - { - return av_get_pcm_codec_fptr(@fmt, @be); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_get_profile_name_delegate(AVCodec* @codec, int @profile); - private static av_get_profile_name_delegate av_get_profile_name_fptr = (AVCodec* @codec, int @profile) => - { - av_get_profile_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_get_profile_name"); - if (av_get_profile_name_fptr == null) - { - av_get_profile_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_profile_name")); - }; - } - return av_get_profile_name_fptr(@codec, @profile); - }; - /// Return a name for the specified profile, if available. - /// the codec that is searched for the given profile - /// the profile value for which a name is requested - /// A name for the profile if found, NULL otherwise. 
- public static string av_get_profile_name(AVCodec* @codec, int @profile) - { - return av_get_profile_name_fptr(@codec, @profile); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_grow_packet_delegate(AVPacket* @pkt, int @grow_by); - private static av_grow_packet_delegate av_grow_packet_fptr = (AVPacket* @pkt, int @grow_by) => - { - av_grow_packet_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_grow_packet"); - if (av_grow_packet_fptr == null) - { - av_grow_packet_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_grow_packet")); - }; - } - return av_grow_packet_fptr(@pkt, @grow_by); - }; - /// Increase packet size, correctly zeroing padding - /// packet - /// number of bytes by which to increase the size of the packet - public static int av_grow_packet(AVPacket* @pkt, int @grow_by) - { - return av_grow_packet_fptr(@pkt, @grow_by); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_init_packet_delegate(AVPacket* @pkt); - private static av_init_packet_delegate av_init_packet_fptr = (AVPacket* @pkt) => - { - av_init_packet_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_init_packet"); - if (av_init_packet_fptr == null) - { - av_init_packet_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_init_packet")); - }; - } - av_init_packet_fptr(@pkt); - }; - /// Initialize optional fields of a packet with default values. - /// packet - [Obsolete("This function is deprecated. 
Once it's removed, sizeof(AVPacket) will not be a part of the ABI anymore.")] - public static void av_init_packet(AVPacket* @pkt) - { - av_init_packet_fptr(@pkt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_new_packet_delegate(AVPacket* @pkt, int @size); - private static av_new_packet_delegate av_new_packet_fptr = (AVPacket* @pkt, int @size) => - { - av_new_packet_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_new_packet"); - if (av_new_packet_fptr == null) - { - av_new_packet_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_new_packet")); - }; - } - return av_new_packet_fptr(@pkt, @size); - }; - /// Allocate the payload of a packet and initialize its fields with default values. - /// packet - /// wanted payload size - /// 0 if OK, AVERROR_xxx otherwise - public static int av_new_packet(AVPacket* @pkt, int @size) - { - return av_new_packet_fptr(@pkt, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_packet_add_side_data_delegate(AVPacket* @pkt, AVPacketSideDataType @type, byte* @data, ulong @size); - private static av_packet_add_side_data_delegate av_packet_add_side_data_fptr = (AVPacket* @pkt, AVPacketSideDataType @type, byte* @data, ulong @size) => - { - av_packet_add_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_add_side_data"); - if (av_packet_add_side_data_fptr == null) - { - av_packet_add_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_add_side_data")); - }; - } - return av_packet_add_side_data_fptr(@pkt, @type, @data, @size); - }; - /// Wrap an existing array as a packet side data. - /// packet - /// side information type - /// the side data array. It must be allocated with the av_malloc() family of functions. 
The ownership of the data is transferred to pkt. - /// side information size - /// a non-negative number on success, a negative AVERROR code on failure. On failure, the packet is unchanged and the data remains owned by the caller. - public static int av_packet_add_side_data(AVPacket* @pkt, AVPacketSideDataType @type, byte* @data, ulong @size) - { - return av_packet_add_side_data_fptr(@pkt, @type, @data, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVPacket* av_packet_alloc_delegate(); - private static av_packet_alloc_delegate av_packet_alloc_fptr = () => - { - av_packet_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_alloc"); - if (av_packet_alloc_fptr == null) - { - av_packet_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_alloc")); - }; - } - return av_packet_alloc_fptr(); - }; - /// Allocate an AVPacket and set its fields to default values. The resulting struct must be freed using av_packet_free(). - /// An AVPacket filled with default values or NULL on failure. - public static AVPacket* av_packet_alloc() - { - return av_packet_alloc_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVPacket* av_packet_clone_delegate(AVPacket* @src); - private static av_packet_clone_delegate av_packet_clone_fptr = (AVPacket* @src) => - { - av_packet_clone_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_clone"); - if (av_packet_clone_fptr == null) - { - av_packet_clone_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_clone")); - }; - } - return av_packet_clone_fptr(@src); - }; - /// Create a new packet that references the same data as src. - /// newly created AVPacket on success, NULL on error. 
- public static AVPacket* av_packet_clone(AVPacket* @src) - { - return av_packet_clone_fptr(@src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_packet_copy_props_delegate(AVPacket* @dst, AVPacket* @src); - private static av_packet_copy_props_delegate av_packet_copy_props_fptr = (AVPacket* @dst, AVPacket* @src) => - { - av_packet_copy_props_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_copy_props"); - if (av_packet_copy_props_fptr == null) - { - av_packet_copy_props_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_copy_props")); - }; - } - return av_packet_copy_props_fptr(@dst, @src); - }; - /// Copy only "properties" fields from src to dst. - /// Destination packet - /// Source packet - /// 0 on success AVERROR on failure. - public static int av_packet_copy_props(AVPacket* @dst, AVPacket* @src) - { - return av_packet_copy_props_fptr(@dst, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_packet_free_delegate(AVPacket** @pkt); - private static av_packet_free_delegate av_packet_free_fptr = (AVPacket** @pkt) => - { - av_packet_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_free"); - if (av_packet_free_fptr == null) - { - av_packet_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_free")); - }; - } - av_packet_free_fptr(@pkt); - }; - /// Free the packet, if the packet is reference counted, it will be unreferenced first. - /// packet to be freed. The pointer will be set to NULL. 
- public static void av_packet_free(AVPacket** @pkt) - { - av_packet_free_fptr(@pkt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_packet_free_side_data_delegate(AVPacket* @pkt); - private static av_packet_free_side_data_delegate av_packet_free_side_data_fptr = (AVPacket* @pkt) => - { - av_packet_free_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_free_side_data"); - if (av_packet_free_side_data_fptr == null) - { - av_packet_free_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_free_side_data")); - }; - } - av_packet_free_side_data_fptr(@pkt); - }; - /// Convenience function to free all the side data stored. All the other fields stay untouched. - /// packet - public static void av_packet_free_side_data(AVPacket* @pkt) - { - av_packet_free_side_data_fptr(@pkt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_packet_from_data_delegate(AVPacket* @pkt, byte* @data, int @size); - private static av_packet_from_data_delegate av_packet_from_data_fptr = (AVPacket* @pkt, byte* @data, int @size) => - { - av_packet_from_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_from_data"); - if (av_packet_from_data_fptr == null) - { - av_packet_from_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_from_data")); - }; - } - return av_packet_from_data_fptr(@pkt, @data, @size); - }; - /// Initialize a reference-counted packet from av_malloc()ed data. - /// packet to be initialized. This function will set the data, size, and buf fields, all others are left untouched. - /// Data allocated by av_malloc() to be used as packet data. If this function returns successfully, the data is owned by the underlying AVBuffer. 
The caller may not access the data through other means. - /// size of data in bytes, without the padding. I.e. the full buffer size is assumed to be size + AV_INPUT_BUFFER_PADDING_SIZE. - /// 0 on success, a negative AVERROR on error - public static int av_packet_from_data(AVPacket* @pkt, byte* @data, int @size) - { - return av_packet_from_data_fptr(@pkt, @data, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_packet_get_side_data_delegate(AVPacket* @pkt, AVPacketSideDataType @type, ulong* @size); - private static av_packet_get_side_data_delegate av_packet_get_side_data_fptr = (AVPacket* @pkt, AVPacketSideDataType @type, ulong* @size) => - { - av_packet_get_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_get_side_data"); - if (av_packet_get_side_data_fptr == null) - { - av_packet_get_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_get_side_data")); - }; - } - return av_packet_get_side_data_fptr(@pkt, @type, @size); - }; - /// Get side information from packet. - /// packet - /// desired side information type - /// If supplied, *size will be set to the size of the side data or to zero if the desired side data is not present. 
- /// pointer to data if present or NULL otherwise - public static byte* av_packet_get_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong* @size) - { - return av_packet_get_side_data_fptr(@pkt, @type, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_packet_make_refcounted_delegate(AVPacket* @pkt); - private static av_packet_make_refcounted_delegate av_packet_make_refcounted_fptr = (AVPacket* @pkt) => - { - av_packet_make_refcounted_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_make_refcounted"); - if (av_packet_make_refcounted_fptr == null) - { - av_packet_make_refcounted_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_make_refcounted")); - }; - } - return av_packet_make_refcounted_fptr(@pkt); - }; - /// Ensure the data described by a given packet is reference counted. - /// packet whose data should be made reference counted. - /// 0 on success, a negative AVERROR on error. On failure, the packet is unchanged. - public static int av_packet_make_refcounted(AVPacket* @pkt) - { - return av_packet_make_refcounted_fptr(@pkt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_packet_make_writable_delegate(AVPacket* @pkt); - private static av_packet_make_writable_delegate av_packet_make_writable_fptr = (AVPacket* @pkt) => - { - av_packet_make_writable_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_make_writable"); - if (av_packet_make_writable_fptr == null) - { - av_packet_make_writable_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_make_writable")); - }; - } - return av_packet_make_writable_fptr(@pkt); - }; - /// Create a writable reference for the data described by a given packet, avoiding data copy if possible. 
- /// Packet whose data should be made writable. - /// 0 on success, a negative AVERROR on failure. On failure, the packet is unchanged. - public static int av_packet_make_writable(AVPacket* @pkt) - { - return av_packet_make_writable_fptr(@pkt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_packet_move_ref_delegate(AVPacket* @dst, AVPacket* @src); - private static av_packet_move_ref_delegate av_packet_move_ref_fptr = (AVPacket* @dst, AVPacket* @src) => - { - av_packet_move_ref_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_move_ref"); - if (av_packet_move_ref_fptr == null) - { - av_packet_move_ref_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_move_ref")); - }; - } - av_packet_move_ref_fptr(@dst, @src); - }; - /// Move every field in src to dst and reset src. - /// Destination packet - /// Source packet, will be reset - public static void av_packet_move_ref(AVPacket* @dst, AVPacket* @src) - { - av_packet_move_ref_fptr(@dst, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_packet_new_side_data_delegate(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size); - private static av_packet_new_side_data_delegate av_packet_new_side_data_fptr = (AVPacket* @pkt, AVPacketSideDataType @type, ulong @size) => - { - av_packet_new_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_new_side_data"); - if (av_packet_new_side_data_fptr == null) - { - av_packet_new_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_new_side_data")); - }; - } - return av_packet_new_side_data_fptr(@pkt, @type, @size); - }; - /// Allocate new information of a packet. 
- /// packet - /// side information type - /// side information size - /// pointer to fresh allocated data or NULL otherwise - public static byte* av_packet_new_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size) - { - return av_packet_new_side_data_fptr(@pkt, @type, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_packet_pack_dictionary_delegate(AVDictionary* @dict, ulong* @size); - private static av_packet_pack_dictionary_delegate av_packet_pack_dictionary_fptr = (AVDictionary* @dict, ulong* @size) => - { - av_packet_pack_dictionary_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_pack_dictionary"); - if (av_packet_pack_dictionary_fptr == null) - { - av_packet_pack_dictionary_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_pack_dictionary")); - }; - } - return av_packet_pack_dictionary_fptr(@dict, @size); - }; - /// Pack a dictionary for use in side_data. - /// The dictionary to pack. 
- /// pointer to store the size of the returned data - /// pointer to data if successful, NULL otherwise - public static byte* av_packet_pack_dictionary(AVDictionary* @dict, ulong* @size) - { - return av_packet_pack_dictionary_fptr(@dict, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_packet_ref_delegate(AVPacket* @dst, AVPacket* @src); - private static av_packet_ref_delegate av_packet_ref_fptr = (AVPacket* @dst, AVPacket* @src) => - { - av_packet_ref_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_ref"); - if (av_packet_ref_fptr == null) - { - av_packet_ref_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_ref")); - }; - } - return av_packet_ref_fptr(@dst, @src); - }; - /// Setup a new reference to the data described by a given packet - /// Destination packet. Will be completely overwritten. - /// Source packet - /// 0 on success, a negative AVERROR on error. On error, dst will be blank (as if returned by av_packet_alloc()). 
- public static int av_packet_ref(AVPacket* @dst, AVPacket* @src) - { - return av_packet_ref_fptr(@dst, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_packet_rescale_ts_delegate(AVPacket* @pkt, AVRational @tb_src, AVRational @tb_dst); - private static av_packet_rescale_ts_delegate av_packet_rescale_ts_fptr = (AVPacket* @pkt, AVRational @tb_src, AVRational @tb_dst) => - { - av_packet_rescale_ts_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_rescale_ts"); - if (av_packet_rescale_ts_fptr == null) - { - av_packet_rescale_ts_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_rescale_ts")); - }; - } - av_packet_rescale_ts_fptr(@pkt, @tb_src, @tb_dst); - }; - /// Convert valid timing fields (timestamps / durations) in a packet from one timebase to another. Timestamps with unknown values (AV_NOPTS_VALUE) will be ignored. - /// packet on which the conversion will be performed - /// source timebase, in which the timing fields in pkt are expressed - /// destination timebase, to which the timing fields will be converted - public static void av_packet_rescale_ts(AVPacket* @pkt, AVRational @tb_src, AVRational @tb_dst) - { - av_packet_rescale_ts_fptr(@pkt, @tb_src, @tb_dst); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_packet_shrink_side_data_delegate(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size); - private static av_packet_shrink_side_data_delegate av_packet_shrink_side_data_fptr = (AVPacket* @pkt, AVPacketSideDataType @type, ulong @size) => - { - av_packet_shrink_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_shrink_side_data"); - if (av_packet_shrink_side_data_fptr == null) - { - av_packet_shrink_side_data_fptr = delegate - { - throw new 
PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_shrink_side_data")); - }; - } - return av_packet_shrink_side_data_fptr(@pkt, @type, @size); - }; - /// Shrink the already allocated side data buffer - /// packet - /// side information type - /// new side information size - /// 0 on success, < 0 on failure - public static int av_packet_shrink_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size) - { - return av_packet_shrink_side_data_fptr(@pkt, @type, @size); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_packet_side_data_name_delegate(AVPacketSideDataType @type); - private static av_packet_side_data_name_delegate av_packet_side_data_name_fptr = (AVPacketSideDataType @type) => - { - av_packet_side_data_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_side_data_name"); - if (av_packet_side_data_name_fptr == null) - { - av_packet_side_data_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_side_data_name")); - }; - } - return av_packet_side_data_name_fptr(@type); - }; - public static string av_packet_side_data_name(AVPacketSideDataType @type) - { - return av_packet_side_data_name_fptr(@type); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_packet_unpack_dictionary_delegate(byte* @data, ulong @size, AVDictionary** @dict); - private static av_packet_unpack_dictionary_delegate av_packet_unpack_dictionary_fptr = (byte* @data, ulong @size, AVDictionary** @dict) => - { - av_packet_unpack_dictionary_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_unpack_dictionary"); - if (av_packet_unpack_dictionary_fptr == null) - { - av_packet_unpack_dictionary_fptr = delegate - { - throw 
new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_unpack_dictionary")); - }; - } - return av_packet_unpack_dictionary_fptr(@data, @size, @dict); - }; - /// Unpack a dictionary from side_data. - /// data from side_data - /// size of the data - /// the metadata storage dictionary - /// 0 on success, < 0 on failure - public static int av_packet_unpack_dictionary(byte* @data, ulong @size, AVDictionary** @dict) - { - return av_packet_unpack_dictionary_fptr(@data, @size, @dict); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_packet_unref_delegate(AVPacket* @pkt); - private static av_packet_unref_delegate av_packet_unref_fptr = (AVPacket* @pkt) => - { - av_packet_unref_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_packet_unref"); - if (av_packet_unref_fptr == null) - { - av_packet_unref_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_packet_unref")); - }; - } - av_packet_unref_fptr(@pkt); - }; - /// Wipe the packet. - /// The packet to be unreferenced. 
- public static void av_packet_unref(AVPacket* @pkt) - { - av_packet_unref_fptr(@pkt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_parser_close_delegate(AVCodecParserContext* @s); - private static av_parser_close_delegate av_parser_close_fptr = (AVCodecParserContext* @s) => - { - av_parser_close_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_parser_close"); - if (av_parser_close_fptr == null) - { - av_parser_close_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_parser_close")); - }; - } - av_parser_close_fptr(@s); - }; - public static void av_parser_close(AVCodecParserContext* @s) - { - av_parser_close_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecParserContext* av_parser_init_delegate(int @codec_id); - private static av_parser_init_delegate av_parser_init_fptr = (int @codec_id) => - { - av_parser_init_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_parser_init"); - if (av_parser_init_fptr == null) - { - av_parser_init_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_parser_init")); - }; - } - return av_parser_init_fptr(@codec_id); - }; - public static AVCodecParserContext* av_parser_init(int @codec_id) - { - return av_parser_init_fptr(@codec_id); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecParser* av_parser_iterate_delegate(void** @opaque); - private static av_parser_iterate_delegate av_parser_iterate_fptr = (void** @opaque) => - { - av_parser_iterate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_parser_iterate"); - if (av_parser_iterate_fptr == null) - { - av_parser_iterate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, 
"av_parser_iterate")); - }; - } - return av_parser_iterate_fptr(@opaque); - }; - /// Iterate over all registered codec parsers. - /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. - /// the next registered codec parser or NULL when the iteration is finished - public static AVCodecParser* av_parser_iterate(void** @opaque) - { - return av_parser_iterate_fptr(@opaque); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_parser_parse2_delegate(AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size, long @pts, long @dts, long @pos); - private static av_parser_parse2_delegate av_parser_parse2_fptr = (AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size, long @pts, long @dts, long @pos) => - { - av_parser_parse2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_parser_parse2"); - if (av_parser_parse2_fptr == null) - { - av_parser_parse2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_parser_parse2")); - }; - } - return av_parser_parse2_fptr(@s, @avctx, @poutbuf, @poutbuf_size, @buf, @buf_size, @pts, @dts, @pos); - }; - /// Parse a packet. - /// parser context. - /// codec context. - /// set to pointer to parsed buffer or NULL if not yet finished. - /// set to size of parsed buffer or zero if not yet finished. - /// input buffer. - /// buffer size in bytes without the padding. I.e. the full buffer size is assumed to be buf_size + AV_INPUT_BUFFER_PADDING_SIZE. To signal EOF, this should be 0 (so that the last frame can be output). - /// input presentation timestamp. - /// input decoding timestamp. - /// input byte position in stream. - /// the number of bytes of the input bitstream used. 
- public static int av_parser_parse2(AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size, long @pts, long @dts, long @pos) - { - return av_parser_parse2_fptr(@s, @avctx, @poutbuf, @poutbuf_size, @buf, @buf_size, @pts, @dts, @pos); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_shrink_packet_delegate(AVPacket* @pkt, int @size); - private static av_shrink_packet_delegate av_shrink_packet_fptr = (AVPacket* @pkt, int @size) => - { - av_shrink_packet_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_shrink_packet"); - if (av_shrink_packet_fptr == null) - { - av_shrink_packet_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_shrink_packet")); - }; - } - av_shrink_packet_fptr(@pkt, @size); - }; - /// Reduce packet size, correctly zeroing padding - /// packet - /// new size - public static void av_shrink_packet(AVPacket* @pkt, int @size) - { - av_shrink_packet_fptr(@pkt, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint av_xiphlacing_delegate(byte* @s, uint @v); - private static av_xiphlacing_delegate av_xiphlacing_fptr = (byte* @s, uint @v) => - { - av_xiphlacing_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "av_xiphlacing"); - if (av_xiphlacing_fptr == null) - { - av_xiphlacing_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_xiphlacing")); - }; - } - return av_xiphlacing_fptr(@s, @v); - }; - /// Encode extradata length to a buffer. Used by xiph codecs. - /// buffer to write to; must be at least (v/255+1) bytes long - /// size of extradata in bytes - /// number of bytes written to the buffer. 
- public static uint av_xiphlacing(byte* @s, uint @v) - { - return av_xiphlacing_fptr(@s, @v); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avcodec_align_dimensions_delegate(AVCodecContext* @s, int* @width, int* @height); - private static avcodec_align_dimensions_delegate avcodec_align_dimensions_fptr = (AVCodecContext* @s, int* @width, int* @height) => - { - avcodec_align_dimensions_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_align_dimensions"); - if (avcodec_align_dimensions_fptr == null) - { - avcodec_align_dimensions_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_align_dimensions")); - }; - } - avcodec_align_dimensions_fptr(@s, @width, @height); - }; - /// Modify width and height values so that they will result in a memory buffer that is acceptable for the codec if you do not use any horizontal padding. - public static void avcodec_align_dimensions(AVCodecContext* @s, int* @width, int* @height) - { - avcodec_align_dimensions_fptr(@s, @width, @height); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avcodec_align_dimensions2_delegate(AVCodecContext* @s, int* @width, int* @height, ref int_array8 @linesize_align); - private static avcodec_align_dimensions2_delegate avcodec_align_dimensions2_fptr = (AVCodecContext* @s, int* @width, int* @height, ref int_array8 @linesize_align) => - { - avcodec_align_dimensions2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_align_dimensions2"); - if (avcodec_align_dimensions2_fptr == null) - { - avcodec_align_dimensions2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_align_dimensions2")); - }; - } - avcodec_align_dimensions2_fptr(@s, @width, @height, ref @linesize_align); - }; - /// Modify width and height values so that they 
will result in a memory buffer that is acceptable for the codec if you also ensure that all line sizes are a multiple of the respective linesize_align[i]. - public static void avcodec_align_dimensions2(AVCodecContext* @s, int* @width, int* @height, ref int_array8 @linesize_align) - { - avcodec_align_dimensions2_fptr(@s, @width, @height, ref @linesize_align); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecContext* avcodec_alloc_context3_delegate(AVCodec* @codec); - private static avcodec_alloc_context3_delegate avcodec_alloc_context3_fptr = (AVCodec* @codec) => - { - avcodec_alloc_context3_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_alloc_context3"); - if (avcodec_alloc_context3_fptr == null) - { - avcodec_alloc_context3_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_alloc_context3")); - }; - } - return avcodec_alloc_context3_fptr(@codec); - }; - /// Allocate an AVCodecContext and set its fields to default values. The resulting struct should be freed with avcodec_free_context(). - /// if non-NULL, allocate private data and initialize defaults for the given codec. It is illegal to then call avcodec_open2() with a different codec. If NULL, then the codec-specific defaults won't be initialized, which may result in suboptimal default settings (this is important mainly for encoders, e.g. libx264). - /// An AVCodecContext filled with default values or NULL on failure. 
- public static AVCodecContext* avcodec_alloc_context3(AVCodec* @codec) - { - return avcodec_alloc_context3_fptr(@codec); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVChromaLocation avcodec_chroma_pos_to_enum_delegate(int @xpos, int @ypos); - private static avcodec_chroma_pos_to_enum_delegate avcodec_chroma_pos_to_enum_fptr = (int @xpos, int @ypos) => - { - avcodec_chroma_pos_to_enum_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_chroma_pos_to_enum"); - if (avcodec_chroma_pos_to_enum_fptr == null) - { - avcodec_chroma_pos_to_enum_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_chroma_pos_to_enum")); - }; - } - return avcodec_chroma_pos_to_enum_fptr(@xpos, @ypos); - }; - /// Converts swscale x/y chroma position to AVChromaLocation. - /// horizontal chroma sample position - /// vertical chroma sample position - public static AVChromaLocation avcodec_chroma_pos_to_enum(int @xpos, int @ypos) - { - return avcodec_chroma_pos_to_enum_fptr(@xpos, @ypos); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_close_delegate(AVCodecContext* @avctx); - private static avcodec_close_delegate avcodec_close_fptr = (AVCodecContext* @avctx) => - { - avcodec_close_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_close"); - if (avcodec_close_fptr == null) - { - avcodec_close_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_close")); - }; - } - return avcodec_close_fptr(@avctx); - }; - /// Close a given AVCodecContext and free all the data associated with it (but not the AVCodecContext itself). 
- public static int avcodec_close(AVCodecContext* @avctx) - { - return avcodec_close_fptr(@avctx); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avcodec_configuration_delegate(); - private static avcodec_configuration_delegate avcodec_configuration_fptr = () => - { - avcodec_configuration_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_configuration"); - if (avcodec_configuration_fptr == null) - { - avcodec_configuration_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_configuration")); - }; - } - return avcodec_configuration_fptr(); - }; - /// Return the libavcodec build-time configuration. - public static string avcodec_configuration() - { - return avcodec_configuration_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_decode_subtitle2_delegate(AVCodecContext* @avctx, AVSubtitle* @sub, int* @got_sub_ptr, AVPacket* @avpkt); - private static avcodec_decode_subtitle2_delegate avcodec_decode_subtitle2_fptr = (AVCodecContext* @avctx, AVSubtitle* @sub, int* @got_sub_ptr, AVPacket* @avpkt) => - { - avcodec_decode_subtitle2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_decode_subtitle2"); - if (avcodec_decode_subtitle2_fptr == null) - { - avcodec_decode_subtitle2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_decode_subtitle2")); - }; - } - return avcodec_decode_subtitle2_fptr(@avctx, @sub, @got_sub_ptr, @avpkt); - }; - /// Decode a subtitle message. Return a negative value on error, otherwise return the number of bytes used. If no subtitle could be decompressed, got_sub_ptr is zero. Otherwise, the subtitle is stored in *sub. 
Note that AV_CODEC_CAP_DR1 is not available for subtitle codecs. This is for simplicity, because the performance difference is expected to be negligible and reusing a get_buffer written for video codecs would probably perform badly due to a potentially very different allocation pattern. - /// the codec context - /// The preallocated AVSubtitle in which the decoded subtitle will be stored, must be freed with avsubtitle_free if *got_sub_ptr is set. - /// Zero if no subtitle could be decompressed, otherwise, it is nonzero. - /// The input AVPacket containing the input buffer. - public static int avcodec_decode_subtitle2(AVCodecContext* @avctx, AVSubtitle* @sub, int* @got_sub_ptr, AVPacket* @avpkt) - { - return avcodec_decode_subtitle2_fptr(@avctx, @sub, @got_sub_ptr, @avpkt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_default_execute_delegate(AVCodecContext* @c, avcodec_default_execute_func_func @func, void* @arg, int* @ret, int @count, int @size); - private static avcodec_default_execute_delegate avcodec_default_execute_fptr = (AVCodecContext* @c, avcodec_default_execute_func_func @func, void* @arg, int* @ret, int @count, int @size) => - { - avcodec_default_execute_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_default_execute"); - if (avcodec_default_execute_fptr == null) - { - avcodec_default_execute_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_default_execute")); - }; - } - return avcodec_default_execute_fptr(@c, @func, @arg, @ret, @count, @size); - }; - public static int avcodec_default_execute(AVCodecContext* @c, avcodec_default_execute_func_func @func, void* @arg, int* @ret, int @count, int @size) - { - return avcodec_default_execute_fptr(@c, @func, @arg, @ret, @count, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int 
avcodec_default_execute2_delegate(AVCodecContext* @c, avcodec_default_execute2_func_func @func, void* @arg, int* @ret, int @count); - private static avcodec_default_execute2_delegate avcodec_default_execute2_fptr = (AVCodecContext* @c, avcodec_default_execute2_func_func @func, void* @arg, int* @ret, int @count) => - { - avcodec_default_execute2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_default_execute2"); - if (avcodec_default_execute2_fptr == null) - { - avcodec_default_execute2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_default_execute2")); - }; - } - return avcodec_default_execute2_fptr(@c, @func, @arg, @ret, @count); - }; - public static int avcodec_default_execute2(AVCodecContext* @c, avcodec_default_execute2_func_func @func, void* @arg, int* @ret, int @count) - { - return avcodec_default_execute2_fptr(@c, @func, @arg, @ret, @count); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_default_get_buffer2_delegate(AVCodecContext* @s, AVFrame* @frame, int @flags); - private static avcodec_default_get_buffer2_delegate avcodec_default_get_buffer2_fptr = (AVCodecContext* @s, AVFrame* @frame, int @flags) => - { - avcodec_default_get_buffer2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_default_get_buffer2"); - if (avcodec_default_get_buffer2_fptr == null) - { - avcodec_default_get_buffer2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_default_get_buffer2")); - }; - } - return avcodec_default_get_buffer2_fptr(@s, @frame, @flags); - }; - /// The default callback for AVCodecContext.get_buffer2(). It is made public so it can be called by custom get_buffer2() implementations for decoders without AV_CODEC_CAP_DR1 set. 
- public static int avcodec_default_get_buffer2(AVCodecContext* @s, AVFrame* @frame, int @flags) - { - return avcodec_default_get_buffer2_fptr(@s, @frame, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_default_get_encode_buffer_delegate(AVCodecContext* @s, AVPacket* @pkt, int @flags); - private static avcodec_default_get_encode_buffer_delegate avcodec_default_get_encode_buffer_fptr = (AVCodecContext* @s, AVPacket* @pkt, int @flags) => - { - avcodec_default_get_encode_buffer_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_default_get_encode_buffer"); - if (avcodec_default_get_encode_buffer_fptr == null) - { - avcodec_default_get_encode_buffer_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_default_get_encode_buffer")); - }; - } - return avcodec_default_get_encode_buffer_fptr(@s, @pkt, @flags); - }; - /// The default callback for AVCodecContext.get_encode_buffer(). It is made public so it can be called by custom get_encode_buffer() implementations for encoders without AV_CODEC_CAP_DR1 set. 
- public static int avcodec_default_get_encode_buffer(AVCodecContext* @s, AVPacket* @pkt, int @flags) - { - return avcodec_default_get_encode_buffer_fptr(@s, @pkt, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVPixelFormat avcodec_default_get_format_delegate(AVCodecContext* @s, AVPixelFormat* @fmt); - private static avcodec_default_get_format_delegate avcodec_default_get_format_fptr = (AVCodecContext* @s, AVPixelFormat* @fmt) => - { - avcodec_default_get_format_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_default_get_format"); - if (avcodec_default_get_format_fptr == null) - { - avcodec_default_get_format_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_default_get_format")); - }; - } - return avcodec_default_get_format_fptr(@s, @fmt); - }; - public static AVPixelFormat avcodec_default_get_format(AVCodecContext* @s, AVPixelFormat* @fmt) - { - return avcodec_default_get_format_fptr(@s, @fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecDescriptor* avcodec_descriptor_get_delegate(AVCodecID @id); - private static avcodec_descriptor_get_delegate avcodec_descriptor_get_fptr = (AVCodecID @id) => - { - avcodec_descriptor_get_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_descriptor_get"); - if (avcodec_descriptor_get_fptr == null) - { - avcodec_descriptor_get_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_descriptor_get")); - }; - } - return avcodec_descriptor_get_fptr(@id); - }; - /// Returns descriptor for given codec ID or NULL if no descriptor exists. - /// descriptor for given codec ID or NULL if no descriptor exists. 
- public static AVCodecDescriptor* avcodec_descriptor_get(AVCodecID @id) - { - return avcodec_descriptor_get_fptr(@id); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecDescriptor* avcodec_descriptor_get_by_name_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static avcodec_descriptor_get_by_name_delegate avcodec_descriptor_get_by_name_fptr = (string @name) => - { - avcodec_descriptor_get_by_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_descriptor_get_by_name"); - if (avcodec_descriptor_get_by_name_fptr == null) - { - avcodec_descriptor_get_by_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_descriptor_get_by_name")); - }; - } - return avcodec_descriptor_get_by_name_fptr(@name); - }; - /// Returns codec descriptor with the given name or NULL if no such descriptor exists. - /// codec descriptor with the given name or NULL if no such descriptor exists. 
- public static AVCodecDescriptor* avcodec_descriptor_get_by_name( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return avcodec_descriptor_get_by_name_fptr(@name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecDescriptor* avcodec_descriptor_next_delegate(AVCodecDescriptor* @prev); - private static avcodec_descriptor_next_delegate avcodec_descriptor_next_fptr = (AVCodecDescriptor* @prev) => - { - avcodec_descriptor_next_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_descriptor_next"); - if (avcodec_descriptor_next_fptr == null) - { - avcodec_descriptor_next_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_descriptor_next")); - }; - } - return avcodec_descriptor_next_fptr(@prev); - }; - /// Iterate over all codec descriptors known to libavcodec. - /// previous descriptor. NULL to get the first descriptor. 
- /// next descriptor or NULL after the last descriptor - public static AVCodecDescriptor* avcodec_descriptor_next(AVCodecDescriptor* @prev) - { - return avcodec_descriptor_next_fptr(@prev); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_encode_subtitle_delegate(AVCodecContext* @avctx, byte* @buf, int @buf_size, AVSubtitle* @sub); - private static avcodec_encode_subtitle_delegate avcodec_encode_subtitle_fptr = (AVCodecContext* @avctx, byte* @buf, int @buf_size, AVSubtitle* @sub) => - { - avcodec_encode_subtitle_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_encode_subtitle"); - if (avcodec_encode_subtitle_fptr == null) - { - avcodec_encode_subtitle_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_encode_subtitle")); - }; - } - return avcodec_encode_subtitle_fptr(@avctx, @buf, @buf_size, @sub); - }; - /// @{ - public static int avcodec_encode_subtitle(AVCodecContext* @avctx, byte* @buf, int @buf_size, AVSubtitle* @sub) - { - return avcodec_encode_subtitle_fptr(@avctx, @buf, @buf_size, @sub); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_enum_to_chroma_pos_delegate(int* @xpos, int* @ypos, AVChromaLocation @pos); - private static avcodec_enum_to_chroma_pos_delegate avcodec_enum_to_chroma_pos_fptr = (int* @xpos, int* @ypos, AVChromaLocation @pos) => - { - avcodec_enum_to_chroma_pos_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_enum_to_chroma_pos"); - if (avcodec_enum_to_chroma_pos_fptr == null) - { - avcodec_enum_to_chroma_pos_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_enum_to_chroma_pos")); - }; - } - return avcodec_enum_to_chroma_pos_fptr(@xpos, @ypos, @pos); - }; - /// Converts AVChromaLocation to swscale x/y chroma position. 
- /// horizontal chroma sample position - /// vertical chroma sample position - public static int avcodec_enum_to_chroma_pos(int* @xpos, int* @ypos, AVChromaLocation @pos) - { - return avcodec_enum_to_chroma_pos_fptr(@xpos, @ypos, @pos); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_fill_audio_frame_delegate(AVFrame* @frame, int @nb_channels, AVSampleFormat @sample_fmt, byte* @buf, int @buf_size, int @align); - private static avcodec_fill_audio_frame_delegate avcodec_fill_audio_frame_fptr = (AVFrame* @frame, int @nb_channels, AVSampleFormat @sample_fmt, byte* @buf, int @buf_size, int @align) => - { - avcodec_fill_audio_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_fill_audio_frame"); - if (avcodec_fill_audio_frame_fptr == null) - { - avcodec_fill_audio_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_fill_audio_frame")); - }; - } - return avcodec_fill_audio_frame_fptr(@frame, @nb_channels, @sample_fmt, @buf, @buf_size, @align); - }; - /// Fill AVFrame audio data and linesize pointers. - /// the AVFrame frame->nb_samples must be set prior to calling the function. This function fills in frame->data, frame->extended_data, frame->linesize[0]. 
- /// channel count - /// sample format - /// buffer to use for frame data - /// size of buffer - /// plane size sample alignment (0 = default) - /// >=0 on success, negative error code on failure - public static int avcodec_fill_audio_frame(AVFrame* @frame, int @nb_channels, AVSampleFormat @sample_fmt, byte* @buf, int @buf_size, int @align) - { - return avcodec_fill_audio_frame_fptr(@frame, @nb_channels, @sample_fmt, @buf, @buf_size, @align); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVPixelFormat avcodec_find_best_pix_fmt_of_list_delegate(AVPixelFormat* @pix_fmt_list, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr); - private static avcodec_find_best_pix_fmt_of_list_delegate avcodec_find_best_pix_fmt_of_list_fptr = (AVPixelFormat* @pix_fmt_list, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr) => - { - avcodec_find_best_pix_fmt_of_list_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_find_best_pix_fmt_of_list"); - if (avcodec_find_best_pix_fmt_of_list_fptr == null) - { - avcodec_find_best_pix_fmt_of_list_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_find_best_pix_fmt_of_list")); - }; - } - return avcodec_find_best_pix_fmt_of_list_fptr(@pix_fmt_list, @src_pix_fmt, @has_alpha, @loss_ptr); - }; - /// Find the best pixel format to convert to given a certain source pixel format. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. avcodec_find_best_pix_fmt_of_2() searches which of the given pixel formats should be used to suffer the least amount of loss. The pixel formats from which it chooses one, are determined by the pix_fmt_list parameter. 
- /// AV_PIX_FMT_NONE terminated array of pixel formats to choose from - /// source pixel format - /// Whether the source pixel format alpha channel is used. - /// Combination of flags informing you what kind of losses will occur. - /// The best pixel format to convert to or -1 if none was found. - public static AVPixelFormat avcodec_find_best_pix_fmt_of_list(AVPixelFormat* @pix_fmt_list, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr) - { - return avcodec_find_best_pix_fmt_of_list_fptr(@pix_fmt_list, @src_pix_fmt, @has_alpha, @loss_ptr); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodec* avcodec_find_decoder_delegate(AVCodecID @id); - private static avcodec_find_decoder_delegate avcodec_find_decoder_fptr = (AVCodecID @id) => - { - avcodec_find_decoder_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_find_decoder"); - if (avcodec_find_decoder_fptr == null) - { - avcodec_find_decoder_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_find_decoder")); - }; - } - return avcodec_find_decoder_fptr(@id); - }; - /// Find a registered decoder with a matching codec ID. - /// AVCodecID of the requested decoder - /// A decoder if one was found, NULL otherwise. 
- public static AVCodec* avcodec_find_decoder(AVCodecID @id) - { - return avcodec_find_decoder_fptr(@id); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodec* avcodec_find_decoder_by_name_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static avcodec_find_decoder_by_name_delegate avcodec_find_decoder_by_name_fptr = (string @name) => - { - avcodec_find_decoder_by_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_find_decoder_by_name"); - if (avcodec_find_decoder_by_name_fptr == null) - { - avcodec_find_decoder_by_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_find_decoder_by_name")); - }; - } - return avcodec_find_decoder_by_name_fptr(@name); - }; - /// Find a registered decoder with the specified name. - /// name of the requested decoder - /// A decoder if one was found, NULL otherwise. 
- public static AVCodec* avcodec_find_decoder_by_name( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return avcodec_find_decoder_by_name_fptr(@name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodec* avcodec_find_encoder_delegate(AVCodecID @id); - private static avcodec_find_encoder_delegate avcodec_find_encoder_fptr = (AVCodecID @id) => - { - avcodec_find_encoder_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_find_encoder"); - if (avcodec_find_encoder_fptr == null) - { - avcodec_find_encoder_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_find_encoder")); - }; - } - return avcodec_find_encoder_fptr(@id); - }; - /// Find a registered encoder with a matching codec ID. - /// AVCodecID of the requested encoder - /// An encoder if one was found, NULL otherwise. 
- public static AVCodec* avcodec_find_encoder(AVCodecID @id) - { - return avcodec_find_encoder_fptr(@id); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodec* avcodec_find_encoder_by_name_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static avcodec_find_encoder_by_name_delegate avcodec_find_encoder_by_name_fptr = (string @name) => - { - avcodec_find_encoder_by_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_find_encoder_by_name"); - if (avcodec_find_encoder_by_name_fptr == null) - { - avcodec_find_encoder_by_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_find_encoder_by_name")); - }; - } - return avcodec_find_encoder_by_name_fptr(@name); - }; - /// Find a registered encoder with the specified name. - /// name of the requested encoder - /// An encoder if one was found, NULL otherwise. 
- public static AVCodec* avcodec_find_encoder_by_name( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return avcodec_find_encoder_by_name_fptr(@name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avcodec_flush_buffers_delegate(AVCodecContext* @avctx); - private static avcodec_flush_buffers_delegate avcodec_flush_buffers_fptr = (AVCodecContext* @avctx) => - { - avcodec_flush_buffers_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_flush_buffers"); - if (avcodec_flush_buffers_fptr == null) - { - avcodec_flush_buffers_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_flush_buffers")); - }; - } - avcodec_flush_buffers_fptr(@avctx); - }; - /// Reset the internal codec state / flush internal buffers. Should be called e.g. when seeking or when switching to a different stream. - public static void avcodec_flush_buffers(AVCodecContext* @avctx) - { - avcodec_flush_buffers_fptr(@avctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avcodec_free_context_delegate(AVCodecContext** @avctx); - private static avcodec_free_context_delegate avcodec_free_context_fptr = (AVCodecContext** @avctx) => - { - avcodec_free_context_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_free_context"); - if (avcodec_free_context_fptr == null) - { - avcodec_free_context_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_free_context")); - }; - } - avcodec_free_context_fptr(@avctx); - }; - /// Free the codec context and everything associated with it and write NULL to the provided pointer. 
- public static void avcodec_free_context(AVCodecContext** @avctx) - { - avcodec_free_context_fptr(@avctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVClass* avcodec_get_class_delegate(); - private static avcodec_get_class_delegate avcodec_get_class_fptr = () => - { - avcodec_get_class_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_get_class"); - if (avcodec_get_class_fptr == null) - { - avcodec_get_class_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_get_class")); - }; - } - return avcodec_get_class_fptr(); - }; - /// Get the AVClass for AVCodecContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. - public static AVClass* avcodec_get_class() - { - return avcodec_get_class_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVClass* avcodec_get_frame_class_delegate(); - private static avcodec_get_frame_class_delegate avcodec_get_frame_class_fptr = () => - { - avcodec_get_frame_class_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_get_frame_class"); - if (avcodec_get_frame_class_fptr == null) - { - avcodec_get_frame_class_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_get_frame_class")); - }; - } - return avcodec_get_frame_class_fptr(); - }; - [Obsolete("This function should not be used.")] - public static AVClass* avcodec_get_frame_class() - { - return avcodec_get_frame_class_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecHWConfig* avcodec_get_hw_config_delegate(AVCodec* @codec, int @index); - private static avcodec_get_hw_config_delegate avcodec_get_hw_config_fptr = (AVCodec* @codec, int @index) => - { - avcodec_get_hw_config_fptr = 
GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_get_hw_config"); - if (avcodec_get_hw_config_fptr == null) - { - avcodec_get_hw_config_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_get_hw_config")); - }; - } - return avcodec_get_hw_config_fptr(@codec, @index); - }; - /// Retrieve supported hardware configurations for a codec. - public static AVCodecHWConfig* avcodec_get_hw_config(AVCodec* @codec, int @index) - { - return avcodec_get_hw_config_fptr(@codec, @index); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_get_hw_frames_parameters_delegate(AVCodecContext* @avctx, AVBufferRef* @device_ref, AVPixelFormat @hw_pix_fmt, AVBufferRef** @out_frames_ref); - private static avcodec_get_hw_frames_parameters_delegate avcodec_get_hw_frames_parameters_fptr = (AVCodecContext* @avctx, AVBufferRef* @device_ref, AVPixelFormat @hw_pix_fmt, AVBufferRef** @out_frames_ref) => - { - avcodec_get_hw_frames_parameters_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_get_hw_frames_parameters"); - if (avcodec_get_hw_frames_parameters_fptr == null) - { - avcodec_get_hw_frames_parameters_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_get_hw_frames_parameters")); - }; - } - return avcodec_get_hw_frames_parameters_fptr(@avctx, @device_ref, @hw_pix_fmt, @out_frames_ref); - }; - /// Create and return a AVHWFramesContext with values adequate for hardware decoding. This is meant to get called from the get_format callback, and is a helper for preparing a AVHWFramesContext for AVCodecContext.hw_frames_ctx. This API is for decoding with certain hardware acceleration modes/APIs only. - /// The context which is currently calling get_format, and which implicitly contains all state needed for filling the returned AVHWFramesContext properly. 
- /// A reference to the AVHWDeviceContext describing the device which will be used by the hardware decoder. - /// The hwaccel format you are going to return from get_format. - /// On success, set to a reference to an _uninitialized_ AVHWFramesContext, created from the given device_ref. Fields will be set to values required for decoding. Not changed if an error is returned. - /// zero on success, a negative value on error. The following error codes have special semantics: AVERROR(ENOENT): the decoder does not support this functionality. Setup is always manual, or it is a decoder which does not support setting AVCodecContext.hw_frames_ctx at all, or it is a software format. AVERROR(EINVAL): it is known that hardware decoding is not supported for this configuration, or the device_ref is not supported for the hwaccel referenced by hw_pix_fmt. - public static int avcodec_get_hw_frames_parameters(AVCodecContext* @avctx, AVBufferRef* @device_ref, AVPixelFormat @hw_pix_fmt, AVBufferRef** @out_frames_ref) - { - return avcodec_get_hw_frames_parameters_fptr(@avctx, @device_ref, @hw_pix_fmt, @out_frames_ref); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avcodec_get_name_delegate(AVCodecID @id); - private static avcodec_get_name_delegate avcodec_get_name_fptr = (AVCodecID @id) => - { - avcodec_get_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_get_name"); - if (avcodec_get_name_fptr == null) - { - avcodec_get_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_get_name")); - }; - } - return avcodec_get_name_fptr(@id); - }; - /// Get the name of a codec. 
- /// a static string identifying the codec; never NULL - public static string avcodec_get_name(AVCodecID @id) - { - return avcodec_get_name_fptr(@id); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVClass* avcodec_get_subtitle_rect_class_delegate(); - private static avcodec_get_subtitle_rect_class_delegate avcodec_get_subtitle_rect_class_fptr = () => - { - avcodec_get_subtitle_rect_class_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_get_subtitle_rect_class"); - if (avcodec_get_subtitle_rect_class_fptr == null) - { - avcodec_get_subtitle_rect_class_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_get_subtitle_rect_class")); - }; - } - return avcodec_get_subtitle_rect_class_fptr(); - }; - /// Get the AVClass for AVSubtitleRect. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. - public static AVClass* avcodec_get_subtitle_rect_class() - { - return avcodec_get_subtitle_rect_class_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVMediaType avcodec_get_type_delegate(AVCodecID @codec_id); - private static avcodec_get_type_delegate avcodec_get_type_fptr = (AVCodecID @codec_id) => - { - avcodec_get_type_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_get_type"); - if (avcodec_get_type_fptr == null) - { - avcodec_get_type_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_get_type")); - }; - } - return avcodec_get_type_fptr(@codec_id); - }; - /// Get the type of the given codec. 
- public static AVMediaType avcodec_get_type(AVCodecID @codec_id) - { - return avcodec_get_type_fptr(@codec_id); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_is_open_delegate(AVCodecContext* @s); - private static avcodec_is_open_delegate avcodec_is_open_fptr = (AVCodecContext* @s) => - { - avcodec_is_open_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_is_open"); - if (avcodec_is_open_fptr == null) - { - avcodec_is_open_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_is_open")); - }; - } - return avcodec_is_open_fptr(@s); - }; - /// Returns a positive value if s is open (i.e. avcodec_open2() was called on it with no corresponding avcodec_close()), 0 otherwise. - /// a positive value if s is open (i.e. avcodec_open2() was called on it with no corresponding avcodec_close()), 0 otherwise. - public static int avcodec_is_open(AVCodecContext* @s) - { - return avcodec_is_open_fptr(@s); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avcodec_license_delegate(); - private static avcodec_license_delegate avcodec_license_fptr = () => - { - avcodec_license_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_license"); - if (avcodec_license_fptr == null) - { - avcodec_license_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_license")); - }; - } - return avcodec_license_fptr(); - }; - /// Return the libavcodec license. 
- public static string avcodec_license() - { - return avcodec_license_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_open2_delegate(AVCodecContext* @avctx, AVCodec* @codec, AVDictionary** @options); - private static avcodec_open2_delegate avcodec_open2_fptr = (AVCodecContext* @avctx, AVCodec* @codec, AVDictionary** @options) => - { - avcodec_open2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_open2"); - if (avcodec_open2_fptr == null) - { - avcodec_open2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_open2")); - }; - } - return avcodec_open2_fptr(@avctx, @codec, @options); - }; - /// Initialize the AVCodecContext to use the given AVCodec. Prior to using this function the context has to be allocated with avcodec_alloc_context3(). - /// The context to initialize. - /// The codec to open this context for. If a non-NULL codec has been previously passed to avcodec_alloc_context3() or for this context, then this parameter MUST be either NULL or equal to the previously passed codec. - /// A dictionary filled with AVCodecContext and codec-private options. On return this object will be filled with options that were not found. 
- /// zero on success, a negative value on error - public static int avcodec_open2(AVCodecContext* @avctx, AVCodec* @codec, AVDictionary** @options) - { - return avcodec_open2_fptr(@avctx, @codec, @options); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecParameters* avcodec_parameters_alloc_delegate(); - private static avcodec_parameters_alloc_delegate avcodec_parameters_alloc_fptr = () => - { - avcodec_parameters_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_parameters_alloc"); - if (avcodec_parameters_alloc_fptr == null) - { - avcodec_parameters_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_parameters_alloc")); - }; - } - return avcodec_parameters_alloc_fptr(); - }; - /// Allocate a new AVCodecParameters and set its fields to default values (unknown/invalid/0). The returned struct must be freed with avcodec_parameters_free(). - public static AVCodecParameters* avcodec_parameters_alloc() - { - return avcodec_parameters_alloc_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_parameters_copy_delegate(AVCodecParameters* @dst, AVCodecParameters* @src); - private static avcodec_parameters_copy_delegate avcodec_parameters_copy_fptr = (AVCodecParameters* @dst, AVCodecParameters* @src) => - { - avcodec_parameters_copy_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_parameters_copy"); - if (avcodec_parameters_copy_fptr == null) - { - avcodec_parameters_copy_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_parameters_copy")); - }; - } - return avcodec_parameters_copy_fptr(@dst, @src); - }; - /// Copy the contents of src to dst. Any allocated fields in dst are freed and replaced with newly allocated duplicates of the corresponding fields in src. 
- /// >= 0 on success, a negative AVERROR code on failure. - public static int avcodec_parameters_copy(AVCodecParameters* @dst, AVCodecParameters* @src) - { - return avcodec_parameters_copy_fptr(@dst, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avcodec_parameters_free_delegate(AVCodecParameters** @par); - private static avcodec_parameters_free_delegate avcodec_parameters_free_fptr = (AVCodecParameters** @par) => - { - avcodec_parameters_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_parameters_free"); - if (avcodec_parameters_free_fptr == null) - { - avcodec_parameters_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_parameters_free")); - }; - } - avcodec_parameters_free_fptr(@par); - }; - /// Free an AVCodecParameters instance and everything associated with it and write NULL to the supplied pointer. - public static void avcodec_parameters_free(AVCodecParameters** @par) - { - avcodec_parameters_free_fptr(@par); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_parameters_from_context_delegate(AVCodecParameters* @par, AVCodecContext* @codec); - private static avcodec_parameters_from_context_delegate avcodec_parameters_from_context_fptr = (AVCodecParameters* @par, AVCodecContext* @codec) => - { - avcodec_parameters_from_context_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_parameters_from_context"); - if (avcodec_parameters_from_context_fptr == null) - { - avcodec_parameters_from_context_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_parameters_from_context")); - }; - } - return avcodec_parameters_from_context_fptr(@par, @codec); - }; - /// Fill the parameters struct based on the values from the supplied codec context. 
Any allocated fields in par are freed and replaced with duplicates of the corresponding fields in codec. - /// >= 0 on success, a negative AVERROR code on failure - public static int avcodec_parameters_from_context(AVCodecParameters* @par, AVCodecContext* @codec) - { - return avcodec_parameters_from_context_fptr(@par, @codec); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_parameters_to_context_delegate(AVCodecContext* @codec, AVCodecParameters* @par); - private static avcodec_parameters_to_context_delegate avcodec_parameters_to_context_fptr = (AVCodecContext* @codec, AVCodecParameters* @par) => - { - avcodec_parameters_to_context_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_parameters_to_context"); - if (avcodec_parameters_to_context_fptr == null) - { - avcodec_parameters_to_context_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_parameters_to_context")); - }; - } - return avcodec_parameters_to_context_fptr(@codec, @par); - }; - /// Fill the codec context based on the values from the supplied codec parameters. Any allocated fields in codec that have a corresponding field in par are freed and replaced with duplicates of the corresponding field in par. Fields in codec that do not have a counterpart in par are not touched. - /// >= 0 on success, a negative AVERROR code on failure. 
- public static int avcodec_parameters_to_context(AVCodecContext* @codec, AVCodecParameters* @par) - { - return avcodec_parameters_to_context_fptr(@codec, @par); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint avcodec_pix_fmt_to_codec_tag_delegate(AVPixelFormat @pix_fmt); - private static avcodec_pix_fmt_to_codec_tag_delegate avcodec_pix_fmt_to_codec_tag_fptr = (AVPixelFormat @pix_fmt) => - { - avcodec_pix_fmt_to_codec_tag_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_pix_fmt_to_codec_tag"); - if (avcodec_pix_fmt_to_codec_tag_fptr == null) - { - avcodec_pix_fmt_to_codec_tag_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_pix_fmt_to_codec_tag")); - }; - } - return avcodec_pix_fmt_to_codec_tag_fptr(@pix_fmt); - }; - /// Return a value representing the fourCC code associated to the pixel format pix_fmt, or 0 if no associated fourCC code can be found. - public static uint avcodec_pix_fmt_to_codec_tag(AVPixelFormat @pix_fmt) - { - return avcodec_pix_fmt_to_codec_tag_fptr(@pix_fmt); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avcodec_profile_name_delegate(AVCodecID @codec_id, int @profile); - private static avcodec_profile_name_delegate avcodec_profile_name_fptr = (AVCodecID @codec_id, int @profile) => - { - avcodec_profile_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_profile_name"); - if (avcodec_profile_name_fptr == null) - { - avcodec_profile_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_profile_name")); - }; - } - return avcodec_profile_name_fptr(@codec_id, @profile); - }; - /// Return a name for the specified profile, if available. 
- /// the ID of the codec to which the requested profile belongs - /// the profile value for which a name is requested - /// A name for the profile if found, NULL otherwise. - public static string avcodec_profile_name(AVCodecID @codec_id, int @profile) - { - return avcodec_profile_name_fptr(@codec_id, @profile); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_receive_frame_delegate(AVCodecContext* @avctx, AVFrame* @frame); - private static avcodec_receive_frame_delegate avcodec_receive_frame_fptr = (AVCodecContext* @avctx, AVFrame* @frame) => - { - avcodec_receive_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_receive_frame"); - if (avcodec_receive_frame_fptr == null) - { - avcodec_receive_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_receive_frame")); - }; - } - return avcodec_receive_frame_fptr(@avctx, @frame); - }; - /// Return decoded output data from a decoder. - /// codec context - /// This will be set to a reference-counted video or audio frame (depending on the decoder type) allocated by the decoder. Note that the function will always call av_frame_unref(frame) before doing anything else. - /// 0: success, a frame was returned AVERROR(EAGAIN): output is not available in this state - user must try to send new input AVERROR_EOF: the decoder has been fully flushed, and there will be no more output frames AVERROR(EINVAL): codec not opened, or it is an encoder AVERROR_INPUT_CHANGED: current decoded frame has changed parameters with respect to first decoded frame. Applicable when flag AV_CODEC_FLAG_DROPCHANGED is set. 
other negative values: legitimate decoding errors - public static int avcodec_receive_frame(AVCodecContext* @avctx, AVFrame* @frame) - { - return avcodec_receive_frame_fptr(@avctx, @frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_receive_packet_delegate(AVCodecContext* @avctx, AVPacket* @avpkt); - private static avcodec_receive_packet_delegate avcodec_receive_packet_fptr = (AVCodecContext* @avctx, AVPacket* @avpkt) => - { - avcodec_receive_packet_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_receive_packet"); - if (avcodec_receive_packet_fptr == null) - { - avcodec_receive_packet_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_receive_packet")); - }; - } - return avcodec_receive_packet_fptr(@avctx, @avpkt); - }; - /// Read encoded data from the encoder. - /// codec context - /// This will be set to a reference-counted packet allocated by the encoder. Note that the function will always call av_packet_unref(avpkt) before doing anything else. 
- /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): output is not available in the current state - user must try to send input AVERROR_EOF: the encoder has been fully flushed, and there will be no more output packets AVERROR(EINVAL): codec not opened, or it is a decoder other errors: legitimate encoding errors - public static int avcodec_receive_packet(AVCodecContext* @avctx, AVPacket* @avpkt) - { - return avcodec_receive_packet_fptr(@avctx, @avpkt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_send_frame_delegate(AVCodecContext* @avctx, AVFrame* @frame); - private static avcodec_send_frame_delegate avcodec_send_frame_fptr = (AVCodecContext* @avctx, AVFrame* @frame) => - { - avcodec_send_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_send_frame"); - if (avcodec_send_frame_fptr == null) - { - avcodec_send_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_send_frame")); - }; - } - return avcodec_send_frame_fptr(@avctx, @frame); - }; - /// Supply a raw video or audio frame to the encoder. Use avcodec_receive_packet() to retrieve buffered output packets. - /// codec context - /// AVFrame containing the raw audio or video frame to be encoded. Ownership of the frame remains with the caller, and the encoder will not write to the frame. The encoder may create a reference to the frame data (or copy it if the frame is not reference-counted). It can be NULL, in which case it is considered a flush packet. This signals the end of the stream. If the encoder still has packets buffered, it will return them after this call. Once flushing mode has been entered, additional flush packets are ignored, and sending frames will return AVERROR_EOF. 
- /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): input is not accepted in the current state - user must read output with avcodec_receive_packet() (once all output is read, the packet should be resent, and the call will not fail with EAGAIN). AVERROR_EOF: the encoder has been flushed, and no new frames can be sent to it AVERROR(EINVAL): codec not opened, it is a decoder, or requires flush AVERROR(ENOMEM): failed to add packet to internal queue, or similar other errors: legitimate encoding errors - public static int avcodec_send_frame(AVCodecContext* @avctx, AVFrame* @frame) - { - return avcodec_send_frame_fptr(@avctx, @frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avcodec_send_packet_delegate(AVCodecContext* @avctx, AVPacket* @avpkt); - private static avcodec_send_packet_delegate avcodec_send_packet_fptr = (AVCodecContext* @avctx, AVPacket* @avpkt) => - { - avcodec_send_packet_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_send_packet"); - if (avcodec_send_packet_fptr == null) - { - avcodec_send_packet_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_send_packet")); - }; - } - return avcodec_send_packet_fptr(@avctx, @avpkt); - }; - /// Supply raw packet data as input to a decoder. - /// codec context - /// The input AVPacket. Usually, this will be a single video frame, or several complete audio frames. Ownership of the packet remains with the caller, and the decoder will not write to the packet. The decoder may create a reference to the packet data (or copy it if the packet is not reference-counted). Unlike with older APIs, the packet is always fully consumed, and if it contains multiple frames (e.g. some audio codecs), will require you to call avcodec_receive_frame() multiple times afterwards before you can send a new packet. 
It can be NULL (or an AVPacket with data set to NULL and size set to 0); in this case, it is considered a flush packet, which signals the end of the stream. Sending the first flush packet will return success. Subsequent ones are unnecessary and will return AVERROR_EOF. If the decoder still has frames buffered, it will return them after sending a flush packet. - /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): input is not accepted in the current state - user must read output with avcodec_receive_frame() (once all output is read, the packet should be resent, and the call will not fail with EAGAIN). AVERROR_EOF: the decoder has been flushed, and no new packets can be sent to it (also returned if more than 1 flush packet is sent) AVERROR(EINVAL): codec not opened, it is an encoder, or requires flush AVERROR(ENOMEM): failed to add packet to internal queue, or similar other errors: legitimate decoding errors - public static int avcodec_send_packet(AVCodecContext* @avctx, AVPacket* @avpkt) - { - return avcodec_send_packet_fptr(@avctx, @avpkt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avcodec_string_delegate(byte* @buf, int @buf_size, AVCodecContext* @enc, int @encode); - private static avcodec_string_delegate avcodec_string_fptr = (byte* @buf, int @buf_size, AVCodecContext* @enc, int @encode) => - { - avcodec_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_string"); - if (avcodec_string_fptr == null) - { - avcodec_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_string")); - }; - } - avcodec_string_fptr(@buf, @buf_size, @enc, @encode); - }; - /// @} - public static void avcodec_string(byte* @buf, int @buf_size, AVCodecContext* @enc, int @encode) - { - avcodec_string_fptr(@buf, @buf_size, @enc, @encode); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = 
CharSet.Ansi)] - private delegate uint avcodec_version_delegate(); - private static avcodec_version_delegate avcodec_version_fptr = () => - { - avcodec_version_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avcodec_version"); - if (avcodec_version_fptr == null) - { - avcodec_version_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avcodec_version")); - }; - } - return avcodec_version_fptr(); - }; - /// Return the LIBAVCODEC_VERSION_INT constant. - public static uint avcodec_version() - { - return avcodec_version_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avsubtitle_free_delegate(AVSubtitle* @sub); - private static avsubtitle_free_delegate avsubtitle_free_fptr = (AVSubtitle* @sub) => - { - avsubtitle_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avcodec"), "avsubtitle_free"); - if (avsubtitle_free_fptr == null) - { - avsubtitle_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avsubtitle_free")); - }; - } - avsubtitle_free_fptr(@sub); - }; - /// Free all allocated data in the given subtitle struct. - /// AVSubtitle to free. 
- public static void avsubtitle_free(AVSubtitle* @sub) - { - avsubtitle_free_fptr(@sub); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVInputFormat* av_input_audio_device_next_delegate(AVInputFormat* @d); - private static av_input_audio_device_next_delegate av_input_audio_device_next_fptr = (AVInputFormat* @d) => - { - av_input_audio_device_next_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "av_input_audio_device_next"); - if (av_input_audio_device_next_fptr == null) - { - av_input_audio_device_next_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_input_audio_device_next")); - }; - } - return av_input_audio_device_next_fptr(@d); - }; - /// Audio input devices iterator. - public static AVInputFormat* av_input_audio_device_next(AVInputFormat* @d) - { - return av_input_audio_device_next_fptr(@d); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVInputFormat* av_input_video_device_next_delegate(AVInputFormat* @d); - private static av_input_video_device_next_delegate av_input_video_device_next_fptr = (AVInputFormat* @d) => - { - av_input_video_device_next_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "av_input_video_device_next"); - if (av_input_video_device_next_fptr == null) - { - av_input_video_device_next_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_input_video_device_next")); - }; - } - return av_input_video_device_next_fptr(@d); - }; - /// Video input devices iterator. 
- public static AVInputFormat* av_input_video_device_next(AVInputFormat* @d) - { - return av_input_video_device_next_fptr(@d); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVOutputFormat* av_output_audio_device_next_delegate(AVOutputFormat* @d); - private static av_output_audio_device_next_delegate av_output_audio_device_next_fptr = (AVOutputFormat* @d) => - { - av_output_audio_device_next_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "av_output_audio_device_next"); - if (av_output_audio_device_next_fptr == null) - { - av_output_audio_device_next_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_output_audio_device_next")); - }; - } - return av_output_audio_device_next_fptr(@d); - }; - /// Audio output devices iterator. - public static AVOutputFormat* av_output_audio_device_next(AVOutputFormat* @d) - { - return av_output_audio_device_next_fptr(@d); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVOutputFormat* av_output_video_device_next_delegate(AVOutputFormat* @d); - private static av_output_video_device_next_delegate av_output_video_device_next_fptr = (AVOutputFormat* @d) => - { - av_output_video_device_next_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "av_output_video_device_next"); - if (av_output_video_device_next_fptr == null) - { - av_output_video_device_next_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_output_video_device_next")); - }; - } - return av_output_video_device_next_fptr(@d); - }; - /// Video output devices iterator. 
- public static AVOutputFormat* av_output_video_device_next(AVOutputFormat* @d) - { - return av_output_video_device_next_fptr(@d); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avdevice_app_to_dev_control_message_delegate(AVFormatContext* @s, AVAppToDevMessageType @type, void* @data, ulong @data_size); - private static avdevice_app_to_dev_control_message_delegate avdevice_app_to_dev_control_message_fptr = (AVFormatContext* @s, AVAppToDevMessageType @type, void* @data, ulong @data_size) => - { - avdevice_app_to_dev_control_message_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "avdevice_app_to_dev_control_message"); - if (avdevice_app_to_dev_control_message_fptr == null) - { - avdevice_app_to_dev_control_message_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avdevice_app_to_dev_control_message")); - }; - } - return avdevice_app_to_dev_control_message_fptr(@s, @type, @data, @data_size); - }; - /// Send control message from application to device. - /// device context. - /// message type. - /// message data. Exact type depends on message type. - /// size of message data. - /// >= 0 on success, negative on error. AVERROR(ENOSYS) when device doesn't implement handler of the message. 
- public static int avdevice_app_to_dev_control_message(AVFormatContext* @s, AVAppToDevMessageType @type, void* @data, ulong @data_size) - { - return avdevice_app_to_dev_control_message_fptr(@s, @type, @data, @data_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avdevice_capabilities_create_delegate(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s, AVDictionary** @device_options); - private static avdevice_capabilities_create_delegate avdevice_capabilities_create_fptr = (AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s, AVDictionary** @device_options) => - { - avdevice_capabilities_create_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "avdevice_capabilities_create"); - if (avdevice_capabilities_create_fptr == null) - { - avdevice_capabilities_create_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avdevice_capabilities_create")); - }; - } - return avdevice_capabilities_create_fptr(@caps, @s, @device_options); - }; - /// Initialize capabilities probing API based on AVOption API. - /// Device capabilities data. Pointer to a NULL pointer must be passed. - /// Context of the device. - /// An AVDictionary filled with device-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. The same options must be passed later to avformat_write_header() for output devices or avformat_open_input() for input devices, or at any other place that affects device-private options. - /// >= 0 on success, negative otherwise. 
- [Obsolete("")] - public static int avdevice_capabilities_create(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s, AVDictionary** @device_options) - { - return avdevice_capabilities_create_fptr(@caps, @s, @device_options); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avdevice_capabilities_free_delegate(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s); - private static avdevice_capabilities_free_delegate avdevice_capabilities_free_fptr = (AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s) => - { - avdevice_capabilities_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "avdevice_capabilities_free"); - if (avdevice_capabilities_free_fptr == null) - { - avdevice_capabilities_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avdevice_capabilities_free")); - }; - } - avdevice_capabilities_free_fptr(@caps, @s); - }; - /// Free resources created by avdevice_capabilities_create() - /// Device capabilities data to be freed. - /// Context of the device. 
- [Obsolete("")] - public static void avdevice_capabilities_free(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s) - { - avdevice_capabilities_free_fptr(@caps, @s); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avdevice_configuration_delegate(); - private static avdevice_configuration_delegate avdevice_configuration_fptr = () => - { - avdevice_configuration_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "avdevice_configuration"); - if (avdevice_configuration_fptr == null) - { - avdevice_configuration_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avdevice_configuration")); - }; - } - return avdevice_configuration_fptr(); - }; - /// Return the libavdevice build-time configuration. - public static string avdevice_configuration() - { - return avdevice_configuration_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avdevice_dev_to_app_control_message_delegate(AVFormatContext* @s, AVDevToAppMessageType @type, void* @data, ulong @data_size); - private static avdevice_dev_to_app_control_message_delegate avdevice_dev_to_app_control_message_fptr = (AVFormatContext* @s, AVDevToAppMessageType @type, void* @data, ulong @data_size) => - { - avdevice_dev_to_app_control_message_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "avdevice_dev_to_app_control_message"); - if (avdevice_dev_to_app_control_message_fptr == null) - { - avdevice_dev_to_app_control_message_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avdevice_dev_to_app_control_message")); - }; - } - return avdevice_dev_to_app_control_message_fptr(@s, @type, @data, @data_size); - }; - /// Send control message from device to application. 
- /// device context. - /// message type. - /// message data. Can be NULL. - /// size of message data. - /// >= 0 on success, negative on error. AVERROR(ENOSYS) when application doesn't implement handler of the message. - public static int avdevice_dev_to_app_control_message(AVFormatContext* @s, AVDevToAppMessageType @type, void* @data, ulong @data_size) - { - return avdevice_dev_to_app_control_message_fptr(@s, @type, @data, @data_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avdevice_free_list_devices_delegate(AVDeviceInfoList** @device_list); - private static avdevice_free_list_devices_delegate avdevice_free_list_devices_fptr = (AVDeviceInfoList** @device_list) => - { - avdevice_free_list_devices_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "avdevice_free_list_devices"); - if (avdevice_free_list_devices_fptr == null) - { - avdevice_free_list_devices_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avdevice_free_list_devices")); - }; - } - avdevice_free_list_devices_fptr(@device_list); - }; - /// Convenient function to free result of avdevice_list_devices(). 
- public static void avdevice_free_list_devices(AVDeviceInfoList** @device_list) - { - avdevice_free_list_devices_fptr(@device_list); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avdevice_license_delegate(); - private static avdevice_license_delegate avdevice_license_fptr = () => - { - avdevice_license_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "avdevice_license"); - if (avdevice_license_fptr == null) - { - avdevice_license_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avdevice_license")); - }; - } - return avdevice_license_fptr(); - }; - /// Return the libavdevice license. - public static string avdevice_license() - { - return avdevice_license_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avdevice_list_devices_delegate(AVFormatContext* @s, AVDeviceInfoList** @device_list); - private static avdevice_list_devices_delegate avdevice_list_devices_fptr = (AVFormatContext* @s, AVDeviceInfoList** @device_list) => - { - avdevice_list_devices_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "avdevice_list_devices"); - if (avdevice_list_devices_fptr == null) - { - avdevice_list_devices_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avdevice_list_devices")); - }; - } - return avdevice_list_devices_fptr(@s, @device_list); - }; - /// List devices. - /// device context. - /// list of autodetected devices. - /// count of autodetected devices, negative on error. 
- public static int avdevice_list_devices(AVFormatContext* @s, AVDeviceInfoList** @device_list) - { - return avdevice_list_devices_fptr(@s, @device_list); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avdevice_list_input_sources_delegate(AVInputFormat* @device, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list); - private static avdevice_list_input_sources_delegate avdevice_list_input_sources_fptr = (AVInputFormat* @device, string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list) => - { - avdevice_list_input_sources_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "avdevice_list_input_sources"); - if (avdevice_list_input_sources_fptr == null) - { - avdevice_list_input_sources_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avdevice_list_input_sources")); - }; - } - return avdevice_list_input_sources_fptr(@device, @device_name, @device_options, @device_list); - }; - /// List devices. - /// device format. May be NULL if device name is set. - /// device name. May be NULL if device format is set. - /// An AVDictionary filled with device-private options. May be NULL. The same options must be passed later to avformat_write_header() for output devices or avformat_open_input() for input devices, or at any other place that affects device-private options. - /// list of autodetected devices - /// count of autodetected devices, negative on error. 
- public static int avdevice_list_input_sources(AVInputFormat* @device, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list) - { - return avdevice_list_input_sources_fptr(@device, @device_name, @device_options, @device_list); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avdevice_list_output_sinks_delegate(AVOutputFormat* @device, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list); - private static avdevice_list_output_sinks_delegate avdevice_list_output_sinks_fptr = (AVOutputFormat* @device, string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list) => - { - avdevice_list_output_sinks_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "avdevice_list_output_sinks"); - if (avdevice_list_output_sinks_fptr == null) - { - avdevice_list_output_sinks_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avdevice_list_output_sinks")); - }; - } - return avdevice_list_output_sinks_fptr(@device, @device_name, @device_options, @device_list); - }; - public static int avdevice_list_output_sinks(AVOutputFormat* @device, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list) - { - return avdevice_list_output_sinks_fptr(@device, @device_name, @device_options, @device_list); - } - - - 
[UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avdevice_register_all_delegate(); - private static avdevice_register_all_delegate avdevice_register_all_fptr = () => - { - avdevice_register_all_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "avdevice_register_all"); - if (avdevice_register_all_fptr == null) - { - avdevice_register_all_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avdevice_register_all")); - }; - } - avdevice_register_all_fptr(); - }; - /// Initialize libavdevice and register all the input and output devices. - public static void avdevice_register_all() - { - avdevice_register_all_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint avdevice_version_delegate(); - private static avdevice_version_delegate avdevice_version_fptr = () => - { - avdevice_version_fptr = GetFunctionDelegate(GetOrLoadLibrary("avdevice"), "avdevice_version"); - if (avdevice_version_fptr == null) - { - avdevice_version_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avdevice_version")); - }; - } - return avdevice_version_fptr(); - }; - /// Return the LIBAVDEVICE_VERSION_INT constant. 
- public static uint avdevice_version() - { - return avdevice_version_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVABufferSinkParams* av_abuffersink_params_alloc_delegate(); - private static av_abuffersink_params_alloc_delegate av_abuffersink_params_alloc_fptr = () => - { - av_abuffersink_params_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_abuffersink_params_alloc"); - if (av_abuffersink_params_alloc_fptr == null) - { - av_abuffersink_params_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_abuffersink_params_alloc")); - }; - } - return av_abuffersink_params_alloc_fptr(); - }; - /// Create an AVABufferSinkParams structure. - [Obsolete("")] - public static AVABufferSinkParams* av_abuffersink_params_alloc() - { - return av_abuffersink_params_alloc_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersink_get_ch_layout_delegate(AVFilterContext* @ctx, AVChannelLayout* @ch_layout); - private static av_buffersink_get_ch_layout_delegate av_buffersink_get_ch_layout_fptr = (AVFilterContext* @ctx, AVChannelLayout* @ch_layout) => - { - av_buffersink_get_ch_layout_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_ch_layout"); - if (av_buffersink_get_ch_layout_fptr == null) - { - av_buffersink_get_ch_layout_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_ch_layout")); - }; - } - return av_buffersink_get_ch_layout_fptr(@ctx, @ch_layout); - }; - public static int av_buffersink_get_ch_layout(AVFilterContext* @ctx, AVChannelLayout* @ch_layout) - { - return av_buffersink_get_ch_layout_fptr(@ctx, @ch_layout); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate ulong 
av_buffersink_get_channel_layout_delegate(AVFilterContext* @ctx); - private static av_buffersink_get_channel_layout_delegate av_buffersink_get_channel_layout_fptr = (AVFilterContext* @ctx) => - { - av_buffersink_get_channel_layout_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_channel_layout"); - if (av_buffersink_get_channel_layout_fptr == null) - { - av_buffersink_get_channel_layout_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_channel_layout")); - }; - } - return av_buffersink_get_channel_layout_fptr(@ctx); - }; - [Obsolete("")] - public static ulong av_buffersink_get_channel_layout(AVFilterContext* @ctx) - { - return av_buffersink_get_channel_layout_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersink_get_channels_delegate(AVFilterContext* @ctx); - private static av_buffersink_get_channels_delegate av_buffersink_get_channels_fptr = (AVFilterContext* @ctx) => - { - av_buffersink_get_channels_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_channels"); - if (av_buffersink_get_channels_fptr == null) - { - av_buffersink_get_channels_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_channels")); - }; - } - return av_buffersink_get_channels_fptr(@ctx); - }; - public static int av_buffersink_get_channels(AVFilterContext* @ctx) - { - return av_buffersink_get_channels_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersink_get_format_delegate(AVFilterContext* @ctx); - private static av_buffersink_get_format_delegate av_buffersink_get_format_fptr = (AVFilterContext* @ctx) => - { - av_buffersink_get_format_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_format"); - if 
(av_buffersink_get_format_fptr == null) - { - av_buffersink_get_format_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_format")); - }; - } - return av_buffersink_get_format_fptr(@ctx); - }; - public static int av_buffersink_get_format(AVFilterContext* @ctx) - { - return av_buffersink_get_format_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersink_get_frame_delegate(AVFilterContext* @ctx, AVFrame* @frame); - private static av_buffersink_get_frame_delegate av_buffersink_get_frame_fptr = (AVFilterContext* @ctx, AVFrame* @frame) => - { - av_buffersink_get_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_frame"); - if (av_buffersink_get_frame_fptr == null) - { - av_buffersink_get_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_frame")); - }; - } - return av_buffersink_get_frame_fptr(@ctx, @frame); - }; - /// Get a frame with filtered data from sink and put it in frame. - /// pointer to a context of a buffersink or abuffersink AVFilter. - /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() - /// - >= 0 if a frame was successfully returned. - AVERROR(EAGAIN) if no frames are available at this point; more input frames must be added to the filtergraph to get more output. - AVERROR_EOF if there will be no more output frames on this sink. - A different negative AVERROR code in other failure cases. 
- public static int av_buffersink_get_frame(AVFilterContext* @ctx, AVFrame* @frame) - { - return av_buffersink_get_frame_fptr(@ctx, @frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersink_get_frame_flags_delegate(AVFilterContext* @ctx, AVFrame* @frame, int @flags); - private static av_buffersink_get_frame_flags_delegate av_buffersink_get_frame_flags_fptr = (AVFilterContext* @ctx, AVFrame* @frame, int @flags) => - { - av_buffersink_get_frame_flags_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_frame_flags"); - if (av_buffersink_get_frame_flags_fptr == null) - { - av_buffersink_get_frame_flags_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_frame_flags")); - }; - } - return av_buffersink_get_frame_flags_fptr(@ctx, @frame, @flags); - }; - /// Get a frame with filtered data from sink and put it in frame. - /// pointer to a buffersink or abuffersink filter context. - /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() - /// a combination of AV_BUFFERSINK_FLAG_* flags - /// >= 0 in for success, a negative AVERROR code for failure. 
- public static int av_buffersink_get_frame_flags(AVFilterContext* @ctx, AVFrame* @frame, int @flags) - { - return av_buffersink_get_frame_flags_fptr(@ctx, @frame, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_buffersink_get_frame_rate_delegate(AVFilterContext* @ctx); - private static av_buffersink_get_frame_rate_delegate av_buffersink_get_frame_rate_fptr = (AVFilterContext* @ctx) => - { - av_buffersink_get_frame_rate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_frame_rate"); - if (av_buffersink_get_frame_rate_fptr == null) - { - av_buffersink_get_frame_rate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_frame_rate")); - }; - } - return av_buffersink_get_frame_rate_fptr(@ctx); - }; - public static AVRational av_buffersink_get_frame_rate(AVFilterContext* @ctx) - { - return av_buffersink_get_frame_rate_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersink_get_h_delegate(AVFilterContext* @ctx); - private static av_buffersink_get_h_delegate av_buffersink_get_h_fptr = (AVFilterContext* @ctx) => - { - av_buffersink_get_h_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_h"); - if (av_buffersink_get_h_fptr == null) - { - av_buffersink_get_h_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_h")); - }; - } - return av_buffersink_get_h_fptr(@ctx); - }; - public static int av_buffersink_get_h(AVFilterContext* @ctx) - { - return av_buffersink_get_h_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferRef* av_buffersink_get_hw_frames_ctx_delegate(AVFilterContext* @ctx); - private static av_buffersink_get_hw_frames_ctx_delegate 
av_buffersink_get_hw_frames_ctx_fptr = (AVFilterContext* @ctx) => - { - av_buffersink_get_hw_frames_ctx_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_hw_frames_ctx"); - if (av_buffersink_get_hw_frames_ctx_fptr == null) - { - av_buffersink_get_hw_frames_ctx_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_hw_frames_ctx")); - }; - } - return av_buffersink_get_hw_frames_ctx_fptr(@ctx); - }; - public static AVBufferRef* av_buffersink_get_hw_frames_ctx(AVFilterContext* @ctx) - { - return av_buffersink_get_hw_frames_ctx_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_buffersink_get_sample_aspect_ratio_delegate(AVFilterContext* @ctx); - private static av_buffersink_get_sample_aspect_ratio_delegate av_buffersink_get_sample_aspect_ratio_fptr = (AVFilterContext* @ctx) => - { - av_buffersink_get_sample_aspect_ratio_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_sample_aspect_ratio"); - if (av_buffersink_get_sample_aspect_ratio_fptr == null) - { - av_buffersink_get_sample_aspect_ratio_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_sample_aspect_ratio")); - }; - } - return av_buffersink_get_sample_aspect_ratio_fptr(@ctx); - }; - public static AVRational av_buffersink_get_sample_aspect_ratio(AVFilterContext* @ctx) - { - return av_buffersink_get_sample_aspect_ratio_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersink_get_sample_rate_delegate(AVFilterContext* @ctx); - private static av_buffersink_get_sample_rate_delegate av_buffersink_get_sample_rate_fptr = (AVFilterContext* @ctx) => - { - av_buffersink_get_sample_rate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), 
"av_buffersink_get_sample_rate"); - if (av_buffersink_get_sample_rate_fptr == null) - { - av_buffersink_get_sample_rate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_sample_rate")); - }; - } - return av_buffersink_get_sample_rate_fptr(@ctx); - }; - public static int av_buffersink_get_sample_rate(AVFilterContext* @ctx) - { - return av_buffersink_get_sample_rate_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersink_get_samples_delegate(AVFilterContext* @ctx, AVFrame* @frame, int @nb_samples); - private static av_buffersink_get_samples_delegate av_buffersink_get_samples_fptr = (AVFilterContext* @ctx, AVFrame* @frame, int @nb_samples) => - { - av_buffersink_get_samples_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_samples"); - if (av_buffersink_get_samples_fptr == null) - { - av_buffersink_get_samples_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_samples")); - }; - } - return av_buffersink_get_samples_fptr(@ctx, @frame, @nb_samples); - }; - /// Same as av_buffersink_get_frame(), but with the ability to specify the number of samples read. This function is less efficient than av_buffersink_get_frame(), because it copies the data around. - /// pointer to a context of the abuffersink AVFilter. - /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() frame will contain exactly nb_samples audio samples, except at the end of stream, when it can contain less than nb_samples. - /// The return codes have the same meaning as for av_buffersink_get_frame(). 
- public static int av_buffersink_get_samples(AVFilterContext* @ctx, AVFrame* @frame, int @nb_samples) - { - return av_buffersink_get_samples_fptr(@ctx, @frame, @nb_samples); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_buffersink_get_time_base_delegate(AVFilterContext* @ctx); - private static av_buffersink_get_time_base_delegate av_buffersink_get_time_base_fptr = (AVFilterContext* @ctx) => - { - av_buffersink_get_time_base_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_time_base"); - if (av_buffersink_get_time_base_fptr == null) - { - av_buffersink_get_time_base_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_time_base")); - }; - } - return av_buffersink_get_time_base_fptr(@ctx); - }; - public static AVRational av_buffersink_get_time_base(AVFilterContext* @ctx) - { - return av_buffersink_get_time_base_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVMediaType av_buffersink_get_type_delegate(AVFilterContext* @ctx); - private static av_buffersink_get_type_delegate av_buffersink_get_type_fptr = (AVFilterContext* @ctx) => - { - av_buffersink_get_type_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_type"); - if (av_buffersink_get_type_fptr == null) - { - av_buffersink_get_type_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_type")); - }; - } - return av_buffersink_get_type_fptr(@ctx); - }; - /// Get the properties of the stream @{ - public static AVMediaType av_buffersink_get_type(AVFilterContext* @ctx) - { - return av_buffersink_get_type_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersink_get_w_delegate(AVFilterContext* @ctx); - 
private static av_buffersink_get_w_delegate av_buffersink_get_w_fptr = (AVFilterContext* @ctx) => - { - av_buffersink_get_w_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_get_w"); - if (av_buffersink_get_w_fptr == null) - { - av_buffersink_get_w_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_get_w")); - }; - } - return av_buffersink_get_w_fptr(@ctx); - }; - public static int av_buffersink_get_w(AVFilterContext* @ctx) - { - return av_buffersink_get_w_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferSinkParams* av_buffersink_params_alloc_delegate(); - private static av_buffersink_params_alloc_delegate av_buffersink_params_alloc_fptr = () => - { - av_buffersink_params_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_params_alloc"); - if (av_buffersink_params_alloc_fptr == null) - { - av_buffersink_params_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_params_alloc")); - }; - } - return av_buffersink_params_alloc_fptr(); - }; - /// Create an AVBufferSinkParams structure. 
- [Obsolete("")] - public static AVBufferSinkParams* av_buffersink_params_alloc() - { - return av_buffersink_params_alloc_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_buffersink_set_frame_size_delegate(AVFilterContext* @ctx, uint @frame_size); - private static av_buffersink_set_frame_size_delegate av_buffersink_set_frame_size_fptr = (AVFilterContext* @ctx, uint @frame_size) => - { - av_buffersink_set_frame_size_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersink_set_frame_size"); - if (av_buffersink_set_frame_size_fptr == null) - { - av_buffersink_set_frame_size_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersink_set_frame_size")); - }; - } - av_buffersink_set_frame_size_fptr(@ctx, @frame_size); - }; - /// Set the frame size for an audio buffer sink. - public static void av_buffersink_set_frame_size(AVFilterContext* @ctx, uint @frame_size) - { - av_buffersink_set_frame_size_fptr(@ctx, @frame_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersrc_add_frame_delegate(AVFilterContext* @ctx, AVFrame* @frame); - private static av_buffersrc_add_frame_delegate av_buffersrc_add_frame_fptr = (AVFilterContext* @ctx, AVFrame* @frame) => - { - av_buffersrc_add_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersrc_add_frame"); - if (av_buffersrc_add_frame_fptr == null) - { - av_buffersrc_add_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersrc_add_frame")); - }; - } - return av_buffersrc_add_frame_fptr(@ctx, @frame); - }; - /// Add a frame to the buffer source. - /// an instance of the buffersrc filter - /// frame to be added. If the frame is reference counted, this function will take ownership of the reference(s) and reset the frame. 
Otherwise the frame data will be copied. If this function returns an error, the input frame is not touched. - /// 0 on success, a negative AVERROR on error. - public static int av_buffersrc_add_frame(AVFilterContext* @ctx, AVFrame* @frame) - { - return av_buffersrc_add_frame_fptr(@ctx, @frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersrc_add_frame_flags_delegate(AVFilterContext* @buffer_src, AVFrame* @frame, int @flags); - private static av_buffersrc_add_frame_flags_delegate av_buffersrc_add_frame_flags_fptr = (AVFilterContext* @buffer_src, AVFrame* @frame, int @flags) => - { - av_buffersrc_add_frame_flags_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersrc_add_frame_flags"); - if (av_buffersrc_add_frame_flags_fptr == null) - { - av_buffersrc_add_frame_flags_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersrc_add_frame_flags")); - }; - } - return av_buffersrc_add_frame_flags_fptr(@buffer_src, @frame, @flags); - }; - /// Add a frame to the buffer source. 
- /// pointer to a buffer source context - /// a frame, or NULL to mark EOF - /// a combination of AV_BUFFERSRC_FLAG_* - /// >= 0 in case of success, a negative AVERROR code in case of failure - public static int av_buffersrc_add_frame_flags(AVFilterContext* @buffer_src, AVFrame* @frame, int @flags) - { - return av_buffersrc_add_frame_flags_fptr(@buffer_src, @frame, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersrc_close_delegate(AVFilterContext* @ctx, long @pts, uint @flags); - private static av_buffersrc_close_delegate av_buffersrc_close_fptr = (AVFilterContext* @ctx, long @pts, uint @flags) => - { - av_buffersrc_close_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersrc_close"); - if (av_buffersrc_close_fptr == null) - { - av_buffersrc_close_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersrc_close")); - }; - } - return av_buffersrc_close_fptr(@ctx, @pts, @flags); - }; - /// Close the buffer source after EOF. 
- public static int av_buffersrc_close(AVFilterContext* @ctx, long @pts, uint @flags) - { - return av_buffersrc_close_fptr(@ctx, @pts, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint av_buffersrc_get_nb_failed_requests_delegate(AVFilterContext* @buffer_src); - private static av_buffersrc_get_nb_failed_requests_delegate av_buffersrc_get_nb_failed_requests_fptr = (AVFilterContext* @buffer_src) => - { - av_buffersrc_get_nb_failed_requests_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersrc_get_nb_failed_requests"); - if (av_buffersrc_get_nb_failed_requests_fptr == null) - { - av_buffersrc_get_nb_failed_requests_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersrc_get_nb_failed_requests")); - }; - } - return av_buffersrc_get_nb_failed_requests_fptr(@buffer_src); - }; - /// Get the number of failed requests. - public static uint av_buffersrc_get_nb_failed_requests(AVFilterContext* @buffer_src) - { - return av_buffersrc_get_nb_failed_requests_fptr(@buffer_src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferSrcParameters* av_buffersrc_parameters_alloc_delegate(); - private static av_buffersrc_parameters_alloc_delegate av_buffersrc_parameters_alloc_fptr = () => - { - av_buffersrc_parameters_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersrc_parameters_alloc"); - if (av_buffersrc_parameters_alloc_fptr == null) - { - av_buffersrc_parameters_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersrc_parameters_alloc")); - }; - } - return av_buffersrc_parameters_alloc_fptr(); - }; - /// Allocate a new AVBufferSrcParameters instance. It should be freed by the caller with av_free(). 
- public static AVBufferSrcParameters* av_buffersrc_parameters_alloc() - { - return av_buffersrc_parameters_alloc_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersrc_parameters_set_delegate(AVFilterContext* @ctx, AVBufferSrcParameters* @param); - private static av_buffersrc_parameters_set_delegate av_buffersrc_parameters_set_fptr = (AVFilterContext* @ctx, AVBufferSrcParameters* @param) => - { - av_buffersrc_parameters_set_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersrc_parameters_set"); - if (av_buffersrc_parameters_set_fptr == null) - { - av_buffersrc_parameters_set_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersrc_parameters_set")); - }; - } - return av_buffersrc_parameters_set_fptr(@ctx, @param); - }; - /// Initialize the buffersrc or abuffersrc filter with the provided parameters. This function may be called multiple times, the later calls override the previous ones. Some of the parameters may also be set through AVOptions, then whatever method is used last takes precedence. - /// an instance of the buffersrc or abuffersrc filter - /// the stream parameters. The frames later passed to this filter must conform to those parameters. All the allocated fields in param remain owned by the caller, libavfilter will make internal copies or references when necessary. - /// 0 on success, a negative AVERROR code on failure. 
- public static int av_buffersrc_parameters_set(AVFilterContext* @ctx, AVBufferSrcParameters* @param) - { - return av_buffersrc_parameters_set_fptr(@ctx, @param); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffersrc_write_frame_delegate(AVFilterContext* @ctx, AVFrame* @frame); - private static av_buffersrc_write_frame_delegate av_buffersrc_write_frame_fptr = (AVFilterContext* @ctx, AVFrame* @frame) => - { - av_buffersrc_write_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_buffersrc_write_frame"); - if (av_buffersrc_write_frame_fptr == null) - { - av_buffersrc_write_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffersrc_write_frame")); - }; - } - return av_buffersrc_write_frame_fptr(@ctx, @frame); - }; - /// Add a frame to the buffer source. - /// an instance of the buffersrc filter - /// frame to be added. If the frame is reference counted, this function will make a new reference to it. Otherwise the frame data will be copied. - /// 0 on success, a negative AVERROR on error - public static int av_buffersrc_write_frame(AVFilterContext* @ctx, AVFrame* @frame) - { - return av_buffersrc_write_frame_fptr(@ctx, @frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVFilter* av_filter_iterate_delegate(void** @opaque); - private static av_filter_iterate_delegate av_filter_iterate_fptr = (void** @opaque) => - { - av_filter_iterate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "av_filter_iterate"); - if (av_filter_iterate_fptr == null) - { - av_filter_iterate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_filter_iterate")); - }; - } - return av_filter_iterate_fptr(@opaque); - }; - /// Iterate over all registered filters. 
- /// a pointer where libavfilter will store the iteration state. Must point to NULL to start the iteration. - /// the next registered filter or NULL when the iteration is finished - public static AVFilter* av_filter_iterate(void** @opaque) - { - return av_filter_iterate_fptr(@opaque); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_config_links_delegate(AVFilterContext* @filter); - private static avfilter_config_links_delegate avfilter_config_links_fptr = (AVFilterContext* @filter) => - { - avfilter_config_links_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_config_links"); - if (avfilter_config_links_fptr == null) - { - avfilter_config_links_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_config_links")); - }; - } - return avfilter_config_links_fptr(@filter); - }; - /// Negotiate the media format, dimensions, etc of all inputs to a filter. - /// the filter to negotiate the properties for its inputs - /// zero on successful negotiation - public static int avfilter_config_links(AVFilterContext* @filter) - { - return avfilter_config_links_fptr(@filter); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avfilter_configuration_delegate(); - private static avfilter_configuration_delegate avfilter_configuration_fptr = () => - { - avfilter_configuration_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_configuration"); - if (avfilter_configuration_fptr == null) - { - avfilter_configuration_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_configuration")); - }; - } - return avfilter_configuration_fptr(); - }; - /// Return the libavfilter build-time configuration. 
- public static string avfilter_configuration() - { - return avfilter_configuration_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint avfilter_filter_pad_count_delegate(AVFilter* @filter, int @is_output); - private static avfilter_filter_pad_count_delegate avfilter_filter_pad_count_fptr = (AVFilter* @filter, int @is_output) => - { - avfilter_filter_pad_count_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_filter_pad_count"); - if (avfilter_filter_pad_count_fptr == null) - { - avfilter_filter_pad_count_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_filter_pad_count")); - }; - } - return avfilter_filter_pad_count_fptr(@filter, @is_output); - }; - /// Get the number of elements in an AVFilter's inputs or outputs array. - public static uint avfilter_filter_pad_count(AVFilter* @filter, int @is_output) - { - return avfilter_filter_pad_count_fptr(@filter, @is_output); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avfilter_free_delegate(AVFilterContext* @filter); - private static avfilter_free_delegate avfilter_free_fptr = (AVFilterContext* @filter) => - { - avfilter_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_free"); - if (avfilter_free_fptr == null) - { - avfilter_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_free")); - }; - } - avfilter_free_fptr(@filter); - }; - /// Free a filter context. This will also remove the filter from its filtergraph's list of filters. 
- /// the filter to free - public static void avfilter_free(AVFilterContext* @filter) - { - avfilter_free_fptr(@filter); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVFilter* avfilter_get_by_name_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static avfilter_get_by_name_delegate avfilter_get_by_name_fptr = (string @name) => - { - avfilter_get_by_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_get_by_name"); - if (avfilter_get_by_name_fptr == null) - { - avfilter_get_by_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_get_by_name")); - }; - } - return avfilter_get_by_name_fptr(@name); - }; - /// Get a filter definition matching the given name. - /// the filter name to find - /// the filter definition, if any matching one is registered. NULL if none found. - public static AVFilter* avfilter_get_by_name( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return avfilter_get_by_name_fptr(@name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVClass* avfilter_get_class_delegate(); - private static avfilter_get_class_delegate avfilter_get_class_fptr = () => - { - avfilter_get_class_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_get_class"); - if (avfilter_get_class_fptr == null) - { - avfilter_get_class_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_get_class")); - }; - } - return avfilter_get_class_fptr(); - }; - /// Returns AVClass for AVFilterContext. 
- /// AVClass for AVFilterContext. - public static AVClass* avfilter_get_class() - { - return avfilter_get_class_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVFilterGraph* avfilter_graph_alloc_delegate(); - private static avfilter_graph_alloc_delegate avfilter_graph_alloc_fptr = () => - { - avfilter_graph_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_alloc"); - if (avfilter_graph_alloc_fptr == null) - { - avfilter_graph_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_alloc")); - }; - } - return avfilter_graph_alloc_fptr(); - }; - /// Allocate a filter graph. - /// the allocated filter graph on success or NULL. - public static AVFilterGraph* avfilter_graph_alloc() - { - return avfilter_graph_alloc_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVFilterContext* avfilter_graph_alloc_filter_delegate(AVFilterGraph* @graph, AVFilter* @filter, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static avfilter_graph_alloc_filter_delegate avfilter_graph_alloc_filter_fptr = (AVFilterGraph* @graph, AVFilter* @filter, string @name) => - { - avfilter_graph_alloc_filter_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_alloc_filter"); - if (avfilter_graph_alloc_filter_fptr == null) - { - avfilter_graph_alloc_filter_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_alloc_filter")); - }; - } - return avfilter_graph_alloc_filter_fptr(@graph, @filter, @name); - }; - /// Create a new filter instance in a filter graph. 
- /// graph in which the new filter will be used - /// the filter to create an instance of - /// Name to give to the new instance (will be copied to AVFilterContext.name). This may be used by the caller to identify different filters, libavfilter itself assigns no semantics to this parameter. May be NULL. - /// the context of the newly created filter instance (note that it is also retrievable directly through AVFilterGraph.filters or with avfilter_graph_get_filter()) on success or NULL on failure. - public static AVFilterContext* avfilter_graph_alloc_filter(AVFilterGraph* @graph, AVFilter* @filter, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return avfilter_graph_alloc_filter_fptr(@graph, @filter, @name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_graph_config_delegate(AVFilterGraph* @graphctx, void* @log_ctx); - private static avfilter_graph_config_delegate avfilter_graph_config_fptr = (AVFilterGraph* @graphctx, void* @log_ctx) => - { - avfilter_graph_config_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_config"); - if (avfilter_graph_config_fptr == null) - { - avfilter_graph_config_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_config")); - }; - } - return avfilter_graph_config_fptr(@graphctx, @log_ctx); - }; - /// Check validity and configure all the links and formats in the graph. 
- /// the filter graph - /// context used for logging - /// >= 0 in case of success, a negative AVERROR code otherwise - public static int avfilter_graph_config(AVFilterGraph* @graphctx, void* @log_ctx) - { - return avfilter_graph_config_fptr(@graphctx, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_graph_create_filter_delegate(AVFilterContext** @filt_ctx, AVFilter* @filt, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @args, void* @opaque, AVFilterGraph* @graph_ctx); - private static avfilter_graph_create_filter_delegate avfilter_graph_create_filter_fptr = (AVFilterContext** @filt_ctx, AVFilter* @filt, string @name, string @args, void* @opaque, AVFilterGraph* @graph_ctx) => - { - avfilter_graph_create_filter_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_create_filter"); - if (avfilter_graph_create_filter_fptr == null) - { - avfilter_graph_create_filter_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_create_filter")); - }; - } - return avfilter_graph_create_filter_fptr(@filt_ctx, @filt, @name, @args, @opaque, @graph_ctx); - }; - /// Create and add a filter instance into an existing graph. The filter instance is created from the filter filt and inited with the parameter args. opaque is currently ignored. 
- /// the instance name to give to the created filter instance - /// the filter graph - /// a negative AVERROR error code in case of failure, a non negative value otherwise - public static int avfilter_graph_create_filter(AVFilterContext** @filt_ctx, AVFilter* @filt, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @args, void* @opaque, AVFilterGraph* @graph_ctx) - { - return avfilter_graph_create_filter_fptr(@filt_ctx, @filt, @name, @args, @opaque, @graph_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* avfilter_graph_dump_delegate(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @options); - private static avfilter_graph_dump_delegate avfilter_graph_dump_fptr = (AVFilterGraph* @graph, string @options) => - { - avfilter_graph_dump_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_dump"); - if (avfilter_graph_dump_fptr == null) - { - avfilter_graph_dump_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_dump")); - }; - } - return avfilter_graph_dump_fptr(@graph, @options); - }; - /// Dump a graph into a human-readable string representation. 
- /// the graph to dump - /// formatting options; currently ignored - /// a string, or NULL in case of memory allocation failure; the string must be freed using av_free - public static byte* avfilter_graph_dump(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @options) - { - return avfilter_graph_dump_fptr(@graph, @options); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avfilter_graph_free_delegate(AVFilterGraph** @graph); - private static avfilter_graph_free_delegate avfilter_graph_free_fptr = (AVFilterGraph** @graph) => - { - avfilter_graph_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_free"); - if (avfilter_graph_free_fptr == null) - { - avfilter_graph_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_free")); - }; - } - avfilter_graph_free_fptr(@graph); - }; - /// Free a graph, destroy its links, and set *graph to NULL. If *graph is NULL, do nothing. 
- public static void avfilter_graph_free(AVFilterGraph** @graph) - { - avfilter_graph_free_fptr(@graph); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVFilterContext* avfilter_graph_get_filter_delegate(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static avfilter_graph_get_filter_delegate avfilter_graph_get_filter_fptr = (AVFilterGraph* @graph, string @name) => - { - avfilter_graph_get_filter_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_get_filter"); - if (avfilter_graph_get_filter_fptr == null) - { - avfilter_graph_get_filter_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_get_filter")); - }; - } - return avfilter_graph_get_filter_fptr(@graph, @name); - }; - /// Get a filter instance identified by instance name from graph. - /// filter graph to search through. - /// filter instance name (should be unique in the graph). - /// the pointer to the found filter instance or NULL if it cannot be found. 
- public static AVFilterContext* avfilter_graph_get_filter(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return avfilter_graph_get_filter_fptr(@graph, @name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_graph_parse_delegate(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filters, AVFilterInOut* @inputs, AVFilterInOut* @outputs, void* @log_ctx); - private static avfilter_graph_parse_delegate avfilter_graph_parse_fptr = (AVFilterGraph* @graph, string @filters, AVFilterInOut* @inputs, AVFilterInOut* @outputs, void* @log_ctx) => - { - avfilter_graph_parse_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_parse"); - if (avfilter_graph_parse_fptr == null) - { - avfilter_graph_parse_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_parse")); - }; - } - return avfilter_graph_parse_fptr(@graph, @filters, @inputs, @outputs, @log_ctx); - }; - /// Add a graph described by a string to a graph. 
- /// the filter graph where to link the parsed graph context - /// string to be parsed - /// linked list to the inputs of the graph - /// linked list to the outputs of the graph - /// zero on success, a negative AVERROR code on error - public static int avfilter_graph_parse(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filters, AVFilterInOut* @inputs, AVFilterInOut* @outputs, void* @log_ctx) - { - return avfilter_graph_parse_fptr(@graph, @filters, @inputs, @outputs, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_graph_parse_ptr_delegate(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs, void* @log_ctx); - private static avfilter_graph_parse_ptr_delegate avfilter_graph_parse_ptr_fptr = (AVFilterGraph* @graph, string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs, void* @log_ctx) => - { - avfilter_graph_parse_ptr_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_parse_ptr"); - if (avfilter_graph_parse_ptr_fptr == null) - { - avfilter_graph_parse_ptr_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_parse_ptr")); - }; - } - return avfilter_graph_parse_ptr_fptr(@graph, @filters, @inputs, @outputs, @log_ctx); - }; - /// Add a graph described by a string to a graph. - /// the filter graph where to link the parsed graph context - /// string to be parsed - /// pointer to a linked list to the inputs of the graph, may be NULL. 
If non-NULL, *inputs is updated to contain the list of open inputs after the parsing, should be freed with avfilter_inout_free(). - /// pointer to a linked list to the outputs of the graph, may be NULL. If non-NULL, *outputs is updated to contain the list of open outputs after the parsing, should be freed with avfilter_inout_free(). - /// non negative on success, a negative AVERROR code on error - public static int avfilter_graph_parse_ptr(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs, void* @log_ctx) - { - return avfilter_graph_parse_ptr_fptr(@graph, @filters, @inputs, @outputs, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_graph_parse2_delegate(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs); - private static avfilter_graph_parse2_delegate avfilter_graph_parse2_fptr = (AVFilterGraph* @graph, string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs) => - { - avfilter_graph_parse2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_parse2"); - if (avfilter_graph_parse2_fptr == null) - { - avfilter_graph_parse2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_parse2")); - }; - } - return avfilter_graph_parse2_fptr(@graph, @filters, @inputs, @outputs); - }; - /// Add a graph described by a string to a graph. 
- /// the filter graph where to link the parsed graph context - /// string to be parsed - /// a linked list of all free (unlinked) inputs of the parsed graph will be returned here. It is to be freed by the caller using avfilter_inout_free(). - /// a linked list of all free (unlinked) outputs of the parsed graph will be returned here. It is to be freed by the caller using avfilter_inout_free(). - /// zero on success, a negative AVERROR code on error - public static int avfilter_graph_parse2(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs) - { - return avfilter_graph_parse2_fptr(@graph, @filters, @inputs, @outputs); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_graph_queue_command_delegate(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @target, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @cmd, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @arg, int @flags, double @ts); - private static avfilter_graph_queue_command_delegate avfilter_graph_queue_command_fptr = (AVFilterGraph* @graph, string @target, string @cmd, string @arg, int @flags, double @ts) => - { - avfilter_graph_queue_command_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_queue_command"); - if (avfilter_graph_queue_command_fptr == null) - { - avfilter_graph_queue_command_fptr 
= delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_queue_command")); - }; - } - return avfilter_graph_queue_command_fptr(@graph, @target, @cmd, @arg, @flags, @ts); - }; - /// Queue a command for one or more filter instances. - /// the filter graph - /// the filter(s) to which the command should be sent "all" sends to all filters otherwise it can be a filter or filter instance name which will send the command to all matching filters. - /// the command to sent, for handling simplicity all commands must be alphanumeric only - /// the argument for the command - /// time at which the command should be sent to the filter - public static int avfilter_graph_queue_command(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @target, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @cmd, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @arg, int @flags, double @ts) - { - return avfilter_graph_queue_command_fptr(@graph, @target, @cmd, @arg, @flags, @ts); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_graph_request_oldest_delegate(AVFilterGraph* @graph); - private static avfilter_graph_request_oldest_delegate avfilter_graph_request_oldest_fptr = (AVFilterGraph* @graph) => - { - avfilter_graph_request_oldest_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_request_oldest"); - if (avfilter_graph_request_oldest_fptr == null) - { - avfilter_graph_request_oldest_fptr = delegate - { - throw new 
PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_request_oldest")); - }; - } - return avfilter_graph_request_oldest_fptr(@graph); - }; - /// Request a frame on the oldest sink link. - /// the return value of ff_request_frame(), or AVERROR_EOF if all links returned AVERROR_EOF - public static int avfilter_graph_request_oldest(AVFilterGraph* @graph) - { - return avfilter_graph_request_oldest_fptr(@graph); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_graph_send_command_delegate(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @target, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @cmd, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @arg, byte* @res, int @res_len, int @flags); - private static avfilter_graph_send_command_delegate avfilter_graph_send_command_fptr = (AVFilterGraph* @graph, string @target, string @cmd, string @arg, byte* @res, int @res_len, int @flags) => - { - avfilter_graph_send_command_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_send_command"); - if (avfilter_graph_send_command_fptr == null) - { - avfilter_graph_send_command_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_send_command")); - }; - } - return avfilter_graph_send_command_fptr(@graph, @target, @cmd, @arg, @res, @res_len, @flags); - }; - /// Send a command to one or more filter instances. 
- /// the filter graph - /// the filter(s) to which the command should be sent "all" sends to all filters otherwise it can be a filter or filter instance name which will send the command to all matching filters. - /// the command to send, for handling simplicity all commands must be alphanumeric only - /// the argument for the command - /// a buffer with size res_size where the filter(s) can return a response. - public static int avfilter_graph_send_command(AVFilterGraph* @graph, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @target, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @cmd, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @arg, byte* @res, int @res_len, int @flags) - { - return avfilter_graph_send_command_fptr(@graph, @target, @cmd, @arg, @res, @res_len, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avfilter_graph_set_auto_convert_delegate(AVFilterGraph* @graph, uint @flags); - private static avfilter_graph_set_auto_convert_delegate avfilter_graph_set_auto_convert_fptr = (AVFilterGraph* @graph, uint @flags) => - { - avfilter_graph_set_auto_convert_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_graph_set_auto_convert"); - if (avfilter_graph_set_auto_convert_fptr == null) - { - avfilter_graph_set_auto_convert_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_graph_set_auto_convert")); - }; - } - avfilter_graph_set_auto_convert_fptr(@graph, @flags); - }; - /// Enable or disable automatic format 
conversion inside the graph. - /// any of the AVFILTER_AUTO_CONVERT_* constants - public static void avfilter_graph_set_auto_convert(AVFilterGraph* @graph, uint @flags) - { - avfilter_graph_set_auto_convert_fptr(@graph, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_init_dict_delegate(AVFilterContext* @ctx, AVDictionary** @options); - private static avfilter_init_dict_delegate avfilter_init_dict_fptr = (AVFilterContext* @ctx, AVDictionary** @options) => - { - avfilter_init_dict_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_init_dict"); - if (avfilter_init_dict_fptr == null) - { - avfilter_init_dict_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_init_dict")); - }; - } - return avfilter_init_dict_fptr(@ctx, @options); - }; - /// Initialize a filter with the supplied dictionary of options. - /// uninitialized filter context to initialize - /// An AVDictionary filled with options for this filter. On return this parameter will be destroyed and replaced with a dict containing options that were not found. This dictionary must be freed by the caller. May be NULL, then this function is equivalent to avfilter_init_str() with the second parameter set to NULL. 
- /// 0 on success, a negative AVERROR on failure - public static int avfilter_init_dict(AVFilterContext* @ctx, AVDictionary** @options) - { - return avfilter_init_dict_fptr(@ctx, @options); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_init_str_delegate(AVFilterContext* @ctx, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @args); - private static avfilter_init_str_delegate avfilter_init_str_fptr = (AVFilterContext* @ctx, string @args) => - { - avfilter_init_str_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_init_str"); - if (avfilter_init_str_fptr == null) - { - avfilter_init_str_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_init_str")); - }; - } - return avfilter_init_str_fptr(@ctx, @args); - }; - /// Initialize a filter with the supplied parameters. - /// uninitialized filter context to initialize - /// Options to initialize the filter with. This must be a ':'-separated list of options in the 'key=value' form. May be NULL if the options have been set directly using the AVOptions API or there are no options that need to be set. 
- /// 0 on success, a negative AVERROR on failure - public static int avfilter_init_str(AVFilterContext* @ctx, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @args) - { - return avfilter_init_str_fptr(@ctx, @args); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVFilterInOut* avfilter_inout_alloc_delegate(); - private static avfilter_inout_alloc_delegate avfilter_inout_alloc_fptr = () => - { - avfilter_inout_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_inout_alloc"); - if (avfilter_inout_alloc_fptr == null) - { - avfilter_inout_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_inout_alloc")); - }; - } - return avfilter_inout_alloc_fptr(); - }; - /// Allocate a single AVFilterInOut entry. Must be freed with avfilter_inout_free(). - /// allocated AVFilterInOut on success, NULL on failure. - public static AVFilterInOut* avfilter_inout_alloc() - { - return avfilter_inout_alloc_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avfilter_inout_free_delegate(AVFilterInOut** @inout); - private static avfilter_inout_free_delegate avfilter_inout_free_fptr = (AVFilterInOut** @inout) => - { - avfilter_inout_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_inout_free"); - if (avfilter_inout_free_fptr == null) - { - avfilter_inout_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_inout_free")); - }; - } - avfilter_inout_free_fptr(@inout); - }; - /// Free the supplied list of AVFilterInOut and set *inout to NULL. If *inout is NULL, do nothing. 
- public static void avfilter_inout_free(AVFilterInOut** @inout) - { - avfilter_inout_free_fptr(@inout); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_insert_filter_delegate(AVFilterLink* @link, AVFilterContext* @filt, uint @filt_srcpad_idx, uint @filt_dstpad_idx); - private static avfilter_insert_filter_delegate avfilter_insert_filter_fptr = (AVFilterLink* @link, AVFilterContext* @filt, uint @filt_srcpad_idx, uint @filt_dstpad_idx) => - { - avfilter_insert_filter_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_insert_filter"); - if (avfilter_insert_filter_fptr == null) - { - avfilter_insert_filter_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_insert_filter")); - }; - } - return avfilter_insert_filter_fptr(@link, @filt, @filt_srcpad_idx, @filt_dstpad_idx); - }; - /// Insert a filter in the middle of an existing link. - /// the link into which the filter should be inserted - /// the filter to be inserted - /// the input pad on the filter to connect - /// the output pad on the filter to connect - /// zero on success - public static int avfilter_insert_filter(AVFilterLink* @link, AVFilterContext* @filt, uint @filt_srcpad_idx, uint @filt_dstpad_idx) - { - return avfilter_insert_filter_fptr(@link, @filt, @filt_srcpad_idx, @filt_dstpad_idx); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avfilter_license_delegate(); - private static avfilter_license_delegate avfilter_license_fptr = () => - { - avfilter_license_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_license"); - if (avfilter_license_fptr == null) - { - avfilter_license_fptr = delegate - { - throw new 
PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_license")); - }; - } - return avfilter_license_fptr(); - }; - /// Return the libavfilter license. - public static string avfilter_license() - { - return avfilter_license_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_link_delegate(AVFilterContext* @src, uint @srcpad, AVFilterContext* @dst, uint @dstpad); - private static avfilter_link_delegate avfilter_link_fptr = (AVFilterContext* @src, uint @srcpad, AVFilterContext* @dst, uint @dstpad) => - { - avfilter_link_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_link"); - if (avfilter_link_fptr == null) - { - avfilter_link_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_link")); - }; - } - return avfilter_link_fptr(@src, @srcpad, @dst, @dstpad); - }; - /// Link two filters together. - /// the source filter - /// index of the output pad on the source filter - /// the destination filter - /// index of the input pad on the destination filter - /// zero on success - public static int avfilter_link(AVFilterContext* @src, uint @srcpad, AVFilterContext* @dst, uint @dstpad) - { - return avfilter_link_fptr(@src, @srcpad, @dst, @dstpad); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avfilter_link_free_delegate(AVFilterLink** @link); - private static avfilter_link_free_delegate avfilter_link_free_fptr = (AVFilterLink** @link) => - { - avfilter_link_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_link_free"); - if (avfilter_link_free_fptr == null) - { - avfilter_link_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_link_free")); - }; - } - avfilter_link_free_fptr(@link); - }; - /// Free the link in *link, and set 
its pointer to NULL. - public static void avfilter_link_free(AVFilterLink** @link) - { - avfilter_link_free_fptr(@link); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_pad_count_delegate(AVFilterPad* @pads); - private static avfilter_pad_count_delegate avfilter_pad_count_fptr = (AVFilterPad* @pads) => - { - avfilter_pad_count_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_pad_count"); - if (avfilter_pad_count_fptr == null) - { - avfilter_pad_count_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_pad_count")); - }; - } - return avfilter_pad_count_fptr(@pads); - }; - /// Get the number of elements in an AVFilter's inputs or outputs array. - [Obsolete("Use avfilter_filter_pad_count() instead.")] - public static int avfilter_pad_count(AVFilterPad* @pads) - { - return avfilter_pad_count_fptr(@pads); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avfilter_pad_get_name_delegate(AVFilterPad* @pads, int @pad_idx); - private static avfilter_pad_get_name_delegate avfilter_pad_get_name_fptr = (AVFilterPad* @pads, int @pad_idx) => - { - avfilter_pad_get_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_pad_get_name"); - if (avfilter_pad_get_name_fptr == null) - { - avfilter_pad_get_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_pad_get_name")); - }; - } - return avfilter_pad_get_name_fptr(@pads, @pad_idx); - }; - /// Get the name of an AVFilterPad. 
- /// an array of AVFilterPads - /// index of the pad in the array; it is the caller's responsibility to ensure the index is valid - /// name of the pad_idx'th pad in pads - public static string avfilter_pad_get_name(AVFilterPad* @pads, int @pad_idx) - { - return avfilter_pad_get_name_fptr(@pads, @pad_idx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVMediaType avfilter_pad_get_type_delegate(AVFilterPad* @pads, int @pad_idx); - private static avfilter_pad_get_type_delegate avfilter_pad_get_type_fptr = (AVFilterPad* @pads, int @pad_idx) => - { - avfilter_pad_get_type_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_pad_get_type"); - if (avfilter_pad_get_type_fptr == null) - { - avfilter_pad_get_type_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_pad_get_type")); - }; - } - return avfilter_pad_get_type_fptr(@pads, @pad_idx); - }; - /// Get the type of an AVFilterPad. 
- /// an array of AVFilterPads - /// index of the pad in the array; it is the caller's responsibility to ensure the index is valid - /// type of the pad_idx'th pad in pads - public static AVMediaType avfilter_pad_get_type(AVFilterPad* @pads, int @pad_idx) - { - return avfilter_pad_get_type_fptr(@pads, @pad_idx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avfilter_process_command_delegate(AVFilterContext* @filter, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @cmd, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @arg, byte* @res, int @res_len, int @flags); - private static avfilter_process_command_delegate avfilter_process_command_fptr = (AVFilterContext* @filter, string @cmd, string @arg, byte* @res, int @res_len, int @flags) => - { - avfilter_process_command_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_process_command"); - if (avfilter_process_command_fptr == null) - { - avfilter_process_command_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_process_command")); - }; - } - return avfilter_process_command_fptr(@filter, @cmd, @arg, @res, @res_len, @flags); - }; - /// Make the filter instance process a command. It is recommended to use avfilter_graph_send_command(). 
- public static int avfilter_process_command(AVFilterContext* @filter, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @cmd, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @arg, byte* @res, int @res_len, int @flags) - { - return avfilter_process_command_fptr(@filter, @cmd, @arg, @res, @res_len, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint avfilter_version_delegate(); - private static avfilter_version_delegate avfilter_version_fptr = () => - { - avfilter_version_fptr = GetFunctionDelegate(GetOrLoadLibrary("avfilter"), "avfilter_version"); - if (avfilter_version_fptr == null) - { - avfilter_version_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avfilter_version")); - }; - } - return avfilter_version_fptr(); - }; - /// Return the LIBAVFILTER_VERSION_INT constant. 
- public static uint avfilter_version() - { - return avfilter_version_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_add_index_entry_delegate(AVStream* @st, long @pos, long @timestamp, int @size, int @distance, int @flags); - private static av_add_index_entry_delegate av_add_index_entry_fptr = (AVStream* @st, long @pos, long @timestamp, int @size, int @distance, int @flags) => - { - av_add_index_entry_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_add_index_entry"); - if (av_add_index_entry_fptr == null) - { - av_add_index_entry_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_add_index_entry")); - }; - } - return av_add_index_entry_fptr(@st, @pos, @timestamp, @size, @distance, @flags); - }; - /// Add an index entry into a sorted list. Update the entry if the list already contains it. - /// timestamp in the time base of the given stream - public static int av_add_index_entry(AVStream* @st, long @pos, long @timestamp, int @size, int @distance, int @flags) - { - return av_add_index_entry_fptr(@st, @pos, @timestamp, @size, @distance, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_append_packet_delegate(AVIOContext* @s, AVPacket* @pkt, int @size); - private static av_append_packet_delegate av_append_packet_fptr = (AVIOContext* @s, AVPacket* @pkt, int @size) => - { - av_append_packet_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_append_packet"); - if (av_append_packet_fptr == null) - { - av_append_packet_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_append_packet")); - }; - } - return av_append_packet_fptr(@s, @pkt, @size); - }; - /// Read data and append it to the current content of the AVPacket. If pkt->size is 0 this is identical to av_get_packet. 
Note that this uses av_grow_packet and thus involves a realloc which is inefficient. Thus this function should only be used when there is no reasonable way to know (an upper bound of) the final size. - /// associated IO context - /// packet - /// amount of data to read - /// >0 (read size) if OK, AVERROR_xxx otherwise, previous data will not be lost even if an error occurs. - public static int av_append_packet(AVIOContext* @s, AVPacket* @pkt, int @size) - { - return av_append_packet_fptr(@s, @pkt, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecID av_codec_get_id_delegate(AVCodecTag** @tags, uint @tag); - private static av_codec_get_id_delegate av_codec_get_id_fptr = (AVCodecTag** @tags, uint @tag) => - { - av_codec_get_id_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_codec_get_id"); - if (av_codec_get_id_fptr == null) - { - av_codec_get_id_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_codec_get_id")); - }; - } - return av_codec_get_id_fptr(@tags, @tag); - }; - /// Get the AVCodecID for the given codec tag tag. If no codec id is found returns AV_CODEC_ID_NONE. 
- /// list of supported codec_id-codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag - /// codec tag to match to a codec ID - public static AVCodecID av_codec_get_id(AVCodecTag** @tags, uint @tag) - { - return av_codec_get_id_fptr(@tags, @tag); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint av_codec_get_tag_delegate(AVCodecTag** @tags, AVCodecID @id); - private static av_codec_get_tag_delegate av_codec_get_tag_fptr = (AVCodecTag** @tags, AVCodecID @id) => - { - av_codec_get_tag_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_codec_get_tag"); - if (av_codec_get_tag_fptr == null) - { - av_codec_get_tag_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_codec_get_tag")); - }; - } - return av_codec_get_tag_fptr(@tags, @id); - }; - /// Get the codec tag for the given codec id id. If no codec tag is found returns 0. - /// list of supported codec_id-codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag - /// codec ID to match to a codec tag - public static uint av_codec_get_tag(AVCodecTag** @tags, AVCodecID @id) - { - return av_codec_get_tag_fptr(@tags, @id); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_codec_get_tag2_delegate(AVCodecTag** @tags, AVCodecID @id, uint* @tag); - private static av_codec_get_tag2_delegate av_codec_get_tag2_fptr = (AVCodecTag** @tags, AVCodecID @id, uint* @tag) => - { - av_codec_get_tag2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_codec_get_tag2"); - if (av_codec_get_tag2_fptr == null) - { - av_codec_get_tag2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_codec_get_tag2")); - }; - } - return av_codec_get_tag2_fptr(@tags, @id, @tag); - }; - /// Get the codec tag for the given codec id. 
- /// list of supported codec_id - codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag - /// codec id that should be searched for in the list - /// A pointer to the found tag - /// 0 if id was not found in tags, > 0 if it was found - public static int av_codec_get_tag2(AVCodecTag** @tags, AVCodecID @id, uint* @tag) - { - return av_codec_get_tag2_fptr(@tags, @id, @tag); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVInputFormat* av_demuxer_iterate_delegate(void** @opaque); - private static av_demuxer_iterate_delegate av_demuxer_iterate_fptr = (void** @opaque) => - { - av_demuxer_iterate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_demuxer_iterate"); - if (av_demuxer_iterate_fptr == null) - { - av_demuxer_iterate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_demuxer_iterate")); - }; - } - return av_demuxer_iterate_fptr(@opaque); - }; - /// Iterate over all registered demuxers. - /// a pointer where libavformat will store the iteration state. Must point to NULL to start the iteration. 
- /// the next registered demuxer or NULL when the iteration is finished - public static AVInputFormat* av_demuxer_iterate(void** @opaque) - { - return av_demuxer_iterate_fptr(@opaque); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_disposition_from_string_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @disp); - private static av_disposition_from_string_delegate av_disposition_from_string_fptr = (string @disp) => - { - av_disposition_from_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_disposition_from_string"); - if (av_disposition_from_string_fptr == null) - { - av_disposition_from_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_disposition_from_string")); - }; - } - return av_disposition_from_string_fptr(@disp); - }; - /// Returns The AV_DISPOSITION_* flag corresponding to disp or a negative error code if disp does not correspond to a known stream disposition. - /// The AV_DISPOSITION_* flag corresponding to disp or a negative error code if disp does not correspond to a known stream disposition. 
- public static int av_disposition_from_string( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @disp) - { - return av_disposition_from_string_fptr(@disp); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_disposition_to_string_delegate(int @disposition); - private static av_disposition_to_string_delegate av_disposition_to_string_fptr = (int @disposition) => - { - av_disposition_to_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_disposition_to_string"); - if (av_disposition_to_string_fptr == null) - { - av_disposition_to_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_disposition_to_string")); - }; - } - return av_disposition_to_string_fptr(@disposition); - }; - /// Returns The string description corresponding to the lowest set bit in disposition. NULL when the lowest set bit does not correspond to a known disposition or when disposition is 0. - /// a combination of AV_DISPOSITION_* values - /// The string description corresponding to the lowest set bit in disposition. NULL when the lowest set bit does not correspond to a known disposition or when disposition is 0. 
- public static string av_disposition_to_string(int @disposition) - { - return av_disposition_to_string_fptr(@disposition); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_dump_format_delegate(AVFormatContext* @ic, int @index, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, int @is_output); - private static av_dump_format_delegate av_dump_format_fptr = (AVFormatContext* @ic, int @index, string @url, int @is_output) => - { - av_dump_format_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_dump_format"); - if (av_dump_format_fptr == null) - { - av_dump_format_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dump_format")); - }; - } - av_dump_format_fptr(@ic, @index, @url, @is_output); - }; - /// Print detailed information about the input or output format, such as duration, bitrate, streams, container, programs, metadata, side data, codec and time base. 
- /// the context to analyze - /// index of the stream to dump information about - /// the URL to print, such as source or destination file - /// Select whether the specified context is an input(0) or output(1) - public static void av_dump_format(AVFormatContext* @ic, int @index, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, int @is_output) - { - av_dump_format_fptr(@ic, @index, @url, @is_output); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_filename_number_test_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filename); - private static av_filename_number_test_delegate av_filename_number_test_fptr = (string @filename) => - { - av_filename_number_test_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_filename_number_test"); - if (av_filename_number_test_fptr == null) - { - av_filename_number_test_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_filename_number_test")); - }; - } - return av_filename_number_test_fptr(@filename); - }; - /// Check whether filename actually is a numbered sequence generator. 
- /// possible numbered sequence string - /// 1 if a valid numbered sequence string, 0 otherwise - public static int av_filename_number_test( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filename) - { - return av_filename_number_test_fptr(@filename); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_find_best_stream_delegate(AVFormatContext* @ic, AVMediaType @type, int @wanted_stream_nb, int @related_stream, AVCodec** @decoder_ret, int @flags); - private static av_find_best_stream_delegate av_find_best_stream_fptr = (AVFormatContext* @ic, AVMediaType @type, int @wanted_stream_nb, int @related_stream, AVCodec** @decoder_ret, int @flags) => - { - av_find_best_stream_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_find_best_stream"); - if (av_find_best_stream_fptr == null) - { - av_find_best_stream_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_find_best_stream")); - }; - } - return av_find_best_stream_fptr(@ic, @type, @wanted_stream_nb, @related_stream, @decoder_ret, @flags); - }; - /// Find the "best" stream in the file. The best stream is determined according to various heuristics as the most likely to be what the user expects. If the decoder parameter is non-NULL, av_find_best_stream will find the default decoder for the stream's codec; streams for which no decoder can be found are ignored. - /// media file handle - /// stream type: video, audio, subtitles, etc. - /// user-requested stream number, or -1 for automatic selection - /// try to find a stream related (eg. 
in the same program) to this one, or -1 if none - /// if non-NULL, returns the decoder for the selected stream - /// flags; none are currently defined - /// the non-negative stream number in case of success, AVERROR_STREAM_NOT_FOUND if no stream with the requested type could be found, AVERROR_DECODER_NOT_FOUND if streams were found but no decoder - public static int av_find_best_stream(AVFormatContext* @ic, AVMediaType @type, int @wanted_stream_nb, int @related_stream, AVCodec** @decoder_ret, int @flags) - { - return av_find_best_stream_fptr(@ic, @type, @wanted_stream_nb, @related_stream, @decoder_ret, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_find_default_stream_index_delegate(AVFormatContext* @s); - private static av_find_default_stream_index_delegate av_find_default_stream_index_fptr = (AVFormatContext* @s) => - { - av_find_default_stream_index_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_find_default_stream_index"); - if (av_find_default_stream_index_fptr == null) - { - av_find_default_stream_index_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_find_default_stream_index")); - }; - } - return av_find_default_stream_index_fptr(@s); - }; - public static int av_find_default_stream_index(AVFormatContext* @s) - { - return av_find_default_stream_index_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVInputFormat* av_find_input_format_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @short_name); - private static av_find_input_format_delegate av_find_input_format_fptr = (string @short_name) => - { - av_find_input_format_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_find_input_format"); - 
if (av_find_input_format_fptr == null) - { - av_find_input_format_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_find_input_format")); - }; - } - return av_find_input_format_fptr(@short_name); - }; - /// Find AVInputFormat based on the short name of the input format. - public static AVInputFormat* av_find_input_format( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @short_name) - { - return av_find_input_format_fptr(@short_name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVProgram* av_find_program_from_stream_delegate(AVFormatContext* @ic, AVProgram* @last, int @s); - private static av_find_program_from_stream_delegate av_find_program_from_stream_fptr = (AVFormatContext* @ic, AVProgram* @last, int @s) => - { - av_find_program_from_stream_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_find_program_from_stream"); - if (av_find_program_from_stream_fptr == null) - { - av_find_program_from_stream_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_find_program_from_stream")); - }; - } - return av_find_program_from_stream_fptr(@ic, @last, @s); - }; - /// Find the programs which belong to a given stream. - /// media file handle - /// the last found program, the search will start after this program, or from the beginning if it is NULL - /// stream index - /// the next program which belongs to s, NULL if no program is found or the last program is not among the programs of ic. 
- public static AVProgram* av_find_program_from_stream(AVFormatContext* @ic, AVProgram* @last, int @s) - { - return av_find_program_from_stream_fptr(@ic, @last, @s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVDurationEstimationMethod av_fmt_ctx_get_duration_estimation_method_delegate(AVFormatContext* @ctx); - private static av_fmt_ctx_get_duration_estimation_method_delegate av_fmt_ctx_get_duration_estimation_method_fptr = (AVFormatContext* @ctx) => - { - av_fmt_ctx_get_duration_estimation_method_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_fmt_ctx_get_duration_estimation_method"); - if (av_fmt_ctx_get_duration_estimation_method_fptr == null) - { - av_fmt_ctx_get_duration_estimation_method_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_fmt_ctx_get_duration_estimation_method")); - }; - } - return av_fmt_ctx_get_duration_estimation_method_fptr(@ctx); - }; - /// Returns the method used to set ctx->duration. - /// AVFMT_DURATION_FROM_PTS, AVFMT_DURATION_FROM_STREAM, or AVFMT_DURATION_FROM_BITRATE. 
- public static AVDurationEstimationMethod av_fmt_ctx_get_duration_estimation_method(AVFormatContext* @ctx) - { - return av_fmt_ctx_get_duration_estimation_method_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_format_inject_global_side_data_delegate(AVFormatContext* @s); - private static av_format_inject_global_side_data_delegate av_format_inject_global_side_data_fptr = (AVFormatContext* @s) => - { - av_format_inject_global_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_format_inject_global_side_data"); - if (av_format_inject_global_side_data_fptr == null) - { - av_format_inject_global_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_format_inject_global_side_data")); - }; - } - av_format_inject_global_side_data_fptr(@s); - }; - /// This function will cause global side data to be injected in the next packet of each stream as well as after any subsequent seek. 
- public static void av_format_inject_global_side_data(AVFormatContext* @s) - { - av_format_inject_global_side_data_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_frame_filename_delegate(byte* @buf, int @buf_size, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @path, int @number); - private static av_get_frame_filename_delegate av_get_frame_filename_fptr = (byte* @buf, int @buf_size, string @path, int @number) => - { - av_get_frame_filename_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_get_frame_filename"); - if (av_get_frame_filename_fptr == null) - { - av_get_frame_filename_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_frame_filename")); - }; - } - return av_get_frame_filename_fptr(@buf, @buf_size, @path, @number); - }; - public static int av_get_frame_filename(byte* @buf, int @buf_size, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @path, int @number) - { - return av_get_frame_filename_fptr(@buf, @buf_size, @path, @number); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_frame_filename2_delegate(byte* @buf, int @buf_size, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @path, int @number, int @flags); - private static av_get_frame_filename2_delegate av_get_frame_filename2_fptr = (byte* @buf, int @buf_size, string @path, int @number, int @flags) => - { - av_get_frame_filename2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), 
"av_get_frame_filename2"); - if (av_get_frame_filename2_fptr == null) - { - av_get_frame_filename2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_frame_filename2")); - }; - } - return av_get_frame_filename2_fptr(@buf, @buf_size, @path, @number, @flags); - }; - /// Return in 'buf' the path with '%d' replaced by a number. - /// destination buffer - /// destination buffer size - /// numbered sequence string - /// frame number - /// AV_FRAME_FILENAME_FLAGS_* - /// 0 if OK, -1 on format error - public static int av_get_frame_filename2(byte* @buf, int @buf_size, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @path, int @number, int @flags) - { - return av_get_frame_filename2_fptr(@buf, @buf_size, @path, @number, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_output_timestamp_delegate(AVFormatContext* @s, int @stream, long* @dts, long* @wall); - private static av_get_output_timestamp_delegate av_get_output_timestamp_fptr = (AVFormatContext* @s, int @stream, long* @dts, long* @wall) => - { - av_get_output_timestamp_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_get_output_timestamp"); - if (av_get_output_timestamp_fptr == null) - { - av_get_output_timestamp_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_output_timestamp")); - }; - } - return av_get_output_timestamp_fptr(@s, @stream, @dts, @wall); - }; - /// Get timing information for the data currently output. The exact meaning of "currently output" depends on the format. It is mostly relevant for devices that have an internal buffer and/or work in real time. 
- /// media file handle - /// stream in the media file - /// DTS of the last packet output for the stream, in stream time_base units - /// absolute time when that packet whas output, in microsecond - /// 0 if OK, AVERROR(ENOSYS) if the format does not support it Note: some formats or devices may not allow to measure dts and wall atomically. - public static int av_get_output_timestamp(AVFormatContext* @s, int @stream, long* @dts, long* @wall) - { - return av_get_output_timestamp_fptr(@s, @stream, @dts, @wall); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_packet_delegate(AVIOContext* @s, AVPacket* @pkt, int @size); - private static av_get_packet_delegate av_get_packet_fptr = (AVIOContext* @s, AVPacket* @pkt, int @size) => - { - av_get_packet_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_get_packet"); - if (av_get_packet_fptr == null) - { - av_get_packet_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_packet")); - }; - } - return av_get_packet_fptr(@s, @pkt, @size); - }; - /// Allocate and read the payload of a packet and initialize its fields with default values. 
- /// associated IO context - /// packet - /// desired payload size - /// >0 (read size) if OK, AVERROR_xxx otherwise - public static int av_get_packet(AVIOContext* @s, AVPacket* @pkt, int @size) - { - return av_get_packet_fptr(@s, @pkt, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecID av_guess_codec_delegate(AVOutputFormat* @fmt, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @short_name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filename, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @mime_type, AVMediaType @type); - private static av_guess_codec_delegate av_guess_codec_fptr = (AVOutputFormat* @fmt, string @short_name, string @filename, string @mime_type, AVMediaType @type) => - { - av_guess_codec_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_guess_codec"); - if (av_guess_codec_fptr == null) - { - av_guess_codec_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_guess_codec")); - }; - } - return av_guess_codec_fptr(@fmt, @short_name, @filename, @mime_type, @type); - }; - /// Guess the codec ID based upon muxer and filename. 
- public static AVCodecID av_guess_codec(AVOutputFormat* @fmt, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @short_name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filename, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @mime_type, AVMediaType @type) - { - return av_guess_codec_fptr(@fmt, @short_name, @filename, @mime_type, @type); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVOutputFormat* av_guess_format_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @short_name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filename, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @mime_type); - private static av_guess_format_delegate av_guess_format_fptr = (string @short_name, string @filename, string @mime_type) => - { - av_guess_format_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_guess_format"); - if (av_guess_format_fptr == null) - { - av_guess_format_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_guess_format")); - }; - } - return av_guess_format_fptr(@short_name, @filename, @mime_type); - }; - /// Return the output 
format in the list of registered output formats which best matches the provided parameters, or return NULL if there is no match. - /// if non-NULL checks if short_name matches with the names of the registered formats - /// if non-NULL checks if filename terminates with the extensions of the registered formats - /// if non-NULL checks if mime_type matches with the MIME type of the registered formats - public static AVOutputFormat* av_guess_format( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @short_name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filename, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @mime_type) - { - return av_guess_format_fptr(@short_name, @filename, @mime_type); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_guess_frame_rate_delegate(AVFormatContext* @ctx, AVStream* @stream, AVFrame* @frame); - private static av_guess_frame_rate_delegate av_guess_frame_rate_fptr = (AVFormatContext* @ctx, AVStream* @stream, AVFrame* @frame) => - { - av_guess_frame_rate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_guess_frame_rate"); - if (av_guess_frame_rate_fptr == null) - { - av_guess_frame_rate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_guess_frame_rate")); - }; - } - return av_guess_frame_rate_fptr(@ctx, @stream, @frame); - }; - /// Guess the frame rate, based on both the container and codec information. 
- /// the format context which the stream is part of - /// the stream which the frame is part of - /// the frame for which the frame rate should be determined, may be NULL - /// the guessed (valid) frame rate, 0/1 if no idea - public static AVRational av_guess_frame_rate(AVFormatContext* @ctx, AVStream* @stream, AVFrame* @frame) - { - return av_guess_frame_rate_fptr(@ctx, @stream, @frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_guess_sample_aspect_ratio_delegate(AVFormatContext* @format, AVStream* @stream, AVFrame* @frame); - private static av_guess_sample_aspect_ratio_delegate av_guess_sample_aspect_ratio_fptr = (AVFormatContext* @format, AVStream* @stream, AVFrame* @frame) => - { - av_guess_sample_aspect_ratio_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_guess_sample_aspect_ratio"); - if (av_guess_sample_aspect_ratio_fptr == null) - { - av_guess_sample_aspect_ratio_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_guess_sample_aspect_ratio")); - }; - } - return av_guess_sample_aspect_ratio_fptr(@format, @stream, @frame); - }; - /// Guess the sample aspect ratio of a frame, based on both the stream and the frame aspect ratio. 
- /// the format context which the stream is part of - /// the stream which the frame is part of - /// the frame with the aspect ratio to be determined - /// the guessed (valid) sample_aspect_ratio, 0/1 if no idea - public static AVRational av_guess_sample_aspect_ratio(AVFormatContext* @format, AVStream* @stream, AVFrame* @frame) - { - return av_guess_sample_aspect_ratio_fptr(@format, @stream, @frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_hex_dump_delegate(_iobuf* @f, byte* @buf, int @size); - private static av_hex_dump_delegate av_hex_dump_fptr = (_iobuf* @f, byte* @buf, int @size) => - { - av_hex_dump_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_hex_dump"); - if (av_hex_dump_fptr == null) - { - av_hex_dump_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hex_dump")); - }; - } - av_hex_dump_fptr(@f, @buf, @size); - }; - /// Send a nice hexadecimal dump of a buffer to the specified file stream. - /// The file stream pointer where the dump should be sent to. - /// buffer - /// buffer size - public static void av_hex_dump(_iobuf* @f, byte* @buf, int @size) - { - av_hex_dump_fptr(@f, @buf, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_hex_dump_log_delegate(void* @avcl, int @level, byte* @buf, int @size); - private static av_hex_dump_log_delegate av_hex_dump_log_fptr = (void* @avcl, int @level, byte* @buf, int @size) => - { - av_hex_dump_log_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_hex_dump_log"); - if (av_hex_dump_log_fptr == null) - { - av_hex_dump_log_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hex_dump_log")); - }; - } - av_hex_dump_log_fptr(@avcl, @level, @buf, @size); - }; - /// Send a nice hexadecimal dump of a buffer to the log. 
- /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. - /// The importance level of the message, lower values signifying higher importance. - /// buffer - /// buffer size - public static void av_hex_dump_log(void* @avcl, int @level, byte* @buf, int @size) - { - av_hex_dump_log_fptr(@avcl, @level, @buf, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_index_search_timestamp_delegate(AVStream* @st, long @timestamp, int @flags); - private static av_index_search_timestamp_delegate av_index_search_timestamp_fptr = (AVStream* @st, long @timestamp, int @flags) => - { - av_index_search_timestamp_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_index_search_timestamp"); - if (av_index_search_timestamp_fptr == null) - { - av_index_search_timestamp_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_index_search_timestamp")); - }; - } - return av_index_search_timestamp_fptr(@st, @timestamp, @flags); - }; - /// Get the index for a specific timestamp. 
- /// stream that the timestamp belongs to - /// timestamp to retrieve the index for - /// if AVSEEK_FLAG_BACKWARD then the returned index will correspond to the timestamp which is < = the requested one, if backward is 0, then it will be >= if AVSEEK_FLAG_ANY seek to any frame, only keyframes otherwise - /// < 0 if no such timestamp could be found - public static int av_index_search_timestamp(AVStream* @st, long @timestamp, int @flags) - { - return av_index_search_timestamp_fptr(@st, @timestamp, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_interleaved_write_frame_delegate(AVFormatContext* @s, AVPacket* @pkt); - private static av_interleaved_write_frame_delegate av_interleaved_write_frame_fptr = (AVFormatContext* @s, AVPacket* @pkt) => - { - av_interleaved_write_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_interleaved_write_frame"); - if (av_interleaved_write_frame_fptr == null) - { - av_interleaved_write_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_interleaved_write_frame")); - }; - } - return av_interleaved_write_frame_fptr(@s, @pkt); - }; - /// Write a packet to an output media file ensuring correct interleaving. - /// media file handle - /// The packet containing the data to be written. If the packet is reference-counted, this function will take ownership of this reference and unreference it later when it sees fit. If the packet is not reference-counted, libavformat will make a copy. The returned packet will be blank (as if returned from av_packet_alloc()), even on error. This parameter can be NULL (at any time, not just at the end), to flush the interleaving queues. Packet's "stream_index" field must be set to the index of the corresponding stream in "s->streams". 
The timestamps ( "pts", "dts") must be set to correct values in the stream's timebase (unless the output format is flagged with the AVFMT_NOTIMESTAMPS flag, then they can be set to AV_NOPTS_VALUE). The dts for subsequent packets in one stream must be strictly increasing (unless the output format is flagged with the AVFMT_TS_NONSTRICT, then they merely have to be nondecreasing). "duration" should also be set if known. - /// 0 on success, a negative AVERROR on error. - public static int av_interleaved_write_frame(AVFormatContext* @s, AVPacket* @pkt) - { - return av_interleaved_write_frame_fptr(@s, @pkt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_interleaved_write_uncoded_frame_delegate(AVFormatContext* @s, int @stream_index, AVFrame* @frame); - private static av_interleaved_write_uncoded_frame_delegate av_interleaved_write_uncoded_frame_fptr = (AVFormatContext* @s, int @stream_index, AVFrame* @frame) => - { - av_interleaved_write_uncoded_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_interleaved_write_uncoded_frame"); - if (av_interleaved_write_uncoded_frame_fptr == null) - { - av_interleaved_write_uncoded_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_interleaved_write_uncoded_frame")); - }; - } - return av_interleaved_write_uncoded_frame_fptr(@s, @stream_index, @frame); - }; - /// Write an uncoded frame to an output media file. 
- /// >=0 for success, a negative code on error - public static int av_interleaved_write_uncoded_frame(AVFormatContext* @s, int @stream_index, AVFrame* @frame) - { - return av_interleaved_write_uncoded_frame_fptr(@s, @stream_index, @frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_match_ext_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filename, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @extensions); - private static av_match_ext_delegate av_match_ext_fptr = (string @filename, string @extensions) => - { - av_match_ext_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_match_ext"); - if (av_match_ext_fptr == null) - { - av_match_ext_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_match_ext")); - }; - } - return av_match_ext_fptr(@filename, @extensions); - }; - /// Return a positive value if the given filename has one of the given extensions, 0 otherwise. 
- /// file name to check against the given extensions - /// a comma-separated list of filename extensions - public static int av_match_ext( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filename, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @extensions) - { - return av_match_ext_fptr(@filename, @extensions); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVOutputFormat* av_muxer_iterate_delegate(void** @opaque); - private static av_muxer_iterate_delegate av_muxer_iterate_fptr = (void** @opaque) => - { - av_muxer_iterate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_muxer_iterate"); - if (av_muxer_iterate_fptr == null) - { - av_muxer_iterate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_muxer_iterate")); - }; - } - return av_muxer_iterate_fptr(@opaque); - }; - /// Iterate over all registered muxers. - /// a pointer where libavformat will store the iteration state. Must point to NULL to start the iteration. 
- /// the next registered muxer or NULL when the iteration is finished - public static AVOutputFormat* av_muxer_iterate(void** @opaque) - { - return av_muxer_iterate_fptr(@opaque); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVProgram* av_new_program_delegate(AVFormatContext* @s, int @id); - private static av_new_program_delegate av_new_program_fptr = (AVFormatContext* @s, int @id) => - { - av_new_program_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_new_program"); - if (av_new_program_fptr == null) - { - av_new_program_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_new_program")); - }; - } - return av_new_program_fptr(@s, @id); - }; - public static AVProgram* av_new_program(AVFormatContext* @s, int @id) - { - return av_new_program_fptr(@s, @id); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_pkt_dump_log2_delegate(void* @avcl, int @level, AVPacket* @pkt, int @dump_payload, AVStream* @st); - private static av_pkt_dump_log2_delegate av_pkt_dump_log2_fptr = (void* @avcl, int @level, AVPacket* @pkt, int @dump_payload, AVStream* @st) => - { - av_pkt_dump_log2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_pkt_dump_log2"); - if (av_pkt_dump_log2_fptr == null) - { - av_pkt_dump_log2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_pkt_dump_log2")); - }; - } - av_pkt_dump_log2_fptr(@avcl, @level, @pkt, @dump_payload, @st); - }; - /// Send a nice dump of a packet to the log. - /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. - /// The importance level of the message, lower values signifying higher importance. - /// packet to dump - /// True if the payload must be displayed, too. 
- /// AVStream that the packet belongs to - public static void av_pkt_dump_log2(void* @avcl, int @level, AVPacket* @pkt, int @dump_payload, AVStream* @st) - { - av_pkt_dump_log2_fptr(@avcl, @level, @pkt, @dump_payload, @st); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_pkt_dump2_delegate(_iobuf* @f, AVPacket* @pkt, int @dump_payload, AVStream* @st); - private static av_pkt_dump2_delegate av_pkt_dump2_fptr = (_iobuf* @f, AVPacket* @pkt, int @dump_payload, AVStream* @st) => - { - av_pkt_dump2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_pkt_dump2"); - if (av_pkt_dump2_fptr == null) - { - av_pkt_dump2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_pkt_dump2")); - }; - } - av_pkt_dump2_fptr(@f, @pkt, @dump_payload, @st); - }; - /// Send a nice dump of a packet to the specified file stream. - /// The file stream pointer where the dump should be sent to. - /// packet to dump - /// True if the payload must be displayed, too. 
- /// AVStream that the packet belongs to - public static void av_pkt_dump2(_iobuf* @f, AVPacket* @pkt, int @dump_payload, AVStream* @st) - { - av_pkt_dump2_fptr(@f, @pkt, @dump_payload, @st); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_probe_input_buffer_delegate(AVIOContext* @pb, AVInputFormat** @fmt, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, void* @logctx, uint @offset, uint @max_probe_size); - private static av_probe_input_buffer_delegate av_probe_input_buffer_fptr = (AVIOContext* @pb, AVInputFormat** @fmt, string @url, void* @logctx, uint @offset, uint @max_probe_size) => - { - av_probe_input_buffer_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_probe_input_buffer"); - if (av_probe_input_buffer_fptr == null) - { - av_probe_input_buffer_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_probe_input_buffer")); - }; - } - return av_probe_input_buffer_fptr(@pb, @fmt, @url, @logctx, @offset, @max_probe_size); - }; - /// Like av_probe_input_buffer2() but returns 0 on success - public static int av_probe_input_buffer(AVIOContext* @pb, AVInputFormat** @fmt, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, void* @logctx, uint @offset, uint @max_probe_size) - { - return av_probe_input_buffer_fptr(@pb, @fmt, @url, @logctx, @offset, @max_probe_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_probe_input_buffer2_delegate(AVIOContext* @pb, AVInputFormat** @fmt, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, 
MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, void* @logctx, uint @offset, uint @max_probe_size); - private static av_probe_input_buffer2_delegate av_probe_input_buffer2_fptr = (AVIOContext* @pb, AVInputFormat** @fmt, string @url, void* @logctx, uint @offset, uint @max_probe_size) => - { - av_probe_input_buffer2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_probe_input_buffer2"); - if (av_probe_input_buffer2_fptr == null) - { - av_probe_input_buffer2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_probe_input_buffer2")); - }; - } - return av_probe_input_buffer2_fptr(@pb, @fmt, @url, @logctx, @offset, @max_probe_size); - }; - /// Probe a bytestream to determine the input format. Each time a probe returns with a score that is too low, the probe buffer size is increased and another attempt is made. When the maximum probe size is reached, the input format with the highest score is returned. - /// the bytestream to probe - /// the input format is put here - /// the url of the stream - /// the log context - /// the offset within the bytestream to probe from - /// the maximum probe buffer size (zero for default) - /// the score in case of success, a negative value corresponding to an the maximal score is AVPROBE_SCORE_MAX AVERROR code otherwise - public static int av_probe_input_buffer2(AVIOContext* @pb, AVInputFormat** @fmt, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, void* @logctx, uint @offset, uint @max_probe_size) - { - return av_probe_input_buffer2_fptr(@pb, @fmt, @url, @logctx, @offset, @max_probe_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVInputFormat* av_probe_input_format_delegate(AVProbeData* @pd, int @is_opened); - private static 
av_probe_input_format_delegate av_probe_input_format_fptr = (AVProbeData* @pd, int @is_opened) => - { - av_probe_input_format_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_probe_input_format"); - if (av_probe_input_format_fptr == null) - { - av_probe_input_format_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_probe_input_format")); - }; - } - return av_probe_input_format_fptr(@pd, @is_opened); - }; - /// Guess the file format. - /// data to be probed - /// Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed. - public static AVInputFormat* av_probe_input_format(AVProbeData* @pd, int @is_opened) - { - return av_probe_input_format_fptr(@pd, @is_opened); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVInputFormat* av_probe_input_format2_delegate(AVProbeData* @pd, int @is_opened, int* @score_max); - private static av_probe_input_format2_delegate av_probe_input_format2_fptr = (AVProbeData* @pd, int @is_opened, int* @score_max) => - { - av_probe_input_format2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_probe_input_format2"); - if (av_probe_input_format2_fptr == null) - { - av_probe_input_format2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_probe_input_format2")); - }; - } - return av_probe_input_format2_fptr(@pd, @is_opened, @score_max); - }; - /// Guess the file format. - /// data to be probed - /// Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed. - /// A probe score larger that this is required to accept a detection, the variable is set to the actual detection score afterwards. If the score is < = AVPROBE_SCORE_MAX / 4 it is recommended to retry with a larger probe buffer. 
- public static AVInputFormat* av_probe_input_format2(AVProbeData* @pd, int @is_opened, int* @score_max) - { - return av_probe_input_format2_fptr(@pd, @is_opened, @score_max); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVInputFormat* av_probe_input_format3_delegate(AVProbeData* @pd, int @is_opened, int* @score_ret); - private static av_probe_input_format3_delegate av_probe_input_format3_fptr = (AVProbeData* @pd, int @is_opened, int* @score_ret) => - { - av_probe_input_format3_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_probe_input_format3"); - if (av_probe_input_format3_fptr == null) - { - av_probe_input_format3_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_probe_input_format3")); - }; - } - return av_probe_input_format3_fptr(@pd, @is_opened, @score_ret); - }; - /// Guess the file format. - /// Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed. - /// The score of the best detection. 
- public static AVInputFormat* av_probe_input_format3(AVProbeData* @pd, int @is_opened, int* @score_ret) - { - return av_probe_input_format3_fptr(@pd, @is_opened, @score_ret); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_program_add_stream_index_delegate(AVFormatContext* @ac, int @progid, uint @idx); - private static av_program_add_stream_index_delegate av_program_add_stream_index_fptr = (AVFormatContext* @ac, int @progid, uint @idx) => - { - av_program_add_stream_index_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_program_add_stream_index"); - if (av_program_add_stream_index_fptr == null) - { - av_program_add_stream_index_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_program_add_stream_index")); - }; - } - av_program_add_stream_index_fptr(@ac, @progid, @idx); - }; - public static void av_program_add_stream_index(AVFormatContext* @ac, int @progid, uint @idx) - { - av_program_add_stream_index_fptr(@ac, @progid, @idx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_read_frame_delegate(AVFormatContext* @s, AVPacket* @pkt); - private static av_read_frame_delegate av_read_frame_fptr = (AVFormatContext* @s, AVPacket* @pkt) => - { - av_read_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_read_frame"); - if (av_read_frame_fptr == null) - { - av_read_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_read_frame")); - }; - } - return av_read_frame_fptr(@s, @pkt); - }; - /// Return the next frame of a stream. This function returns what is stored in the file, and does not validate that what is there are valid frames for the decoder. It will split what is stored in the file into frames and return one for each call. 
It will not omit invalid data between valid frames so as to give the decoder the maximum information possible for decoding. - /// 0 if OK, < 0 on error or end of file. On error, pkt will be blank (as if it came from av_packet_alloc()). - public static int av_read_frame(AVFormatContext* @s, AVPacket* @pkt) - { - return av_read_frame_fptr(@s, @pkt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_read_pause_delegate(AVFormatContext* @s); - private static av_read_pause_delegate av_read_pause_fptr = (AVFormatContext* @s) => - { - av_read_pause_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_read_pause"); - if (av_read_pause_fptr == null) - { - av_read_pause_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_read_pause")); - }; - } - return av_read_pause_fptr(@s); - }; - /// Pause a network-based stream (e.g. RTSP stream). - public static int av_read_pause(AVFormatContext* @s) - { - return av_read_pause_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_read_play_delegate(AVFormatContext* @s); - private static av_read_play_delegate av_read_play_fptr = (AVFormatContext* @s) => - { - av_read_play_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_read_play"); - if (av_read_play_fptr == null) - { - av_read_play_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_read_play")); - }; - } - return av_read_play_fptr(@s); - }; - /// Start playing a network-based stream (e.g. RTSP stream) at the current position. 
- public static int av_read_play(AVFormatContext* @s) - { - return av_read_play_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_sdp_create_delegate(AVFormatContext** @ac, int @n_files, byte* @buf, int @size); - private static av_sdp_create_delegate av_sdp_create_fptr = (AVFormatContext** @ac, int @n_files, byte* @buf, int @size) => - { - av_sdp_create_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_sdp_create"); - if (av_sdp_create_fptr == null) - { - av_sdp_create_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_sdp_create")); - }; - } - return av_sdp_create_fptr(@ac, @n_files, @buf, @size); - }; - /// Generate an SDP for an RTP session. - /// array of AVFormatContexts describing the RTP streams. If the array is composed by only one context, such context can contain multiple AVStreams (one AVStream per RTP stream). Otherwise, all the contexts in the array (an AVCodecContext per RTP stream) must contain only one AVStream. 
- /// number of AVCodecContexts contained in ac - /// buffer where the SDP will be stored (must be allocated by the caller) - /// the size of the buffer - /// 0 if OK, AVERROR_xxx on error - public static int av_sdp_create(AVFormatContext** @ac, int @n_files, byte* @buf, int @size) - { - return av_sdp_create_fptr(@ac, @n_files, @buf, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_seek_frame_delegate(AVFormatContext* @s, int @stream_index, long @timestamp, int @flags); - private static av_seek_frame_delegate av_seek_frame_fptr = (AVFormatContext* @s, int @stream_index, long @timestamp, int @flags) => - { - av_seek_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_seek_frame"); - if (av_seek_frame_fptr == null) - { - av_seek_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_seek_frame")); - }; - } - return av_seek_frame_fptr(@s, @stream_index, @timestamp, @flags); - }; - /// Seek to the keyframe at timestamp. 'timestamp' in 'stream_index'. - /// media file handle - /// If stream_index is (-1), a default stream is selected, and timestamp is automatically converted from AV_TIME_BASE units to the stream specific time_base. - /// Timestamp in AVStream.time_base units or, if no stream is specified, in AV_TIME_BASE units. 
- /// flags which select direction and seeking mode - /// >= 0 on success - public static int av_seek_frame(AVFormatContext* @s, int @stream_index, long @timestamp, int @flags) - { - return av_seek_frame_fptr(@s, @stream_index, @timestamp, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_stream_add_side_data_delegate(AVStream* @st, AVPacketSideDataType @type, byte* @data, ulong @size); - private static av_stream_add_side_data_delegate av_stream_add_side_data_fptr = (AVStream* @st, AVPacketSideDataType @type, byte* @data, ulong @size) => - { - av_stream_add_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_stream_add_side_data"); - if (av_stream_add_side_data_fptr == null) - { - av_stream_add_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_stream_add_side_data")); - }; - } - return av_stream_add_side_data_fptr(@st, @type, @data, @size); - }; - /// Wrap an existing array as stream side data. - /// stream - /// side information type - /// the side data array. It must be allocated with the av_malloc() family of functions. The ownership of the data is transferred to st. - /// side information size - /// zero on success, a negative AVERROR code on failure. On failure, the stream is unchanged and the data remains owned by the caller. 
- public static int av_stream_add_side_data(AVStream* @st, AVPacketSideDataType @type, byte* @data, ulong @size) - { - return av_stream_add_side_data_fptr(@st, @type, @data, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVClass* av_stream_get_class_delegate(); - private static av_stream_get_class_delegate av_stream_get_class_fptr = () => - { - av_stream_get_class_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_stream_get_class"); - if (av_stream_get_class_fptr == null) - { - av_stream_get_class_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_stream_get_class")); - }; - } - return av_stream_get_class_fptr(); - }; - /// Get the AVClass for AVStream. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. - public static AVClass* av_stream_get_class() - { - return av_stream_get_class_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_stream_get_codec_timebase_delegate(AVStream* @st); - private static av_stream_get_codec_timebase_delegate av_stream_get_codec_timebase_fptr = (AVStream* @st) => - { - av_stream_get_codec_timebase_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_stream_get_codec_timebase"); - if (av_stream_get_codec_timebase_fptr == null) - { - av_stream_get_codec_timebase_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_stream_get_codec_timebase")); - }; - } - return av_stream_get_codec_timebase_fptr(@st); - }; - /// Get the internal codec timebase from a stream. 
- /// input stream to extract the timebase from - public static AVRational av_stream_get_codec_timebase(AVStream* @st) - { - return av_stream_get_codec_timebase_fptr(@st); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long av_stream_get_end_pts_delegate(AVStream* @st); - private static av_stream_get_end_pts_delegate av_stream_get_end_pts_fptr = (AVStream* @st) => - { - av_stream_get_end_pts_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_stream_get_end_pts"); - if (av_stream_get_end_pts_fptr == null) - { - av_stream_get_end_pts_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_stream_get_end_pts")); - }; - } - return av_stream_get_end_pts_fptr(@st); - }; - /// Returns the pts of the last muxed packet + its duration - public static long av_stream_get_end_pts(AVStream* @st) - { - return av_stream_get_end_pts_fptr(@st); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecParserContext* av_stream_get_parser_delegate(AVStream* @s); - private static av_stream_get_parser_delegate av_stream_get_parser_fptr = (AVStream* @s) => - { - av_stream_get_parser_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_stream_get_parser"); - if (av_stream_get_parser_fptr == null) - { - av_stream_get_parser_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_stream_get_parser")); - }; - } - return av_stream_get_parser_fptr(@s); - }; - public static AVCodecParserContext* av_stream_get_parser(AVStream* @s) - { - return av_stream_get_parser_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_stream_get_side_data_delegate(AVStream* @stream, AVPacketSideDataType @type, ulong* @size); - private static av_stream_get_side_data_delegate 
av_stream_get_side_data_fptr = (AVStream* @stream, AVPacketSideDataType @type, ulong* @size) => - { - av_stream_get_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_stream_get_side_data"); - if (av_stream_get_side_data_fptr == null) - { - av_stream_get_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_stream_get_side_data")); - }; - } - return av_stream_get_side_data_fptr(@stream, @type, @size); - }; - /// Get side information from stream. - /// stream - /// desired side information type - /// If supplied, *size will be set to the size of the side data or to zero if the desired side data is not present. - /// pointer to data if present or NULL otherwise - public static byte* av_stream_get_side_data(AVStream* @stream, AVPacketSideDataType @type, ulong* @size) - { - return av_stream_get_side_data_fptr(@stream, @type, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_stream_new_side_data_delegate(AVStream* @stream, AVPacketSideDataType @type, ulong @size); - private static av_stream_new_side_data_delegate av_stream_new_side_data_fptr = (AVStream* @stream, AVPacketSideDataType @type, ulong @size) => - { - av_stream_new_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_stream_new_side_data"); - if (av_stream_new_side_data_fptr == null) - { - av_stream_new_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_stream_new_side_data")); - }; - } - return av_stream_new_side_data_fptr(@stream, @type, @size); - }; - /// Allocate new information from stream. 
- /// stream - /// desired side information type - /// side information size - /// pointer to fresh allocated data or NULL otherwise - public static byte* av_stream_new_side_data(AVStream* @stream, AVPacketSideDataType @type, ulong @size) - { - return av_stream_new_side_data_fptr(@stream, @type, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_url_split_delegate(byte* @proto, int @proto_size, byte* @authorization, int @authorization_size, byte* @hostname, int @hostname_size, int* @port_ptr, byte* @path, int @path_size, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url); - private static av_url_split_delegate av_url_split_fptr = (byte* @proto, int @proto_size, byte* @authorization, int @authorization_size, byte* @hostname, int @hostname_size, int* @port_ptr, byte* @path, int @path_size, string @url) => - { - av_url_split_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_url_split"); - if (av_url_split_fptr == null) - { - av_url_split_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_url_split")); - }; - } - av_url_split_fptr(@proto, @proto_size, @authorization, @authorization_size, @hostname, @hostname_size, @port_ptr, @path, @path_size, @url); - }; - /// Split a URL string into components. 
- /// the buffer for the protocol - /// the size of the proto buffer - /// the buffer for the authorization - /// the size of the authorization buffer - /// the buffer for the host name - /// the size of the hostname buffer - /// a pointer to store the port number in - /// the buffer for the path - /// the size of the path buffer - /// the URL to split - public static void av_url_split(byte* @proto, int @proto_size, byte* @authorization, int @authorization_size, byte* @hostname, int @hostname_size, int* @port_ptr, byte* @path, int @path_size, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url) - { - av_url_split_fptr(@proto, @proto_size, @authorization, @authorization_size, @hostname, @hostname_size, @port_ptr, @path, @path_size, @url); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_write_frame_delegate(AVFormatContext* @s, AVPacket* @pkt); - private static av_write_frame_delegate av_write_frame_fptr = (AVFormatContext* @s, AVPacket* @pkt) => - { - av_write_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_write_frame"); - if (av_write_frame_fptr == null) - { - av_write_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_write_frame")); - }; - } - return av_write_frame_fptr(@s, @pkt); - }; - /// Write a packet to an output media file. - /// media file handle - /// The packet containing the data to be written. Note that unlike av_interleaved_write_frame(), this function does not take ownership of the packet passed to it (though some muxers may make an internal reference to the input packet). 
This parameter can be NULL (at any time, not just at the end), in order to immediately flush data buffered within the muxer, for muxers that buffer up data internally before writing it to the output. Packet's "stream_index" field must be set to the index of the corresponding stream in "s->streams". The timestamps ( "pts", "dts") must be set to correct values in the stream's timebase (unless the output format is flagged with the AVFMT_NOTIMESTAMPS flag, then they can be set to AV_NOPTS_VALUE). The dts for subsequent packets passed to this function must be strictly increasing when compared in their respective timebases (unless the output format is flagged with the AVFMT_TS_NONSTRICT, then they merely have to be nondecreasing). "duration") should also be set if known. - /// < 0 on error, = 0 if OK, 1 if flushed and there is no more data to flush - public static int av_write_frame(AVFormatContext* @s, AVPacket* @pkt) - { - return av_write_frame_fptr(@s, @pkt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_write_trailer_delegate(AVFormatContext* @s); - private static av_write_trailer_delegate av_write_trailer_fptr = (AVFormatContext* @s) => - { - av_write_trailer_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_write_trailer"); - if (av_write_trailer_fptr == null) - { - av_write_trailer_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_write_trailer")); - }; - } - return av_write_trailer_fptr(@s); - }; - /// Write the stream trailer to an output media file and free the file private data. 
- /// media file handle - /// 0 if OK, AVERROR_xxx on error - public static int av_write_trailer(AVFormatContext* @s) - { - return av_write_trailer_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_write_uncoded_frame_delegate(AVFormatContext* @s, int @stream_index, AVFrame* @frame); - private static av_write_uncoded_frame_delegate av_write_uncoded_frame_fptr = (AVFormatContext* @s, int @stream_index, AVFrame* @frame) => - { - av_write_uncoded_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_write_uncoded_frame"); - if (av_write_uncoded_frame_fptr == null) - { - av_write_uncoded_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_write_uncoded_frame")); - }; - } - return av_write_uncoded_frame_fptr(@s, @stream_index, @frame); - }; - /// Write an uncoded frame to an output media file. - public static int av_write_uncoded_frame(AVFormatContext* @s, int @stream_index, AVFrame* @frame) - { - return av_write_uncoded_frame_fptr(@s, @stream_index, @frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_write_uncoded_frame_query_delegate(AVFormatContext* @s, int @stream_index); - private static av_write_uncoded_frame_query_delegate av_write_uncoded_frame_query_fptr = (AVFormatContext* @s, int @stream_index) => - { - av_write_uncoded_frame_query_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "av_write_uncoded_frame_query"); - if (av_write_uncoded_frame_query_fptr == null) - { - av_write_uncoded_frame_query_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_write_uncoded_frame_query")); - }; - } - return av_write_uncoded_frame_query_fptr(@s, @stream_index); - }; - /// Test whether a muxer supports uncoded frame. 
- /// >=0 if an uncoded frame can be written to that muxer and stream, < 0 if not - public static int av_write_uncoded_frame_query(AVFormatContext* @s, int @stream_index) - { - return av_write_uncoded_frame_query_fptr(@s, @stream_index); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVFormatContext* avformat_alloc_context_delegate(); - private static avformat_alloc_context_delegate avformat_alloc_context_fptr = () => - { - avformat_alloc_context_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_alloc_context"); - if (avformat_alloc_context_fptr == null) - { - avformat_alloc_context_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_alloc_context")); - }; - } - return avformat_alloc_context_fptr(); - }; - /// Allocate an AVFormatContext. avformat_free_context() can be used to free the context and everything allocated by the framework within it. 
- public static AVFormatContext* avformat_alloc_context() - { - return avformat_alloc_context_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_alloc_output_context2_delegate(AVFormatContext** @ctx, AVOutputFormat* @oformat, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @format_name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filename); - private static avformat_alloc_output_context2_delegate avformat_alloc_output_context2_fptr = (AVFormatContext** @ctx, AVOutputFormat* @oformat, string @format_name, string @filename) => - { - avformat_alloc_output_context2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_alloc_output_context2"); - if (avformat_alloc_output_context2_fptr == null) - { - avformat_alloc_output_context2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_alloc_output_context2")); - }; - } - return avformat_alloc_output_context2_fptr(@ctx, @oformat, @format_name, @filename); - }; - /// Allocate an AVFormatContext for an output format. avformat_free_context() can be used to free the context and everything allocated by the framework within it. 
- /// format to use for allocating the context, if NULL format_name and filename are used instead - /// the name of output format to use for allocating the context, if NULL filename is used instead - /// the name of the filename to use for allocating the context, may be NULL - /// >= 0 in case of success, a negative AVERROR code in case of failure - public static int avformat_alloc_output_context2(AVFormatContext** @ctx, AVOutputFormat* @oformat, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @format_name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filename) - { - return avformat_alloc_output_context2_fptr(@ctx, @oformat, @format_name, @filename); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avformat_close_input_delegate(AVFormatContext** @s); - private static avformat_close_input_delegate avformat_close_input_fptr = (AVFormatContext** @s) => - { - avformat_close_input_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_close_input"); - if (avformat_close_input_fptr == null) - { - avformat_close_input_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_close_input")); - }; - } - avformat_close_input_fptr(@s); - }; - /// Close an opened input AVFormatContext. Free it and all its contents and set *s to NULL. 
- public static void avformat_close_input(AVFormatContext** @s) - { - avformat_close_input_fptr(@s); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avformat_configuration_delegate(); - private static avformat_configuration_delegate avformat_configuration_fptr = () => - { - avformat_configuration_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_configuration"); - if (avformat_configuration_fptr == null) - { - avformat_configuration_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_configuration")); - }; - } - return avformat_configuration_fptr(); - }; - /// Return the libavformat build-time configuration. - public static string avformat_configuration() - { - return avformat_configuration_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_find_stream_info_delegate(AVFormatContext* @ic, AVDictionary** @options); - private static avformat_find_stream_info_delegate avformat_find_stream_info_fptr = (AVFormatContext* @ic, AVDictionary** @options) => - { - avformat_find_stream_info_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_find_stream_info"); - if (avformat_find_stream_info_fptr == null) - { - avformat_find_stream_info_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_find_stream_info")); - }; - } - return avformat_find_stream_info_fptr(@ic, @options); - }; - /// Read packets of a media file to get stream information. This is useful for file formats with no headers such as MPEG. This function also computes the real framerate in case of MPEG-2 repeat frame mode. 
The logical file position is not changed by this function; examined packets may be buffered for later processing. - /// media file handle - /// If non-NULL, an ic.nb_streams long array of pointers to dictionaries, where i-th member contains options for codec corresponding to i-th stream. On return each dictionary will be filled with options that were not found. - /// >=0 if OK, AVERROR_xxx on error - public static int avformat_find_stream_info(AVFormatContext* @ic, AVDictionary** @options) - { - return avformat_find_stream_info_fptr(@ic, @options); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_flush_delegate(AVFormatContext* @s); - private static avformat_flush_delegate avformat_flush_fptr = (AVFormatContext* @s) => - { - avformat_flush_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_flush"); - if (avformat_flush_fptr == null) - { - avformat_flush_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_flush")); - }; - } - return avformat_flush_fptr(@s); - }; - /// Discard all internally buffered data. This can be useful when dealing with discontinuities in the byte stream. Generally works only with formats that can resync. This includes headerless formats like MPEG-TS/TS but should also work with NUT, Ogg and in a limited way AVI for example. 
- /// media file handle - /// >=0 on success, error code otherwise - public static int avformat_flush(AVFormatContext* @s) - { - return avformat_flush_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avformat_free_context_delegate(AVFormatContext* @s); - private static avformat_free_context_delegate avformat_free_context_fptr = (AVFormatContext* @s) => - { - avformat_free_context_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_free_context"); - if (avformat_free_context_fptr == null) - { - avformat_free_context_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_free_context")); - }; - } - avformat_free_context_fptr(@s); - }; - /// Free an AVFormatContext and all its streams. - /// context to free - public static void avformat_free_context(AVFormatContext* @s) - { - avformat_free_context_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVClass* avformat_get_class_delegate(); - private static avformat_get_class_delegate avformat_get_class_fptr = () => - { - avformat_get_class_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_get_class"); - if (avformat_get_class_fptr == null) - { - avformat_get_class_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_get_class")); - }; - } - return avformat_get_class_fptr(); - }; - /// Get the AVClass for AVFormatContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. 
- public static AVClass* avformat_get_class() - { - return avformat_get_class_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecTag* avformat_get_mov_audio_tags_delegate(); - private static avformat_get_mov_audio_tags_delegate avformat_get_mov_audio_tags_fptr = () => - { - avformat_get_mov_audio_tags_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_get_mov_audio_tags"); - if (avformat_get_mov_audio_tags_fptr == null) - { - avformat_get_mov_audio_tags_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_get_mov_audio_tags")); - }; - } - return avformat_get_mov_audio_tags_fptr(); - }; - /// Returns the table mapping MOV FourCCs for audio to AVCodecID. - /// the table mapping MOV FourCCs for audio to AVCodecID. - public static AVCodecTag* avformat_get_mov_audio_tags() - { - return avformat_get_mov_audio_tags_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecTag* avformat_get_mov_video_tags_delegate(); - private static avformat_get_mov_video_tags_delegate avformat_get_mov_video_tags_fptr = () => - { - avformat_get_mov_video_tags_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_get_mov_video_tags"); - if (avformat_get_mov_video_tags_fptr == null) - { - avformat_get_mov_video_tags_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_get_mov_video_tags")); - }; - } - return avformat_get_mov_video_tags_fptr(); - }; - /// Returns the table mapping MOV FourCCs for video to libavcodec AVCodecID. - /// the table mapping MOV FourCCs for video to libavcodec AVCodecID. 
- public static AVCodecTag* avformat_get_mov_video_tags() - { - return avformat_get_mov_video_tags_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecTag* avformat_get_riff_audio_tags_delegate(); - private static avformat_get_riff_audio_tags_delegate avformat_get_riff_audio_tags_fptr = () => - { - avformat_get_riff_audio_tags_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_get_riff_audio_tags"); - if (avformat_get_riff_audio_tags_fptr == null) - { - avformat_get_riff_audio_tags_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_get_riff_audio_tags")); - }; - } - return avformat_get_riff_audio_tags_fptr(); - }; - /// Returns the table mapping RIFF FourCCs for audio to AVCodecID. - /// the table mapping RIFF FourCCs for audio to AVCodecID. - public static AVCodecTag* avformat_get_riff_audio_tags() - { - return avformat_get_riff_audio_tags_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVCodecTag* avformat_get_riff_video_tags_delegate(); - private static avformat_get_riff_video_tags_delegate avformat_get_riff_video_tags_fptr = () => - { - avformat_get_riff_video_tags_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_get_riff_video_tags"); - if (avformat_get_riff_video_tags_fptr == null) - { - avformat_get_riff_video_tags_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_get_riff_video_tags")); - }; - } - return avformat_get_riff_video_tags_fptr(); - }; - /// @{ Get the tables mapping RIFF FourCCs to libavcodec AVCodecIDs. The tables are meant to be passed to av_codec_get_id()/av_codec_get_tag() as in the following code: - /// the table mapping RIFF FourCCs for video to libavcodec AVCodecID. 
- public static AVCodecTag* avformat_get_riff_video_tags() - { - return avformat_get_riff_video_tags_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_index_get_entries_count_delegate(AVStream* @st); - private static avformat_index_get_entries_count_delegate avformat_index_get_entries_count_fptr = (AVStream* @st) => - { - avformat_index_get_entries_count_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_index_get_entries_count"); - if (avformat_index_get_entries_count_fptr == null) - { - avformat_index_get_entries_count_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_index_get_entries_count")); - }; - } - return avformat_index_get_entries_count_fptr(@st); - }; - /// Get the index entry count for the given AVStream. - /// stream - /// the number of index entries in the stream - public static int avformat_index_get_entries_count(AVStream* @st) - { - return avformat_index_get_entries_count_fptr(@st); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVIndexEntry* avformat_index_get_entry_delegate(AVStream* @st, int @idx); - private static avformat_index_get_entry_delegate avformat_index_get_entry_fptr = (AVStream* @st, int @idx) => - { - avformat_index_get_entry_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_index_get_entry"); - if (avformat_index_get_entry_fptr == null) - { - avformat_index_get_entry_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_index_get_entry")); - }; - } - return avformat_index_get_entry_fptr(@st, @idx); - }; - /// Get the AVIndexEntry corresponding to the given index. - /// Stream containing the requested AVIndexEntry. - /// The desired index. - /// A pointer to the requested AVIndexEntry if it exists, NULL otherwise. 
- public static AVIndexEntry* avformat_index_get_entry(AVStream* @st, int @idx) - { - return avformat_index_get_entry_fptr(@st, @idx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVIndexEntry* avformat_index_get_entry_from_timestamp_delegate(AVStream* @st, long @wanted_timestamp, int @flags); - private static avformat_index_get_entry_from_timestamp_delegate avformat_index_get_entry_from_timestamp_fptr = (AVStream* @st, long @wanted_timestamp, int @flags) => - { - avformat_index_get_entry_from_timestamp_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_index_get_entry_from_timestamp"); - if (avformat_index_get_entry_from_timestamp_fptr == null) - { - avformat_index_get_entry_from_timestamp_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_index_get_entry_from_timestamp")); - }; - } - return avformat_index_get_entry_from_timestamp_fptr(@st, @wanted_timestamp, @flags); - }; - /// Get the AVIndexEntry corresponding to the given timestamp. - /// Stream containing the requested AVIndexEntry. - /// If AVSEEK_FLAG_BACKWARD then the returned entry will correspond to the timestamp which is < = the requested one, if backward is 0, then it will be >= if AVSEEK_FLAG_ANY seek to any frame, only keyframes otherwise. - /// A pointer to the requested AVIndexEntry if it exists, NULL otherwise. 
- public static AVIndexEntry* avformat_index_get_entry_from_timestamp(AVStream* @st, long @wanted_timestamp, int @flags) - { - return avformat_index_get_entry_from_timestamp_fptr(@st, @wanted_timestamp, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_init_output_delegate(AVFormatContext* @s, AVDictionary** @options); - private static avformat_init_output_delegate avformat_init_output_fptr = (AVFormatContext* @s, AVDictionary** @options) => - { - avformat_init_output_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_init_output"); - if (avformat_init_output_fptr == null) - { - avformat_init_output_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_init_output")); - }; - } - return avformat_init_output_fptr(@s, @options); - }; - /// Allocate the stream private data and initialize the codec, but do not write the header. May optionally be used before avformat_write_header to initialize stream parameters before actually writing the header. If using this function, do not pass the same options to avformat_write_header. - /// Media file handle, must be allocated with avformat_alloc_context(). Its oformat field must be set to the desired output format; Its pb field must be set to an already opened AVIOContext. - /// An AVDictionary filled with AVFormatContext and muxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. - /// AVSTREAM_INIT_IN_WRITE_HEADER on success if the codec requires avformat_write_header to fully initialize, AVSTREAM_INIT_IN_INIT_OUTPUT on success if the codec has been fully initialized, negative AVERROR on failure. 
- public static int avformat_init_output(AVFormatContext* @s, AVDictionary** @options) - { - return avformat_init_output_fptr(@s, @options); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avformat_license_delegate(); - private static avformat_license_delegate avformat_license_fptr = () => - { - avformat_license_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_license"); - if (avformat_license_fptr == null) - { - avformat_license_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_license")); - }; - } - return avformat_license_fptr(); - }; - /// Return the libavformat license. - public static string avformat_license() - { - return avformat_license_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_match_stream_specifier_delegate(AVFormatContext* @s, AVStream* @st, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @spec); - private static avformat_match_stream_specifier_delegate avformat_match_stream_specifier_fptr = (AVFormatContext* @s, AVStream* @st, string @spec) => - { - avformat_match_stream_specifier_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_match_stream_specifier"); - if (avformat_match_stream_specifier_fptr == null) - { - avformat_match_stream_specifier_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_match_stream_specifier")); - }; - } - return avformat_match_stream_specifier_fptr(@s, @st, @spec); - }; - /// Check if the stream st contained in s is matched by the stream specifier spec. 
- /// >0 if st is matched by spec; 0 if st is not matched by spec; AVERROR code if spec is invalid - public static int avformat_match_stream_specifier(AVFormatContext* @s, AVStream* @st, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @spec) - { - return avformat_match_stream_specifier_fptr(@s, @st, @spec); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_network_deinit_delegate(); - private static avformat_network_deinit_delegate avformat_network_deinit_fptr = () => - { - avformat_network_deinit_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_network_deinit"); - if (avformat_network_deinit_fptr == null) - { - avformat_network_deinit_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_network_deinit")); - }; - } - return avformat_network_deinit_fptr(); - }; - /// Undo the initialization done by avformat_network_init. Call it only once for each time you called avformat_network_init. - public static int avformat_network_deinit() - { - return avformat_network_deinit_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_network_init_delegate(); - private static avformat_network_init_delegate avformat_network_init_fptr = () => - { - avformat_network_init_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_network_init"); - if (avformat_network_init_fptr == null) - { - avformat_network_init_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_network_init")); - }; - } - return avformat_network_init_fptr(); - }; - /// Do global initialization of network libraries. This is optional, and not recommended anymore. 
- public static int avformat_network_init() - { - return avformat_network_init_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVStream* avformat_new_stream_delegate(AVFormatContext* @s, AVCodec* @c); - private static avformat_new_stream_delegate avformat_new_stream_fptr = (AVFormatContext* @s, AVCodec* @c) => - { - avformat_new_stream_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_new_stream"); - if (avformat_new_stream_fptr == null) - { - avformat_new_stream_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_new_stream")); - }; - } - return avformat_new_stream_fptr(@s, @c); - }; - /// Add a new stream to a media file. - /// media file handle - /// unused, does nothing - /// newly created stream or NULL on error. - public static AVStream* avformat_new_stream(AVFormatContext* @s, AVCodec* @c) - { - return avformat_new_stream_fptr(@s, @c); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_open_input_delegate(AVFormatContext** @ps, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, AVInputFormat* @fmt, AVDictionary** @options); - private static avformat_open_input_delegate avformat_open_input_fptr = (AVFormatContext** @ps, string @url, AVInputFormat* @fmt, AVDictionary** @options) => - { - avformat_open_input_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_open_input"); - if (avformat_open_input_fptr == null) - { - avformat_open_input_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_open_input")); - }; - } - return avformat_open_input_fptr(@ps, @url, @fmt, @options); - }; - /// Open an input stream and read the header. 
The codecs are not opened. The stream must be closed with avformat_close_input(). - /// Pointer to user-supplied AVFormatContext (allocated by avformat_alloc_context). May be a pointer to NULL, in which case an AVFormatContext is allocated by this function and written into ps. Note that a user-supplied AVFormatContext will be freed on failure. - /// URL of the stream to open. - /// If non-NULL, this parameter forces a specific input format. Otherwise the format is autodetected. - /// A dictionary filled with AVFormatContext and demuxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. - /// 0 on success, a negative AVERROR on failure. - public static int avformat_open_input(AVFormatContext** @ps, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, AVInputFormat* @fmt, AVDictionary** @options) - { - return avformat_open_input_fptr(@ps, @url, @fmt, @options); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_query_codec_delegate(AVOutputFormat* @ofmt, AVCodecID @codec_id, int @std_compliance); - private static avformat_query_codec_delegate avformat_query_codec_fptr = (AVOutputFormat* @ofmt, AVCodecID @codec_id, int @std_compliance) => - { - avformat_query_codec_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_query_codec"); - if (avformat_query_codec_fptr == null) - { - avformat_query_codec_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_query_codec")); - }; - } - return avformat_query_codec_fptr(@ofmt, @codec_id, @std_compliance); - }; - /// Test if the given container can store a codec. 
- /// container to check for compatibility - /// codec to potentially store in container - /// standards compliance level, one of FF_COMPLIANCE_* - /// 1 if codec with ID codec_id can be stored in ofmt, 0 if it cannot. A negative number if this information is not available. - public static int avformat_query_codec(AVOutputFormat* @ofmt, AVCodecID @codec_id, int @std_compliance) - { - return avformat_query_codec_fptr(@ofmt, @codec_id, @std_compliance); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_queue_attached_pictures_delegate(AVFormatContext* @s); - private static avformat_queue_attached_pictures_delegate avformat_queue_attached_pictures_fptr = (AVFormatContext* @s) => - { - avformat_queue_attached_pictures_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_queue_attached_pictures"); - if (avformat_queue_attached_pictures_fptr == null) - { - avformat_queue_attached_pictures_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_queue_attached_pictures")); - }; - } - return avformat_queue_attached_pictures_fptr(@s); - }; - public static int avformat_queue_attached_pictures(AVFormatContext* @s) - { - return avformat_queue_attached_pictures_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_seek_file_delegate(AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags); - private static avformat_seek_file_delegate avformat_seek_file_fptr = (AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags) => - { - avformat_seek_file_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_seek_file"); - if (avformat_seek_file_fptr == null) - { - avformat_seek_file_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, 
"avformat_seek_file")); - }; - } - return avformat_seek_file_fptr(@s, @stream_index, @min_ts, @ts, @max_ts, @flags); - }; - /// Seek to timestamp ts. Seeking will be done so that the point from which all active streams can be presented successfully will be closest to ts and within min/max_ts. Active streams are all streams that have AVStream.discard < AVDISCARD_ALL. - /// media file handle - /// index of the stream which is used as time base reference - /// smallest acceptable timestamp - /// target timestamp - /// largest acceptable timestamp - /// flags - /// >=0 on success, error code otherwise - public static int avformat_seek_file(AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags) - { - return avformat_seek_file_fptr(@s, @stream_index, @min_ts, @ts, @max_ts, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_transfer_internal_stream_timing_info_delegate(AVOutputFormat* @ofmt, AVStream* @ost, AVStream* @ist, AVTimebaseSource @copy_tb); - private static avformat_transfer_internal_stream_timing_info_delegate avformat_transfer_internal_stream_timing_info_fptr = (AVOutputFormat* @ofmt, AVStream* @ost, AVStream* @ist, AVTimebaseSource @copy_tb) => - { - avformat_transfer_internal_stream_timing_info_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_transfer_internal_stream_timing_info"); - if (avformat_transfer_internal_stream_timing_info_fptr == null) - { - avformat_transfer_internal_stream_timing_info_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_transfer_internal_stream_timing_info")); - }; - } - return avformat_transfer_internal_stream_timing_info_fptr(@ofmt, @ost, @ist, @copy_tb); - }; - /// Transfer internal timing information from one stream to another. 
- /// target output format for ost - /// output stream which needs timings copy and adjustments - /// reference input stream to copy timings from - /// define from where the stream codec timebase needs to be imported - public static int avformat_transfer_internal_stream_timing_info(AVOutputFormat* @ofmt, AVStream* @ost, AVStream* @ist, AVTimebaseSource @copy_tb) - { - return avformat_transfer_internal_stream_timing_info_fptr(@ofmt, @ost, @ist, @copy_tb); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint avformat_version_delegate(); - private static avformat_version_delegate avformat_version_fptr = () => - { - avformat_version_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_version"); - if (avformat_version_fptr == null) - { - avformat_version_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_version")); - }; - } - return avformat_version_fptr(); - }; - /// Return the LIBAVFORMAT_VERSION_INT constant. - public static uint avformat_version() - { - return avformat_version_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avformat_write_header_delegate(AVFormatContext* @s, AVDictionary** @options); - private static avformat_write_header_delegate avformat_write_header_fptr = (AVFormatContext* @s, AVDictionary** @options) => - { - avformat_write_header_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avformat_write_header"); - if (avformat_write_header_fptr == null) - { - avformat_write_header_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avformat_write_header")); - }; - } - return avformat_write_header_fptr(@s, @options); - }; - /// Allocate the stream private data and write the stream header to an output media file. 
- /// Media file handle, must be allocated with avformat_alloc_context(). Its oformat field must be set to the desired output format; Its pb field must be set to an already opened AVIOContext. - /// An AVDictionary filled with AVFormatContext and muxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. - /// AVSTREAM_INIT_IN_WRITE_HEADER on success if the codec had not already been fully initialized in avformat_init, AVSTREAM_INIT_IN_INIT_OUTPUT on success if the codec had already been fully initialized in avformat_init, negative AVERROR on failure. - public static int avformat_write_header(AVFormatContext* @s, AVDictionary** @options) - { - return avformat_write_header_fptr(@s, @options); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_accept_delegate(AVIOContext* @s, AVIOContext** @c); - private static avio_accept_delegate avio_accept_fptr = (AVIOContext* @s, AVIOContext** @c) => - { - avio_accept_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_accept"); - if (avio_accept_fptr == null) - { - avio_accept_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_accept")); - }; - } - return avio_accept_fptr(@s, @c); - }; - /// Accept and allocate a client context on a server context. 
- /// the server context - /// the client context, must be unallocated - /// >= 0 on success or a negative value corresponding to an AVERROR on failure - public static int avio_accept(AVIOContext* @s, AVIOContext** @c) - { - return avio_accept_fptr(@s, @c); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVIOContext* avio_alloc_context_delegate(byte* @buffer, int @buffer_size, int @write_flag, void* @opaque, avio_alloc_context_read_packet_func @read_packet, avio_alloc_context_write_packet_func @write_packet, avio_alloc_context_seek_func @seek); - private static avio_alloc_context_delegate avio_alloc_context_fptr = (byte* @buffer, int @buffer_size, int @write_flag, void* @opaque, avio_alloc_context_read_packet_func @read_packet, avio_alloc_context_write_packet_func @write_packet, avio_alloc_context_seek_func @seek) => - { - avio_alloc_context_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_alloc_context"); - if (avio_alloc_context_fptr == null) - { - avio_alloc_context_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_alloc_context")); - }; - } - return avio_alloc_context_fptr(@buffer, @buffer_size, @write_flag, @opaque, @read_packet, @write_packet, @seek); - }; - /// Allocate and initialize an AVIOContext for buffered I/O. It must be later freed with avio_context_free(). - /// Memory block for input/output operations via AVIOContext. The buffer must be allocated with av_malloc() and friends. It may be freed and replaced with a new buffer by libavformat. AVIOContext.buffer holds the buffer currently in use, which must be later freed with av_free(). - /// The buffer size is very important for performance. For protocols with fixed blocksize it should be set to this blocksize. For others a typical size is a cache page, e.g. 4kb. - /// Set to 1 if the buffer should be writable, 0 otherwise. 
- /// An opaque pointer to user-specific data. - /// A function for refilling the buffer, may be NULL. For stream protocols, must never return 0 but rather a proper AVERROR code. - /// A function for writing the buffer contents, may be NULL. The function may not change the input buffers content. - /// A function for seeking to specified byte position, may be NULL. - /// Allocated AVIOContext or NULL on failure. - public static AVIOContext* avio_alloc_context(byte* @buffer, int @buffer_size, int @write_flag, void* @opaque, avio_alloc_context_read_packet_func @read_packet, avio_alloc_context_write_packet_func @write_packet, avio_alloc_context_seek_func @seek) - { - return avio_alloc_context_fptr(@buffer, @buffer_size, @write_flag, @opaque, @read_packet, @write_packet, @seek); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_check_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, int @flags); - private static avio_check_delegate avio_check_fptr = (string @url, int @flags) => - { - avio_check_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_check"); - if (avio_check_fptr == null) - { - avio_check_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_check")); - }; - } - return avio_check_fptr(@url, @flags); - }; - /// Return AVIO_FLAG_* access flags corresponding to the access permissions of the resource in url, or a negative value corresponding to an AVERROR code in case of failure. The returned access flags are masked by the value in flags. 
- public static int avio_check( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, int @flags) - { - return avio_check_fptr(@url, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_close_delegate(AVIOContext* @s); - private static avio_close_delegate avio_close_fptr = (AVIOContext* @s) => - { - avio_close_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_close"); - if (avio_close_fptr == null) - { - avio_close_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_close")); - }; - } - return avio_close_fptr(@s); - }; - /// Close the resource accessed by the AVIOContext s and free it. This function can only be used if s was opened by avio_open(). - /// 0 on success, an AVERROR < 0 on error. - public static int avio_close(AVIOContext* @s) - { - return avio_close_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_close_dir_delegate(AVIODirContext** @s); - private static avio_close_dir_delegate avio_close_dir_fptr = (AVIODirContext** @s) => - { - avio_close_dir_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_close_dir"); - if (avio_close_dir_fptr == null) - { - avio_close_dir_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_close_dir")); - }; - } - return avio_close_dir_fptr(@s); - }; - /// Close directory. - /// directory read context. - /// >=0 on success or negative on error. 
- public static int avio_close_dir(AVIODirContext** @s) - { - return avio_close_dir_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_close_dyn_buf_delegate(AVIOContext* @s, byte** @pbuffer); - private static avio_close_dyn_buf_delegate avio_close_dyn_buf_fptr = (AVIOContext* @s, byte** @pbuffer) => - { - avio_close_dyn_buf_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_close_dyn_buf"); - if (avio_close_dyn_buf_fptr == null) - { - avio_close_dyn_buf_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_close_dyn_buf")); - }; - } - return avio_close_dyn_buf_fptr(@s, @pbuffer); - }; - /// Return the written size and a pointer to the buffer. The buffer must be freed with av_free(). Padding of AV_INPUT_BUFFER_PADDING_SIZE is added to the buffer. - /// IO context - /// pointer to a byte buffer - /// the length of the byte buffer - public static int avio_close_dyn_buf(AVIOContext* @s, byte** @pbuffer) - { - return avio_close_dyn_buf_fptr(@s, @pbuffer); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_closep_delegate(AVIOContext** @s); - private static avio_closep_delegate avio_closep_fptr = (AVIOContext** @s) => - { - avio_closep_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_closep"); - if (avio_closep_fptr == null) - { - avio_closep_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_closep")); - }; - } - return avio_closep_fptr(@s); - }; - /// Close the resource accessed by the AVIOContext *s, free it and set the pointer pointing to it to NULL. This function can only be used if s was opened by avio_open(). - /// 0 on success, an AVERROR < 0 on error. 
- public static int avio_closep(AVIOContext** @s) - { - return avio_closep_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_context_free_delegate(AVIOContext** @s); - private static avio_context_free_delegate avio_context_free_fptr = (AVIOContext** @s) => - { - avio_context_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_context_free"); - if (avio_context_free_fptr == null) - { - avio_context_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_context_free")); - }; - } - avio_context_free_fptr(@s); - }; - /// Free the supplied IO context and everything associated with it. - /// Double pointer to the IO context. This function will write NULL into s. - public static void avio_context_free(AVIOContext** @s) - { - avio_context_free_fptr(@s); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avio_enum_protocols_delegate(void** @opaque, int @output); - private static avio_enum_protocols_delegate avio_enum_protocols_fptr = (void** @opaque, int @output) => - { - avio_enum_protocols_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_enum_protocols"); - if (avio_enum_protocols_fptr == null) - { - avio_enum_protocols_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_enum_protocols")); - }; - } - return avio_enum_protocols_fptr(@opaque, @output); - }; - /// Iterate through names of available protocols. - /// A private pointer representing current protocol. It must be a pointer to NULL on first iteration and will be updated by successive calls to avio_enum_protocols. - /// If set to 1, iterate over output protocols, otherwise over input protocols. 
- /// A static string containing the name of current protocol or NULL - public static string avio_enum_protocols(void** @opaque, int @output) - { - return avio_enum_protocols_fptr(@opaque, @output); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_feof_delegate(AVIOContext* @s); - private static avio_feof_delegate avio_feof_fptr = (AVIOContext* @s) => - { - avio_feof_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_feof"); - if (avio_feof_fptr == null) - { - avio_feof_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_feof")); - }; - } - return avio_feof_fptr(@s); - }; - /// Similar to feof() but also returns nonzero on read errors. - /// non zero if and only if at end of file or a read error happened when reading. - public static int avio_feof(AVIOContext* @s) - { - return avio_feof_fptr(@s); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avio_find_protocol_name_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url); - private static avio_find_protocol_name_delegate avio_find_protocol_name_fptr = (string @url) => - { - avio_find_protocol_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_find_protocol_name"); - if (avio_find_protocol_name_fptr == null) - { - avio_find_protocol_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_find_protocol_name")); - }; - } - return avio_find_protocol_name_fptr(@url); - }; - /// Return the name of the protocol that will handle the passed URL. - /// Name of the protocol or NULL. 
- public static string avio_find_protocol_name( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url) - { - return avio_find_protocol_name_fptr(@url); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_flush_delegate(AVIOContext* @s); - private static avio_flush_delegate avio_flush_fptr = (AVIOContext* @s) => - { - avio_flush_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_flush"); - if (avio_flush_fptr == null) - { - avio_flush_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_flush")); - }; - } - avio_flush_fptr(@s); - }; - /// Force flushing of buffered data. - public static void avio_flush(AVIOContext* @s) - { - avio_flush_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_free_directory_entry_delegate(AVIODirEntry** @entry); - private static avio_free_directory_entry_delegate avio_free_directory_entry_fptr = (AVIODirEntry** @entry) => - { - avio_free_directory_entry_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_free_directory_entry"); - if (avio_free_directory_entry_fptr == null) - { - avio_free_directory_entry_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_free_directory_entry")); - }; - } - avio_free_directory_entry_fptr(@entry); - }; - /// Free entry allocated by avio_read_dir(). - /// entry to be freed. 
- public static void avio_free_directory_entry(AVIODirEntry** @entry) - { - avio_free_directory_entry_fptr(@entry); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_get_dyn_buf_delegate(AVIOContext* @s, byte** @pbuffer); - private static avio_get_dyn_buf_delegate avio_get_dyn_buf_fptr = (AVIOContext* @s, byte** @pbuffer) => - { - avio_get_dyn_buf_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_get_dyn_buf"); - if (avio_get_dyn_buf_fptr == null) - { - avio_get_dyn_buf_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_get_dyn_buf")); - }; - } - return avio_get_dyn_buf_fptr(@s, @pbuffer); - }; - /// Return the written size and a pointer to the buffer. The AVIOContext stream is left intact. The buffer must NOT be freed. No padding is added to the buffer. - /// IO context - /// pointer to a byte buffer - /// the length of the byte buffer - public static int avio_get_dyn_buf(AVIOContext* @s, byte** @pbuffer) - { - return avio_get_dyn_buf_fptr(@s, @pbuffer); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_get_str_delegate(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen); - private static avio_get_str_delegate avio_get_str_fptr = (AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => - { - avio_get_str_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_get_str"); - if (avio_get_str_fptr == null) - { - avio_get_str_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_get_str")); - }; - } - return avio_get_str_fptr(@pb, @maxlen, @buf, @buflen); - }; - /// Read a string from pb into buf. The reading will terminate when either a NULL character was encountered, maxlen bytes have been read, or nothing more can be read from pb. 
The result is guaranteed to be NULL-terminated, it will be truncated if buf is too small. Note that the string is not interpreted or validated in any way, it might get truncated in the middle of a sequence for multi-byte encodings. - /// number of bytes read (is always < = maxlen). If reading ends on EOF or error, the return value will be one more than bytes actually read. - public static int avio_get_str(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) - { - return avio_get_str_fptr(@pb, @maxlen, @buf, @buflen); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_get_str16be_delegate(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen); - private static avio_get_str16be_delegate avio_get_str16be_fptr = (AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => - { - avio_get_str16be_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_get_str16be"); - if (avio_get_str16be_fptr == null) - { - avio_get_str16be_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_get_str16be")); - }; - } - return avio_get_str16be_fptr(@pb, @maxlen, @buf, @buflen); - }; - public static int avio_get_str16be(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) - { - return avio_get_str16be_fptr(@pb, @maxlen, @buf, @buflen); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_get_str16le_delegate(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen); - private static avio_get_str16le_delegate avio_get_str16le_fptr = (AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => - { - avio_get_str16le_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_get_str16le"); - if (avio_get_str16le_fptr == null) - { - avio_get_str16le_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_get_str16le")); - }; - } - 
return avio_get_str16le_fptr(@pb, @maxlen, @buf, @buflen); - }; - /// Read a UTF-16 string from pb and convert it to UTF-8. The reading will terminate when either a null or invalid character was encountered or maxlen bytes have been read. - /// number of bytes read (is always < = maxlen) - public static int avio_get_str16le(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) - { - return avio_get_str16le_fptr(@pb, @maxlen, @buf, @buflen); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_handshake_delegate(AVIOContext* @c); - private static avio_handshake_delegate avio_handshake_fptr = (AVIOContext* @c) => - { - avio_handshake_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_handshake"); - if (avio_handshake_fptr == null) - { - avio_handshake_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_handshake")); - }; - } - return avio_handshake_fptr(@c); - }; - /// Perform one step of the protocol handshake to accept a new client. This function must be called on a client returned by avio_accept() before using it as a read/write context. It is separate from avio_accept() because it may block. A step of the handshake is defined by places where the application may decide to change the proceedings. For example, on a protocol with a request header and a reply header, each one can constitute a step because the application may use the parameters from the request to change parameters in the reply; or each individual chunk of the request can constitute a step. If the handshake is already finished, avio_handshake() does nothing and returns 0 immediately. 
- /// the client context to perform the handshake on - /// 0 on a complete and successful handshake > 0 if the handshake progressed, but is not complete < 0 for an AVERROR code - public static int avio_handshake(AVIOContext* @c) - { - return avio_handshake_fptr(@c); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_open_delegate(AVIOContext** @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, int @flags); - private static avio_open_delegate avio_open_fptr = (AVIOContext** @s, string @url, int @flags) => - { - avio_open_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_open"); - if (avio_open_fptr == null) - { - avio_open_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_open")); - }; - } - return avio_open_fptr(@s, @url, @flags); - }; - /// Create and initialize a AVIOContext for accessing the resource indicated by url. - /// Used to return the pointer to the created AVIOContext. In case of failure the pointed to value is set to NULL. 
- /// resource to access - /// flags which control how the resource indicated by url is to be opened - /// >= 0 in case of success, a negative value corresponding to an AVERROR code in case of failure - public static int avio_open(AVIOContext** @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, int @flags) - { - return avio_open_fptr(@s, @url, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_open_dir_delegate(AVIODirContext** @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, AVDictionary** @options); - private static avio_open_dir_delegate avio_open_dir_fptr = (AVIODirContext** @s, string @url, AVDictionary** @options) => - { - avio_open_dir_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_open_dir"); - if (avio_open_dir_fptr == null) - { - avio_open_dir_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_open_dir")); - }; - } - return avio_open_dir_fptr(@s, @url, @options); - }; - /// Open directory for reading. - /// directory read context. Pointer to a NULL pointer must be passed. - /// directory to be listed. - /// A dictionary filled with protocol-private options. On return this parameter will be destroyed and replaced with a dictionary containing options that were not found. May be NULL. - /// >=0 on success or negative on error. 
- public static int avio_open_dir(AVIODirContext** @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, AVDictionary** @options) - { - return avio_open_dir_fptr(@s, @url, @options); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_open_dyn_buf_delegate(AVIOContext** @s); - private static avio_open_dyn_buf_delegate avio_open_dyn_buf_fptr = (AVIOContext** @s) => - { - avio_open_dyn_buf_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_open_dyn_buf"); - if (avio_open_dyn_buf_fptr == null) - { - avio_open_dyn_buf_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_open_dyn_buf")); - }; - } - return avio_open_dyn_buf_fptr(@s); - }; - /// Open a write only memory stream. - /// new IO context - /// zero if no error. - public static int avio_open_dyn_buf(AVIOContext** @s) - { - return avio_open_dyn_buf_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_open2_delegate(AVIOContext** @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options); - private static avio_open2_delegate avio_open2_fptr = (AVIOContext** @s, string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options) => - { - avio_open2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_open2"); - if (avio_open2_fptr == null) - { - avio_open2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_open2")); - }; - } - return avio_open2_fptr(@s, @url, @flags, @int_cb, @options); - }; - /// 
Create and initialize a AVIOContext for accessing the resource indicated by url. - /// Used to return the pointer to the created AVIOContext. In case of failure the pointed to value is set to NULL. - /// resource to access - /// flags which control how the resource indicated by url is to be opened - /// an interrupt callback to be used at the protocols level - /// A dictionary filled with protocol-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. - /// >= 0 in case of success, a negative value corresponding to an AVERROR code in case of failure - public static int avio_open2(AVIOContext** @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options) - { - return avio_open2_fptr(@s, @url, @flags, @int_cb, @options); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_pause_delegate(AVIOContext* @h, int @pause); - private static avio_pause_delegate avio_pause_fptr = (AVIOContext* @h, int @pause) => - { - avio_pause_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_pause"); - if (avio_pause_fptr == null) - { - avio_pause_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_pause")); - }; - } - return avio_pause_fptr(@h, @pause); - }; - /// Pause and resume playing - only meaningful if using a network streaming protocol (e.g. MMS). 
- /// IO context from which to call the read_pause function pointer - /// 1 for pause, 0 for resume - public static int avio_pause(AVIOContext* @h, int @pause) - { - return avio_pause_fptr(@h, @pause); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_print_string_array_delegate(AVIOContext* @s, byte*[] @strings); - private static avio_print_string_array_delegate avio_print_string_array_fptr = (AVIOContext* @s, byte*[] @strings) => - { - avio_print_string_array_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_print_string_array"); - if (avio_print_string_array_fptr == null) - { - avio_print_string_array_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_print_string_array")); - }; - } - avio_print_string_array_fptr(@s, @strings); - }; - /// Write a NULL terminated array of strings to the context. Usually you don't need to use this function directly but its macro wrapper, avio_print. - public static void avio_print_string_array(AVIOContext* @s, byte*[] @strings) - { - avio_print_string_array_fptr(@s, @strings); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_printf_delegate(AVIOContext* @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt); - private static avio_printf_delegate avio_printf_fptr = (AVIOContext* @s, string @fmt) => - { - avio_printf_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_printf"); - if (avio_printf_fptr == null) - { - avio_printf_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_printf")); - }; - } - return avio_printf_fptr(@s, @fmt); - }; - /// Writes a formatted string to the context. 
- /// number of bytes written, < 0 on error. - public static int avio_printf(AVIOContext* @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt) - { - return avio_printf_fptr(@s, @fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVClass* avio_protocol_get_class_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static avio_protocol_get_class_delegate avio_protocol_get_class_fptr = (string @name) => - { - avio_protocol_get_class_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_protocol_get_class"); - if (avio_protocol_get_class_fptr == null) - { - avio_protocol_get_class_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_protocol_get_class")); - }; - } - return avio_protocol_get_class_fptr(@name); - }; - /// Get AVClass by names of available protocols. 
- /// A AVClass of input protocol name or NULL - public static AVClass* avio_protocol_get_class( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return avio_protocol_get_class_fptr(@name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_put_str_delegate(AVIOContext* @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @str); - private static avio_put_str_delegate avio_put_str_fptr = (AVIOContext* @s, string @str) => - { - avio_put_str_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_put_str"); - if (avio_put_str_fptr == null) - { - avio_put_str_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_put_str")); - }; - } - return avio_put_str_fptr(@s, @str); - }; - /// Write a NULL-terminated string. - /// number of bytes written. 
- public static int avio_put_str(AVIOContext* @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @str) - { - return avio_put_str_fptr(@s, @str); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_put_str16be_delegate(AVIOContext* @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @str); - private static avio_put_str16be_delegate avio_put_str16be_fptr = (AVIOContext* @s, string @str) => - { - avio_put_str16be_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_put_str16be"); - if (avio_put_str16be_fptr == null) - { - avio_put_str16be_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_put_str16be")); - }; - } - return avio_put_str16be_fptr(@s, @str); - }; - /// Convert an UTF-8 string to UTF-16BE and write it. - /// the AVIOContext - /// NULL-terminated UTF-8 string - /// number of bytes written. 
- public static int avio_put_str16be(AVIOContext* @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @str) - { - return avio_put_str16be_fptr(@s, @str); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_put_str16le_delegate(AVIOContext* @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @str); - private static avio_put_str16le_delegate avio_put_str16le_fptr = (AVIOContext* @s, string @str) => - { - avio_put_str16le_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_put_str16le"); - if (avio_put_str16le_fptr == null) - { - avio_put_str16le_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_put_str16le")); - }; - } - return avio_put_str16le_fptr(@s, @str); - }; - /// Convert an UTF-8 string to UTF-16LE and write it. - /// the AVIOContext - /// NULL-terminated UTF-8 string - /// number of bytes written. 
- public static int avio_put_str16le(AVIOContext* @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @str) - { - return avio_put_str16le_fptr(@s, @str); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_r8_delegate(AVIOContext* @s); - private static avio_r8_delegate avio_r8_fptr = (AVIOContext* @s) => - { - avio_r8_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_r8"); - if (avio_r8_fptr == null) - { - avio_r8_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_r8")); - }; - } - return avio_r8_fptr(@s); - }; - /// @{ - public static int avio_r8(AVIOContext* @s) - { - return avio_r8_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint avio_rb16_delegate(AVIOContext* @s); - private static avio_rb16_delegate avio_rb16_fptr = (AVIOContext* @s) => - { - avio_rb16_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_rb16"); - if (avio_rb16_fptr == null) - { - avio_rb16_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_rb16")); - }; - } - return avio_rb16_fptr(@s); - }; - public static uint avio_rb16(AVIOContext* @s) - { - return avio_rb16_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint avio_rb24_delegate(AVIOContext* @s); - private static avio_rb24_delegate avio_rb24_fptr = (AVIOContext* @s) => - { - avio_rb24_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_rb24"); - if (avio_rb24_fptr == null) - { - avio_rb24_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_rb24")); - }; - } - return avio_rb24_fptr(@s); 
- }; - public static uint avio_rb24(AVIOContext* @s) - { - return avio_rb24_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint avio_rb32_delegate(AVIOContext* @s); - private static avio_rb32_delegate avio_rb32_fptr = (AVIOContext* @s) => - { - avio_rb32_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_rb32"); - if (avio_rb32_fptr == null) - { - avio_rb32_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_rb32")); - }; - } - return avio_rb32_fptr(@s); - }; - public static uint avio_rb32(AVIOContext* @s) - { - return avio_rb32_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate ulong avio_rb64_delegate(AVIOContext* @s); - private static avio_rb64_delegate avio_rb64_fptr = (AVIOContext* @s) => - { - avio_rb64_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_rb64"); - if (avio_rb64_fptr == null) - { - avio_rb64_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_rb64")); - }; - } - return avio_rb64_fptr(@s); - }; - public static ulong avio_rb64(AVIOContext* @s) - { - return avio_rb64_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_read_delegate(AVIOContext* @s, byte* @buf, int @size); - private static avio_read_delegate avio_read_fptr = (AVIOContext* @s, byte* @buf, int @size) => - { - avio_read_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_read"); - if (avio_read_fptr == null) - { - avio_read_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_read")); - }; - } - return avio_read_fptr(@s, @buf, @size); - }; - /// Read size bytes from AVIOContext into buf. 
- /// number of bytes read or AVERROR - public static int avio_read(AVIOContext* @s, byte* @buf, int @size) - { - return avio_read_fptr(@s, @buf, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_read_dir_delegate(AVIODirContext* @s, AVIODirEntry** @next); - private static avio_read_dir_delegate avio_read_dir_fptr = (AVIODirContext* @s, AVIODirEntry** @next) => - { - avio_read_dir_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_read_dir"); - if (avio_read_dir_fptr == null) - { - avio_read_dir_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_read_dir")); - }; - } - return avio_read_dir_fptr(@s, @next); - }; - /// Get next directory entry. - /// directory read context. - /// next entry or NULL when no more entries. - /// >=0 on success or negative on error. End of list is not considered an error. - public static int avio_read_dir(AVIODirContext* @s, AVIODirEntry** @next) - { - return avio_read_dir_fptr(@s, @next); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_read_partial_delegate(AVIOContext* @s, byte* @buf, int @size); - private static avio_read_partial_delegate avio_read_partial_fptr = (AVIOContext* @s, byte* @buf, int @size) => - { - avio_read_partial_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_read_partial"); - if (avio_read_partial_fptr == null) - { - avio_read_partial_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_read_partial")); - }; - } - return avio_read_partial_fptr(@s, @buf, @size); - }; - /// Read size bytes from AVIOContext into buf. Unlike avio_read(), this is allowed to read fewer bytes than requested. The missing bytes can be read in the next call. This always tries to read at least 1 byte. Useful to reduce latency in certain cases. 
- /// number of bytes read or AVERROR - public static int avio_read_partial(AVIOContext* @s, byte* @buf, int @size) - { - return avio_read_partial_fptr(@s, @buf, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_read_to_bprint_delegate(AVIOContext* @h, AVBPrint* @pb, ulong @max_size); - private static avio_read_to_bprint_delegate avio_read_to_bprint_fptr = (AVIOContext* @h, AVBPrint* @pb, ulong @max_size) => - { - avio_read_to_bprint_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_read_to_bprint"); - if (avio_read_to_bprint_fptr == null) - { - avio_read_to_bprint_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_read_to_bprint")); - }; - } - return avio_read_to_bprint_fptr(@h, @pb, @max_size); - }; - /// Read contents of h into print buffer, up to max_size bytes, or up to EOF. - /// 0 for success (max_size bytes read or EOF reached), negative error code otherwise - public static int avio_read_to_bprint(AVIOContext* @h, AVBPrint* @pb, ulong @max_size) - { - return avio_read_to_bprint_fptr(@h, @pb, @max_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint avio_rl16_delegate(AVIOContext* @s); - private static avio_rl16_delegate avio_rl16_fptr = (AVIOContext* @s) => - { - avio_rl16_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_rl16"); - if (avio_rl16_fptr == null) - { - avio_rl16_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_rl16")); - }; - } - return avio_rl16_fptr(@s); - }; - public static uint avio_rl16(AVIOContext* @s) - { - return avio_rl16_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint avio_rl24_delegate(AVIOContext* @s); - private static avio_rl24_delegate avio_rl24_fptr = (AVIOContext* @s) 
=> - { - avio_rl24_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_rl24"); - if (avio_rl24_fptr == null) - { - avio_rl24_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_rl24")); - }; - } - return avio_rl24_fptr(@s); - }; - public static uint avio_rl24(AVIOContext* @s) - { - return avio_rl24_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint avio_rl32_delegate(AVIOContext* @s); - private static avio_rl32_delegate avio_rl32_fptr = (AVIOContext* @s) => - { - avio_rl32_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_rl32"); - if (avio_rl32_fptr == null) - { - avio_rl32_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_rl32")); - }; - } - return avio_rl32_fptr(@s); - }; - public static uint avio_rl32(AVIOContext* @s) - { - return avio_rl32_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate ulong avio_rl64_delegate(AVIOContext* @s); - private static avio_rl64_delegate avio_rl64_fptr = (AVIOContext* @s) => - { - avio_rl64_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_rl64"); - if (avio_rl64_fptr == null) - { - avio_rl64_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_rl64")); - }; - } - return avio_rl64_fptr(@s); - }; - public static ulong avio_rl64(AVIOContext* @s) - { - return avio_rl64_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long avio_seek_delegate(AVIOContext* @s, long @offset, int @whence); - private static avio_seek_delegate avio_seek_fptr = (AVIOContext* @s, long @offset, int @whence) => - { - avio_seek_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_seek"); - if (avio_seek_fptr == null) - { - 
avio_seek_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_seek")); - }; - } - return avio_seek_fptr(@s, @offset, @whence); - }; - /// fseek() equivalent for AVIOContext. - /// new position or AVERROR. - public static long avio_seek(AVIOContext* @s, long @offset, int @whence) - { - return avio_seek_fptr(@s, @offset, @whence); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long avio_seek_time_delegate(AVIOContext* @h, int @stream_index, long @timestamp, int @flags); - private static avio_seek_time_delegate avio_seek_time_fptr = (AVIOContext* @h, int @stream_index, long @timestamp, int @flags) => - { - avio_seek_time_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_seek_time"); - if (avio_seek_time_fptr == null) - { - avio_seek_time_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_seek_time")); - }; - } - return avio_seek_time_fptr(@h, @stream_index, @timestamp, @flags); - }; - /// Seek to a given timestamp relative to some component stream. Only meaningful if using a network streaming protocol (e.g. MMS.). - /// IO context from which to call the seek function pointers - /// The stream index that the timestamp is relative to. If stream_index is (-1) the timestamp should be in AV_TIME_BASE units from the beginning of the presentation. If a stream_index >= 0 is used and the protocol does not support seeking based on component streams, the call will fail. - /// timestamp in AVStream.time_base units or if there is no stream specified then in AV_TIME_BASE units. - /// Optional combination of AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE and AVSEEK_FLAG_ANY. The protocol may silently ignore AVSEEK_FLAG_BACKWARD and AVSEEK_FLAG_ANY, but AVSEEK_FLAG_BYTE will fail if used and not supported. 
- /// >= 0 on success - public static long avio_seek_time(AVIOContext* @h, int @stream_index, long @timestamp, int @flags) - { - return avio_seek_time_fptr(@h, @stream_index, @timestamp, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long avio_size_delegate(AVIOContext* @s); - private static avio_size_delegate avio_size_fptr = (AVIOContext* @s) => - { - avio_size_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_size"); - if (avio_size_fptr == null) - { - avio_size_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_size")); - }; - } - return avio_size_fptr(@s); - }; - /// Get the filesize. - /// filesize or AVERROR - public static long avio_size(AVIOContext* @s) - { - return avio_size_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long avio_skip_delegate(AVIOContext* @s, long @offset); - private static avio_skip_delegate avio_skip_fptr = (AVIOContext* @s, long @offset) => - { - avio_skip_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_skip"); - if (avio_skip_fptr == null) - { - avio_skip_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_skip")); - }; - } - return avio_skip_fptr(@s, @offset); - }; - /// Skip given number of bytes forward - /// new position or AVERROR. 
- public static long avio_skip(AVIOContext* @s, long @offset) - { - return avio_skip_fptr(@s, @offset); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int avio_vprintf_delegate(AVIOContext* @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt, byte* @ap); - private static avio_vprintf_delegate avio_vprintf_fptr = (AVIOContext* @s, string @fmt, byte* @ap) => - { - avio_vprintf_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_vprintf"); - if (avio_vprintf_fptr == null) - { - avio_vprintf_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_vprintf")); - }; - } - return avio_vprintf_fptr(@s, @fmt, @ap); - }; - /// Writes a formatted string to the context taking a va_list. - /// number of bytes written, < 0 on error. - public static int avio_vprintf(AVIOContext* @s, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt, byte* @ap) - { - return avio_vprintf_fptr(@s, @fmt, @ap); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_w8_delegate(AVIOContext* @s, int @b); - private static avio_w8_delegate avio_w8_fptr = (AVIOContext* @s, int @b) => - { - avio_w8_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_w8"); - if (avio_w8_fptr == null) - { - avio_w8_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_w8")); - }; - } - avio_w8_fptr(@s, @b); - }; - public static void avio_w8(AVIOContext* @s, int @b) - { - avio_w8_fptr(@s, @b); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate 
void avio_wb16_delegate(AVIOContext* @s, uint @val); - private static avio_wb16_delegate avio_wb16_fptr = (AVIOContext* @s, uint @val) => - { - avio_wb16_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_wb16"); - if (avio_wb16_fptr == null) - { - avio_wb16_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_wb16")); - }; - } - avio_wb16_fptr(@s, @val); - }; - public static void avio_wb16(AVIOContext* @s, uint @val) - { - avio_wb16_fptr(@s, @val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_wb24_delegate(AVIOContext* @s, uint @val); - private static avio_wb24_delegate avio_wb24_fptr = (AVIOContext* @s, uint @val) => - { - avio_wb24_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_wb24"); - if (avio_wb24_fptr == null) - { - avio_wb24_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_wb24")); - }; - } - avio_wb24_fptr(@s, @val); - }; - public static void avio_wb24(AVIOContext* @s, uint @val) - { - avio_wb24_fptr(@s, @val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_wb32_delegate(AVIOContext* @s, uint @val); - private static avio_wb32_delegate avio_wb32_fptr = (AVIOContext* @s, uint @val) => - { - avio_wb32_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_wb32"); - if (avio_wb32_fptr == null) - { - avio_wb32_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_wb32")); - }; - } - avio_wb32_fptr(@s, @val); - }; - public static void avio_wb32(AVIOContext* @s, uint @val) - { - avio_wb32_fptr(@s, @val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_wb64_delegate(AVIOContext* @s, ulong @val); - private static avio_wb64_delegate 
avio_wb64_fptr = (AVIOContext* @s, ulong @val) => - { - avio_wb64_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_wb64"); - if (avio_wb64_fptr == null) - { - avio_wb64_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_wb64")); - }; - } - avio_wb64_fptr(@s, @val); - }; - public static void avio_wb64(AVIOContext* @s, ulong @val) - { - avio_wb64_fptr(@s, @val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_wl16_delegate(AVIOContext* @s, uint @val); - private static avio_wl16_delegate avio_wl16_fptr = (AVIOContext* @s, uint @val) => - { - avio_wl16_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_wl16"); - if (avio_wl16_fptr == null) - { - avio_wl16_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_wl16")); - }; - } - avio_wl16_fptr(@s, @val); - }; - public static void avio_wl16(AVIOContext* @s, uint @val) - { - avio_wl16_fptr(@s, @val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_wl24_delegate(AVIOContext* @s, uint @val); - private static avio_wl24_delegate avio_wl24_fptr = (AVIOContext* @s, uint @val) => - { - avio_wl24_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_wl24"); - if (avio_wl24_fptr == null) - { - avio_wl24_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_wl24")); - }; - } - avio_wl24_fptr(@s, @val); - }; - public static void avio_wl24(AVIOContext* @s, uint @val) - { - avio_wl24_fptr(@s, @val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_wl32_delegate(AVIOContext* @s, uint @val); - private static avio_wl32_delegate avio_wl32_fptr = (AVIOContext* @s, uint @val) => - { - avio_wl32_fptr = 
GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_wl32"); - if (avio_wl32_fptr == null) - { - avio_wl32_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_wl32")); - }; - } - avio_wl32_fptr(@s, @val); - }; - public static void avio_wl32(AVIOContext* @s, uint @val) - { - avio_wl32_fptr(@s, @val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_wl64_delegate(AVIOContext* @s, ulong @val); - private static avio_wl64_delegate avio_wl64_fptr = (AVIOContext* @s, ulong @val) => - { - avio_wl64_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_wl64"); - if (avio_wl64_fptr == null) - { - avio_wl64_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_wl64")); - }; - } - avio_wl64_fptr(@s, @val); - }; - public static void avio_wl64(AVIOContext* @s, ulong @val) - { - avio_wl64_fptr(@s, @val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_write_delegate(AVIOContext* @s, byte* @buf, int @size); - private static avio_write_delegate avio_write_fptr = (AVIOContext* @s, byte* @buf, int @size) => - { - avio_write_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_write"); - if (avio_write_fptr == null) - { - avio_write_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_write")); - }; - } - avio_write_fptr(@s, @buf, @size); - }; - public static void avio_write(AVIOContext* @s, byte* @buf, int @size) - { - avio_write_fptr(@s, @buf, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void avio_write_marker_delegate(AVIOContext* @s, long @time, AVIODataMarkerType @type); - private static avio_write_marker_delegate avio_write_marker_fptr = (AVIOContext* @s, long @time, 
AVIODataMarkerType @type) => - { - avio_write_marker_fptr = GetFunctionDelegate(GetOrLoadLibrary("avformat"), "avio_write_marker"); - if (avio_write_marker_fptr == null) - { - avio_write_marker_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avio_write_marker")); - }; - } - avio_write_marker_fptr(@s, @time, @type); - }; - /// Mark the written bytestream as a specific type. - /// the stream time the current bytestream pos corresponds to (in AV_TIME_BASE units), or AV_NOPTS_VALUE if unknown or not applicable - /// the kind of data written starting at the current pos - public static void avio_write_marker(AVIOContext* @s, long @time, AVIODataMarkerType @type) - { - avio_write_marker_fptr(@s, @time, @type); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_add_q_delegate(AVRational @b, AVRational @c); - private static av_add_q_delegate av_add_q_fptr = (AVRational @b, AVRational @c) => - { - av_add_q_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_add_q"); - if (av_add_q_fptr == null) - { - av_add_q_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_add_q")); - }; - } - return av_add_q_fptr(@b, @c); - }; - /// Add two rationals. 
- /// First rational - /// Second rational - /// b+c - public static AVRational av_add_q(AVRational @b, AVRational @c) - { - return av_add_q_fptr(@b, @c); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long av_add_stable_delegate(AVRational @ts_tb, long @ts, AVRational @inc_tb, long @inc); - private static av_add_stable_delegate av_add_stable_fptr = (AVRational @ts_tb, long @ts, AVRational @inc_tb, long @inc) => - { - av_add_stable_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_add_stable"); - if (av_add_stable_fptr == null) - { - av_add_stable_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_add_stable")); - }; - } - return av_add_stable_fptr(@ts_tb, @ts, @inc_tb, @inc); - }; - /// Add a value to a timestamp. - /// Input timestamp time base - /// Input timestamp - /// Time base of `inc` - /// Value to be added - public static long av_add_stable(AVRational @ts_tb, long @ts, AVRational @inc_tb, long @inc) - { - return av_add_stable_fptr(@ts_tb, @ts, @inc_tb, @inc); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVAudioFifo* av_audio_fifo_alloc_delegate(AVSampleFormat @sample_fmt, int @channels, int @nb_samples); - private static av_audio_fifo_alloc_delegate av_audio_fifo_alloc_fptr = (AVSampleFormat @sample_fmt, int @channels, int @nb_samples) => - { - av_audio_fifo_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_audio_fifo_alloc"); - if (av_audio_fifo_alloc_fptr == null) - { - av_audio_fifo_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_audio_fifo_alloc")); - }; - } - return av_audio_fifo_alloc_fptr(@sample_fmt, @channels, @nb_samples); - }; - /// Allocate an AVAudioFifo. 
- /// sample format - /// number of channels - /// initial allocation size, in samples - /// newly allocated AVAudioFifo, or NULL on error - public static AVAudioFifo* av_audio_fifo_alloc(AVSampleFormat @sample_fmt, int @channels, int @nb_samples) - { - return av_audio_fifo_alloc_fptr(@sample_fmt, @channels, @nb_samples); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_audio_fifo_drain_delegate(AVAudioFifo* @af, int @nb_samples); - private static av_audio_fifo_drain_delegate av_audio_fifo_drain_fptr = (AVAudioFifo* @af, int @nb_samples) => - { - av_audio_fifo_drain_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_audio_fifo_drain"); - if (av_audio_fifo_drain_fptr == null) - { - av_audio_fifo_drain_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_audio_fifo_drain")); - }; - } - return av_audio_fifo_drain_fptr(@af, @nb_samples); - }; - /// Drain data from an AVAudioFifo. - /// AVAudioFifo to drain - /// number of samples to drain - /// 0 if OK, or negative AVERROR code on failure - public static int av_audio_fifo_drain(AVAudioFifo* @af, int @nb_samples) - { - return av_audio_fifo_drain_fptr(@af, @nb_samples); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_audio_fifo_free_delegate(AVAudioFifo* @af); - private static av_audio_fifo_free_delegate av_audio_fifo_free_fptr = (AVAudioFifo* @af) => - { - av_audio_fifo_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_audio_fifo_free"); - if (av_audio_fifo_free_fptr == null) - { - av_audio_fifo_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_audio_fifo_free")); - }; - } - av_audio_fifo_free_fptr(@af); - }; - /// Free an AVAudioFifo. 
- /// AVAudioFifo to free - public static void av_audio_fifo_free(AVAudioFifo* @af) - { - av_audio_fifo_free_fptr(@af); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_audio_fifo_peek_delegate(AVAudioFifo* @af, void** @data, int @nb_samples); - private static av_audio_fifo_peek_delegate av_audio_fifo_peek_fptr = (AVAudioFifo* @af, void** @data, int @nb_samples) => - { - av_audio_fifo_peek_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_audio_fifo_peek"); - if (av_audio_fifo_peek_fptr == null) - { - av_audio_fifo_peek_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_audio_fifo_peek")); - }; - } - return av_audio_fifo_peek_fptr(@af, @data, @nb_samples); - }; - /// Peek data from an AVAudioFifo. - /// AVAudioFifo to read from - /// audio data plane pointers - /// number of samples to peek - /// number of samples actually peek, or negative AVERROR code on failure. The number of samples actually peek will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. 
- public static int av_audio_fifo_peek(AVAudioFifo* @af, void** @data, int @nb_samples) - { - return av_audio_fifo_peek_fptr(@af, @data, @nb_samples); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_audio_fifo_peek_at_delegate(AVAudioFifo* @af, void** @data, int @nb_samples, int @offset); - private static av_audio_fifo_peek_at_delegate av_audio_fifo_peek_at_fptr = (AVAudioFifo* @af, void** @data, int @nb_samples, int @offset) => - { - av_audio_fifo_peek_at_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_audio_fifo_peek_at"); - if (av_audio_fifo_peek_at_fptr == null) - { - av_audio_fifo_peek_at_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_audio_fifo_peek_at")); - }; - } - return av_audio_fifo_peek_at_fptr(@af, @data, @nb_samples, @offset); - }; - /// Peek data from an AVAudioFifo. - /// AVAudioFifo to read from - /// audio data plane pointers - /// number of samples to peek - /// offset from current read position - /// number of samples actually peek, or negative AVERROR code on failure. The number of samples actually peek will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. 
- public static int av_audio_fifo_peek_at(AVAudioFifo* @af, void** @data, int @nb_samples, int @offset) - { - return av_audio_fifo_peek_at_fptr(@af, @data, @nb_samples, @offset); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_audio_fifo_read_delegate(AVAudioFifo* @af, void** @data, int @nb_samples); - private static av_audio_fifo_read_delegate av_audio_fifo_read_fptr = (AVAudioFifo* @af, void** @data, int @nb_samples) => - { - av_audio_fifo_read_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_audio_fifo_read"); - if (av_audio_fifo_read_fptr == null) - { - av_audio_fifo_read_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_audio_fifo_read")); - }; - } - return av_audio_fifo_read_fptr(@af, @data, @nb_samples); - }; - /// Read data from an AVAudioFifo. - /// AVAudioFifo to read from - /// audio data plane pointers - /// number of samples to read - /// number of samples actually read, or negative AVERROR code on failure. The number of samples actually read will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. 
- public static int av_audio_fifo_read(AVAudioFifo* @af, void** @data, int @nb_samples) - { - return av_audio_fifo_read_fptr(@af, @data, @nb_samples); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_audio_fifo_realloc_delegate(AVAudioFifo* @af, int @nb_samples); - private static av_audio_fifo_realloc_delegate av_audio_fifo_realloc_fptr = (AVAudioFifo* @af, int @nb_samples) => - { - av_audio_fifo_realloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_audio_fifo_realloc"); - if (av_audio_fifo_realloc_fptr == null) - { - av_audio_fifo_realloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_audio_fifo_realloc")); - }; - } - return av_audio_fifo_realloc_fptr(@af, @nb_samples); - }; - /// Reallocate an AVAudioFifo. - /// AVAudioFifo to reallocate - /// new allocation size, in samples - /// 0 if OK, or negative AVERROR code on failure - public static int av_audio_fifo_realloc(AVAudioFifo* @af, int @nb_samples) - { - return av_audio_fifo_realloc_fptr(@af, @nb_samples); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_audio_fifo_reset_delegate(AVAudioFifo* @af); - private static av_audio_fifo_reset_delegate av_audio_fifo_reset_fptr = (AVAudioFifo* @af) => - { - av_audio_fifo_reset_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_audio_fifo_reset"); - if (av_audio_fifo_reset_fptr == null) - { - av_audio_fifo_reset_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_audio_fifo_reset")); - }; - } - av_audio_fifo_reset_fptr(@af); - }; - /// Reset the AVAudioFifo buffer. 
- /// AVAudioFifo to reset - public static void av_audio_fifo_reset(AVAudioFifo* @af) - { - av_audio_fifo_reset_fptr(@af); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_audio_fifo_size_delegate(AVAudioFifo* @af); - private static av_audio_fifo_size_delegate av_audio_fifo_size_fptr = (AVAudioFifo* @af) => - { - av_audio_fifo_size_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_audio_fifo_size"); - if (av_audio_fifo_size_fptr == null) - { - av_audio_fifo_size_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_audio_fifo_size")); - }; - } - return av_audio_fifo_size_fptr(@af); - }; - /// Get the current number of samples in the AVAudioFifo available for reading. - /// the AVAudioFifo to query - /// number of samples available for reading - public static int av_audio_fifo_size(AVAudioFifo* @af) - { - return av_audio_fifo_size_fptr(@af); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_audio_fifo_space_delegate(AVAudioFifo* @af); - private static av_audio_fifo_space_delegate av_audio_fifo_space_fptr = (AVAudioFifo* @af) => - { - av_audio_fifo_space_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_audio_fifo_space"); - if (av_audio_fifo_space_fptr == null) - { - av_audio_fifo_space_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_audio_fifo_space")); - }; - } - return av_audio_fifo_space_fptr(@af); - }; - /// Get the current number of samples in the AVAudioFifo available for writing. 
- /// the AVAudioFifo to query - /// number of samples available for writing - public static int av_audio_fifo_space(AVAudioFifo* @af) - { - return av_audio_fifo_space_fptr(@af); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_audio_fifo_write_delegate(AVAudioFifo* @af, void** @data, int @nb_samples); - private static av_audio_fifo_write_delegate av_audio_fifo_write_fptr = (AVAudioFifo* @af, void** @data, int @nb_samples) => - { - av_audio_fifo_write_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_audio_fifo_write"); - if (av_audio_fifo_write_fptr == null) - { - av_audio_fifo_write_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_audio_fifo_write")); - }; - } - return av_audio_fifo_write_fptr(@af, @data, @nb_samples); - }; - /// Write data to an AVAudioFifo. - /// AVAudioFifo to write to - /// audio data plane pointers - /// number of samples to write - /// number of samples actually written, or negative AVERROR code on failure. If successful, the number of samples actually written will always be nb_samples. 
- public static int av_audio_fifo_write(AVAudioFifo* @af, void** @data, int @nb_samples) - { - return av_audio_fifo_write_fptr(@af, @data, @nb_samples); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_bprint_channel_layout_delegate(AVBPrint* @bp, int @nb_channels, ulong @channel_layout); - private static av_bprint_channel_layout_delegate av_bprint_channel_layout_fptr = (AVBPrint* @bp, int @nb_channels, ulong @channel_layout) => - { - av_bprint_channel_layout_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_bprint_channel_layout"); - if (av_bprint_channel_layout_fptr == null) - { - av_bprint_channel_layout_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_bprint_channel_layout")); - }; - } - av_bprint_channel_layout_fptr(@bp, @nb_channels, @channel_layout); - }; - /// Append a description of a channel layout to a bprint buffer. - [Obsolete("use av_channel_layout_describe()")] - public static void av_bprint_channel_layout(AVBPrint* @bp, int @nb_channels, ulong @channel_layout) - { - av_bprint_channel_layout_fptr(@bp, @nb_channels, @channel_layout); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferRef* av_buffer_alloc_delegate(ulong @size); - private static av_buffer_alloc_delegate av_buffer_alloc_fptr = (ulong @size) => - { - av_buffer_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_alloc"); - if (av_buffer_alloc_fptr == null) - { - av_buffer_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_alloc")); - }; - } - return av_buffer_alloc_fptr(@size); - }; - /// Allocate an AVBuffer of the given size using av_malloc(). 
- /// an AVBufferRef of given size or NULL when out of memory - public static AVBufferRef* av_buffer_alloc(ulong @size) - { - return av_buffer_alloc_fptr(@size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferRef* av_buffer_allocz_delegate(ulong @size); - private static av_buffer_allocz_delegate av_buffer_allocz_fptr = (ulong @size) => - { - av_buffer_allocz_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_allocz"); - if (av_buffer_allocz_fptr == null) - { - av_buffer_allocz_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_allocz")); - }; - } - return av_buffer_allocz_fptr(@size); - }; - /// Same as av_buffer_alloc(), except the returned buffer will be initialized to zero. - public static AVBufferRef* av_buffer_allocz(ulong @size) - { - return av_buffer_allocz_fptr(@size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferRef* av_buffer_create_delegate(byte* @data, ulong @size, av_buffer_create_free_func @free, void* @opaque, int @flags); - private static av_buffer_create_delegate av_buffer_create_fptr = (byte* @data, ulong @size, av_buffer_create_free_func @free, void* @opaque, int @flags) => - { - av_buffer_create_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_create"); - if (av_buffer_create_fptr == null) - { - av_buffer_create_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_create")); - }; - } - return av_buffer_create_fptr(@data, @size, @free, @opaque, @flags); - }; - /// Create an AVBuffer from an existing array. 
- /// data array - /// size of data in bytes - /// a callback for freeing this buffer's data - /// parameter to be got for processing or passed to free - /// a combination of AV_BUFFER_FLAG_* - /// an AVBufferRef referring to data on success, NULL on failure. - public static AVBufferRef* av_buffer_create(byte* @data, ulong @size, av_buffer_create_free_func @free, void* @opaque, int @flags) - { - return av_buffer_create_fptr(@data, @size, @free, @opaque, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_buffer_default_free_delegate(void* @opaque, byte* @data); - private static av_buffer_default_free_delegate av_buffer_default_free_fptr = (void* @opaque, byte* @data) => - { - av_buffer_default_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_default_free"); - if (av_buffer_default_free_fptr == null) - { - av_buffer_default_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_default_free")); - }; - } - av_buffer_default_free_fptr(@opaque, @data); - }; - /// Default free callback, which calls av_free() on the buffer data. This function is meant to be passed to av_buffer_create(), not called directly. 
- public static void av_buffer_default_free(void* @opaque, byte* @data) - { - av_buffer_default_free_fptr(@opaque, @data); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_buffer_get_opaque_delegate(AVBufferRef* @buf); - private static av_buffer_get_opaque_delegate av_buffer_get_opaque_fptr = (AVBufferRef* @buf) => - { - av_buffer_get_opaque_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_get_opaque"); - if (av_buffer_get_opaque_fptr == null) - { - av_buffer_get_opaque_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_get_opaque")); - }; - } - return av_buffer_get_opaque_fptr(@buf); - }; - /// Returns the opaque parameter set by av_buffer_create. - /// the opaque parameter set by av_buffer_create. - public static void* av_buffer_get_opaque(AVBufferRef* @buf) - { - return av_buffer_get_opaque_fptr(@buf); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffer_get_ref_count_delegate(AVBufferRef* @buf); - private static av_buffer_get_ref_count_delegate av_buffer_get_ref_count_fptr = (AVBufferRef* @buf) => - { - av_buffer_get_ref_count_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_get_ref_count"); - if (av_buffer_get_ref_count_fptr == null) - { - av_buffer_get_ref_count_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_get_ref_count")); - }; - } - return av_buffer_get_ref_count_fptr(@buf); - }; - public static int av_buffer_get_ref_count(AVBufferRef* @buf) - { - return av_buffer_get_ref_count_fptr(@buf); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffer_is_writable_delegate(AVBufferRef* @buf); - private static av_buffer_is_writable_delegate av_buffer_is_writable_fptr = (AVBufferRef* @buf) => - { - 
av_buffer_is_writable_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_is_writable"); - if (av_buffer_is_writable_fptr == null) - { - av_buffer_is_writable_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_is_writable")); - }; - } - return av_buffer_is_writable_fptr(@buf); - }; - /// Returns 1 if the caller may write to the data referred to by buf (which is true if and only if buf is the only reference to the underlying AVBuffer). Return 0 otherwise. A positive answer is valid until av_buffer_ref() is called on buf. - /// 1 if the caller may write to the data referred to by buf (which is true if and only if buf is the only reference to the underlying AVBuffer). Return 0 otherwise. A positive answer is valid until av_buffer_ref() is called on buf. - public static int av_buffer_is_writable(AVBufferRef* @buf) - { - return av_buffer_is_writable_fptr(@buf); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffer_make_writable_delegate(AVBufferRef** @buf); - private static av_buffer_make_writable_delegate av_buffer_make_writable_fptr = (AVBufferRef** @buf) => - { - av_buffer_make_writable_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_make_writable"); - if (av_buffer_make_writable_fptr == null) - { - av_buffer_make_writable_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_make_writable")); - }; - } - return av_buffer_make_writable_fptr(@buf); - }; - /// Create a writable reference from a given buffer reference, avoiding data copy if possible. - /// buffer reference to make writable. On success, buf is either left untouched, or it is unreferenced and a new writable AVBufferRef is written in its place. On failure, buf is left untouched. - /// 0 on success, a negative AVERROR on failure. 
- public static int av_buffer_make_writable(AVBufferRef** @buf) - { - return av_buffer_make_writable_fptr(@buf); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_buffer_pool_buffer_get_opaque_delegate(AVBufferRef* @ref); - private static av_buffer_pool_buffer_get_opaque_delegate av_buffer_pool_buffer_get_opaque_fptr = (AVBufferRef* @ref) => - { - av_buffer_pool_buffer_get_opaque_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_pool_buffer_get_opaque"); - if (av_buffer_pool_buffer_get_opaque_fptr == null) - { - av_buffer_pool_buffer_get_opaque_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_pool_buffer_get_opaque")); - }; - } - return av_buffer_pool_buffer_get_opaque_fptr(@ref); - }; - /// Query the original opaque parameter of an allocated buffer in the pool. - /// a buffer reference to a buffer returned by av_buffer_pool_get. - /// the opaque parameter set by the buffer allocator function of the buffer pool. - public static void* av_buffer_pool_buffer_get_opaque(AVBufferRef* @ref) - { - return av_buffer_pool_buffer_get_opaque_fptr(@ref); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferRef* av_buffer_pool_get_delegate(AVBufferPool* @pool); - private static av_buffer_pool_get_delegate av_buffer_pool_get_fptr = (AVBufferPool* @pool) => - { - av_buffer_pool_get_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_pool_get"); - if (av_buffer_pool_get_fptr == null) - { - av_buffer_pool_get_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_pool_get")); - }; - } - return av_buffer_pool_get_fptr(@pool); - }; - /// Allocate a new AVBuffer, reusing an old buffer from the pool when available. This function may be called simultaneously from multiple threads. 
- /// a reference to the new buffer on success, NULL on error. - public static AVBufferRef* av_buffer_pool_get(AVBufferPool* @pool) - { - return av_buffer_pool_get_fptr(@pool); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferPool* av_buffer_pool_init_delegate(ulong @size, av_buffer_pool_init_alloc_func @alloc); - private static av_buffer_pool_init_delegate av_buffer_pool_init_fptr = (ulong @size, av_buffer_pool_init_alloc_func @alloc) => - { - av_buffer_pool_init_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_pool_init"); - if (av_buffer_pool_init_fptr == null) - { - av_buffer_pool_init_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_pool_init")); - }; - } - return av_buffer_pool_init_fptr(@size, @alloc); - }; - /// Allocate and initialize a buffer pool. - /// size of each buffer in this pool - /// a function that will be used to allocate new buffers when the pool is empty. May be NULL, then the default allocator will be used (av_buffer_alloc()). - /// newly created buffer pool on success, NULL on error. 
- public static AVBufferPool* av_buffer_pool_init(ulong @size, av_buffer_pool_init_alloc_func @alloc) - { - return av_buffer_pool_init_fptr(@size, @alloc); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferPool* av_buffer_pool_init2_delegate(ulong @size, void* @opaque, av_buffer_pool_init2_alloc_func @alloc, av_buffer_pool_init2_pool_free_func @pool_free); - private static av_buffer_pool_init2_delegate av_buffer_pool_init2_fptr = (ulong @size, void* @opaque, av_buffer_pool_init2_alloc_func @alloc, av_buffer_pool_init2_pool_free_func @pool_free) => - { - av_buffer_pool_init2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_pool_init2"); - if (av_buffer_pool_init2_fptr == null) - { - av_buffer_pool_init2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_pool_init2")); - }; - } - return av_buffer_pool_init2_fptr(@size, @opaque, @alloc, @pool_free); - }; - /// Allocate and initialize a buffer pool with a more complex allocator. - /// size of each buffer in this pool - /// arbitrary user data used by the allocator - /// a function that will be used to allocate new buffers when the pool is empty. May be NULL, then the default allocator will be used (av_buffer_alloc()). - /// a function that will be called immediately before the pool is freed. I.e. after av_buffer_pool_uninit() is called by the caller and all the frames are returned to the pool and freed. It is intended to uninitialize the user opaque data. May be NULL. - /// newly created buffer pool on success, NULL on error. 
- public static AVBufferPool* av_buffer_pool_init2(ulong @size, void* @opaque, av_buffer_pool_init2_alloc_func @alloc, av_buffer_pool_init2_pool_free_func @pool_free) - { - return av_buffer_pool_init2_fptr(@size, @opaque, @alloc, @pool_free); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_buffer_pool_uninit_delegate(AVBufferPool** @pool); - private static av_buffer_pool_uninit_delegate av_buffer_pool_uninit_fptr = (AVBufferPool** @pool) => - { - av_buffer_pool_uninit_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_pool_uninit"); - if (av_buffer_pool_uninit_fptr == null) - { - av_buffer_pool_uninit_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_pool_uninit")); - }; - } - av_buffer_pool_uninit_fptr(@pool); - }; - /// Mark the pool as being available for freeing. It will actually be freed only once all the allocated buffers associated with the pool are released. Thus it is safe to call this function while some of the allocated buffers are still in use. - /// pointer to the pool to be freed. It will be set to NULL. - public static void av_buffer_pool_uninit(AVBufferPool** @pool) - { - av_buffer_pool_uninit_fptr(@pool); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffer_realloc_delegate(AVBufferRef** @buf, ulong @size); - private static av_buffer_realloc_delegate av_buffer_realloc_fptr = (AVBufferRef** @buf, ulong @size) => - { - av_buffer_realloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_realloc"); - if (av_buffer_realloc_fptr == null) - { - av_buffer_realloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_realloc")); - }; - } - return av_buffer_realloc_fptr(@buf, @size); - }; - /// Reallocate a given buffer. - /// a buffer reference to reallocate. 
On success, buf will be unreferenced and a new reference with the required size will be written in its place. On failure buf will be left untouched. *buf may be NULL, then a new buffer is allocated. - /// required new buffer size. - /// 0 on success, a negative AVERROR on failure. - public static int av_buffer_realloc(AVBufferRef** @buf, ulong @size) - { - return av_buffer_realloc_fptr(@buf, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferRef* av_buffer_ref_delegate(AVBufferRef* @buf); - private static av_buffer_ref_delegate av_buffer_ref_fptr = (AVBufferRef* @buf) => - { - av_buffer_ref_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_ref"); - if (av_buffer_ref_fptr == null) - { - av_buffer_ref_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_ref")); - }; - } - return av_buffer_ref_fptr(@buf); - }; - /// Create a new reference to an AVBuffer. - /// a new AVBufferRef referring to the same AVBuffer as buf or NULL on failure. - public static AVBufferRef* av_buffer_ref(AVBufferRef* @buf) - { - return av_buffer_ref_fptr(@buf); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_buffer_replace_delegate(AVBufferRef** @dst, AVBufferRef* @src); - private static av_buffer_replace_delegate av_buffer_replace_fptr = (AVBufferRef** @dst, AVBufferRef* @src) => - { - av_buffer_replace_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_replace"); - if (av_buffer_replace_fptr == null) - { - av_buffer_replace_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_replace")); - }; - } - return av_buffer_replace_fptr(@dst, @src); - }; - /// Ensure dst refers to the same data as src. - /// Pointer to either a valid buffer reference or NULL. 
On success, this will point to a buffer reference equivalent to src. On failure, dst will be left untouched. - /// A buffer reference to replace dst with. May be NULL, then this function is equivalent to av_buffer_unref(dst). - /// 0 on success AVERROR(ENOMEM) on memory allocation failure. - public static int av_buffer_replace(AVBufferRef** @dst, AVBufferRef* @src) - { - return av_buffer_replace_fptr(@dst, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_buffer_unref_delegate(AVBufferRef** @buf); - private static av_buffer_unref_delegate av_buffer_unref_fptr = (AVBufferRef** @buf) => - { - av_buffer_unref_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_buffer_unref"); - if (av_buffer_unref_fptr == null) - { - av_buffer_unref_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_buffer_unref")); - }; - } - av_buffer_unref_fptr(@buf); - }; - /// Free a given reference and automatically free the buffer if there are no more references to it. - /// the reference to be freed. The pointer is set to NULL on return. - public static void av_buffer_unref(AVBufferRef** @buf) - { - av_buffer_unref_fptr(@buf); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_calloc_delegate(ulong @nmemb, ulong @size); - private static av_calloc_delegate av_calloc_fptr = (ulong @nmemb, ulong @size) => - { - av_calloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_calloc"); - if (av_calloc_fptr == null) - { - av_calloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_calloc")); - }; - } - return av_calloc_fptr(@nmemb, @size); - }; - /// Allocate a memory block for an array with av_mallocz(). 
- /// Number of elements - /// Size of the single element - /// Pointer to the allocated block, or `NULL` if the block cannot be allocated - public static void* av_calloc(ulong @nmemb, ulong @size) - { - return av_calloc_fptr(@nmemb, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_channel_description_delegate(byte* @buf, ulong @buf_size, AVChannel @channel); - private static av_channel_description_delegate av_channel_description_fptr = (byte* @buf, ulong @buf_size, AVChannel @channel) => - { - av_channel_description_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_description"); - if (av_channel_description_fptr == null) - { - av_channel_description_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_description")); - }; - } - return av_channel_description_fptr(@buf, @buf_size, @channel); - }; - /// Get a human readable string describing a given channel. - /// pre-allocated buffer where to put the generated string - /// size in bytes of the buffer. - /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. 
- public static int av_channel_description(byte* @buf, ulong @buf_size, AVChannel @channel) - { - return av_channel_description_fptr(@buf, @buf_size, @channel); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_channel_description_bprint_delegate(AVBPrint* @bp, AVChannel @channel_id); - private static av_channel_description_bprint_delegate av_channel_description_bprint_fptr = (AVBPrint* @bp, AVChannel @channel_id) => - { - av_channel_description_bprint_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_description_bprint"); - if (av_channel_description_bprint_fptr == null) - { - av_channel_description_bprint_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_description_bprint")); - }; - } - av_channel_description_bprint_fptr(@bp, @channel_id); - }; - /// bprint variant of av_channel_description(). - public static void av_channel_description_bprint(AVBPrint* @bp, AVChannel @channel_id) - { - av_channel_description_bprint_fptr(@bp, @channel_id); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVChannel av_channel_from_string_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_channel_from_string_delegate av_channel_from_string_fptr = (string @name) => - { - av_channel_from_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_from_string"); - if (av_channel_from_string_fptr == null) - { - av_channel_from_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_from_string")); - }; - } - return av_channel_from_string_fptr(@name); - }; - /// This is the inverse function of av_channel_name(). 
- /// the channel with the given name AV_CHAN_NONE when name does not identify a known channel - public static AVChannel av_channel_from_string( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_channel_from_string_fptr(@name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVChannel av_channel_layout_channel_from_index_delegate(AVChannelLayout* @channel_layout, uint @idx); - private static av_channel_layout_channel_from_index_delegate av_channel_layout_channel_from_index_fptr = (AVChannelLayout* @channel_layout, uint @idx) => - { - av_channel_layout_channel_from_index_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_channel_from_index"); - if (av_channel_layout_channel_from_index_fptr == null) - { - av_channel_layout_channel_from_index_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_channel_from_index")); - }; - } - return av_channel_layout_channel_from_index_fptr(@channel_layout, @idx); - }; - /// Get the channel with the given index in a channel layout. 
- /// input channel layout - /// channel with the index idx in channel_layout on success or AV_CHAN_NONE on failure (if idx is not valid or the channel order is unspecified) - public static AVChannel av_channel_layout_channel_from_index(AVChannelLayout* @channel_layout, uint @idx) - { - return av_channel_layout_channel_from_index_fptr(@channel_layout, @idx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVChannel av_channel_layout_channel_from_string_delegate(AVChannelLayout* @channel_layout, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_channel_layout_channel_from_string_delegate av_channel_layout_channel_from_string_fptr = (AVChannelLayout* @channel_layout, string @name) => - { - av_channel_layout_channel_from_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_channel_from_string"); - if (av_channel_layout_channel_from_string_fptr == null) - { - av_channel_layout_channel_from_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_channel_from_string")); - }; - } - return av_channel_layout_channel_from_string_fptr(@channel_layout, @name); - }; - /// Get a channel described by the given string. 
- /// input channel layout - /// a channel described by the given string in channel_layout on success or AV_CHAN_NONE on failure (if the string is not valid or the channel order is unspecified) - public static AVChannel av_channel_layout_channel_from_string(AVChannelLayout* @channel_layout, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_channel_layout_channel_from_string_fptr(@channel_layout, @name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_channel_layout_check_delegate(AVChannelLayout* @channel_layout); - private static av_channel_layout_check_delegate av_channel_layout_check_fptr = (AVChannelLayout* @channel_layout) => - { - av_channel_layout_check_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_check"); - if (av_channel_layout_check_fptr == null) - { - av_channel_layout_check_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_check")); - }; - } - return av_channel_layout_check_fptr(@channel_layout); - }; - /// Check whether a channel layout is valid, i.e. can possibly describe audio data. - /// input channel layout - /// 1 if channel_layout is valid, 0 otherwise. 
- public static int av_channel_layout_check(AVChannelLayout* @channel_layout) - { - return av_channel_layout_check_fptr(@channel_layout); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_channel_layout_compare_delegate(AVChannelLayout* @chl, AVChannelLayout* @chl1); - private static av_channel_layout_compare_delegate av_channel_layout_compare_fptr = (AVChannelLayout* @chl, AVChannelLayout* @chl1) => - { - av_channel_layout_compare_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_compare"); - if (av_channel_layout_compare_fptr == null) - { - av_channel_layout_compare_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_compare")); - }; - } - return av_channel_layout_compare_fptr(@chl, @chl1); - }; - /// Check whether two channel layouts are semantically the same, i.e. the same channels are present on the same positions in both. - /// input channel layout - /// input channel layout - /// 0 if chl and chl1 are equal, 1 if they are not equal. A negative AVERROR code if one or both are invalid. 
- public static int av_channel_layout_compare(AVChannelLayout* @chl, AVChannelLayout* @chl1) - { - return av_channel_layout_compare_fptr(@chl, @chl1); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_channel_layout_copy_delegate(AVChannelLayout* @dst, AVChannelLayout* @src); - private static av_channel_layout_copy_delegate av_channel_layout_copy_fptr = (AVChannelLayout* @dst, AVChannelLayout* @src) => - { - av_channel_layout_copy_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_copy"); - if (av_channel_layout_copy_fptr == null) - { - av_channel_layout_copy_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_copy")); - }; - } - return av_channel_layout_copy_fptr(@dst, @src); - }; - /// Make a copy of a channel layout. This differs from just assigning src to dst in that it allocates and copies the map for AV_CHANNEL_ORDER_CUSTOM. - /// destination channel layout - /// source channel layout - /// 0 on success, a negative AVERROR on error. 
- public static int av_channel_layout_copy(AVChannelLayout* @dst, AVChannelLayout* @src) - { - return av_channel_layout_copy_fptr(@dst, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_channel_layout_default_delegate(AVChannelLayout* @ch_layout, int @nb_channels); - private static av_channel_layout_default_delegate av_channel_layout_default_fptr = (AVChannelLayout* @ch_layout, int @nb_channels) => - { - av_channel_layout_default_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_default"); - if (av_channel_layout_default_fptr == null) - { - av_channel_layout_default_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_default")); - }; - } - av_channel_layout_default_fptr(@ch_layout, @nb_channels); - }; - /// Get the default channel layout for a given number of channels. - /// number of channels - public static void av_channel_layout_default(AVChannelLayout* @ch_layout, int @nb_channels) - { - av_channel_layout_default_fptr(@ch_layout, @nb_channels); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_channel_layout_describe_delegate(AVChannelLayout* @channel_layout, byte* @buf, ulong @buf_size); - private static av_channel_layout_describe_delegate av_channel_layout_describe_fptr = (AVChannelLayout* @channel_layout, byte* @buf, ulong @buf_size) => - { - av_channel_layout_describe_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_describe"); - if (av_channel_layout_describe_fptr == null) - { - av_channel_layout_describe_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_describe")); - }; - } - return av_channel_layout_describe_fptr(@channel_layout, @buf, @buf_size); - }; - /// Get a human-readable string describing the channel layout 
properties. The string will be in the same format that is accepted by av_channel_layout_from_string(), allowing to rebuild the same channel layout, except for opaque pointers. - /// channel layout to be described - /// pre-allocated buffer where to put the generated string - /// size in bytes of the buffer. - /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. - public static int av_channel_layout_describe(AVChannelLayout* @channel_layout, byte* @buf, ulong @buf_size) - { - return av_channel_layout_describe_fptr(@channel_layout, @buf, @buf_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_channel_layout_describe_bprint_delegate(AVChannelLayout* @channel_layout, AVBPrint* @bp); - private static av_channel_layout_describe_bprint_delegate av_channel_layout_describe_bprint_fptr = (AVChannelLayout* @channel_layout, AVBPrint* @bp) => - { - av_channel_layout_describe_bprint_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_describe_bprint"); - if (av_channel_layout_describe_bprint_fptr == null) - { - av_channel_layout_describe_bprint_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_describe_bprint")); - }; - } - return av_channel_layout_describe_bprint_fptr(@channel_layout, @bp); - }; - /// bprint variant of av_channel_layout_describe(). - /// 0 on success, or a negative AVERROR value on failure. 
- public static int av_channel_layout_describe_bprint(AVChannelLayout* @channel_layout, AVBPrint* @bp) - { - return av_channel_layout_describe_bprint_fptr(@channel_layout, @bp); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate ulong av_channel_layout_extract_channel_delegate(ulong @channel_layout, int @index); - private static av_channel_layout_extract_channel_delegate av_channel_layout_extract_channel_fptr = (ulong @channel_layout, int @index) => - { - av_channel_layout_extract_channel_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_extract_channel"); - if (av_channel_layout_extract_channel_fptr == null) - { - av_channel_layout_extract_channel_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_extract_channel")); - }; - } - return av_channel_layout_extract_channel_fptr(@channel_layout, @index); - }; - /// Get the channel with the given index in channel_layout. 
- [Obsolete("use av_channel_layout_channel_from_index()")] - public static ulong av_channel_layout_extract_channel(ulong @channel_layout, int @index) - { - return av_channel_layout_extract_channel_fptr(@channel_layout, @index); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_channel_layout_from_mask_delegate(AVChannelLayout* @channel_layout, ulong @mask); - private static av_channel_layout_from_mask_delegate av_channel_layout_from_mask_fptr = (AVChannelLayout* @channel_layout, ulong @mask) => - { - av_channel_layout_from_mask_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_from_mask"); - if (av_channel_layout_from_mask_fptr == null) - { - av_channel_layout_from_mask_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_from_mask")); - }; - } - return av_channel_layout_from_mask_fptr(@channel_layout, @mask); - }; - /// Initialize a native channel layout from a bitmask indicating which channels are present. 
- /// the layout structure to be initialized - /// bitmask describing the channel layout - /// 0 on success AVERROR(EINVAL) for invalid mask values - public static int av_channel_layout_from_mask(AVChannelLayout* @channel_layout, ulong @mask) - { - return av_channel_layout_from_mask_fptr(@channel_layout, @mask); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_channel_layout_from_string_delegate(AVChannelLayout* @channel_layout, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @str); - private static av_channel_layout_from_string_delegate av_channel_layout_from_string_fptr = (AVChannelLayout* @channel_layout, string @str) => - { - av_channel_layout_from_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_from_string"); - if (av_channel_layout_from_string_fptr == null) - { - av_channel_layout_from_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_from_string")); - }; - } - return av_channel_layout_from_string_fptr(@channel_layout, @str); - }; - /// Initialize a channel layout from a given string description. The input string can be represented by: - the formal channel layout name (returned by av_channel_layout_describe()) - single or multiple channel names (returned by av_channel_name(), eg. "FL", or concatenated with "+", each optionally containing a custom name after a "", eg. "FL+FR+LFE") - a decimal or hexadecimal value of a native channel layout (eg. "4" or "0x4") - the number of channels with default layout (eg. "4c") - the number of unordered channels (eg. "4C" or "4 channels") - the ambisonic order followed by optional non-diegetic channels (eg. 
"ambisonic 2+stereo") - /// input channel layout - /// string describing the channel layout - /// 0 channel layout was detected, AVERROR_INVALIDATATA otherwise - public static int av_channel_layout_from_string(AVChannelLayout* @channel_layout, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @str) - { - return av_channel_layout_from_string_fptr(@channel_layout, @str); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_channel_layout_index_from_channel_delegate(AVChannelLayout* @channel_layout, AVChannel @channel); - private static av_channel_layout_index_from_channel_delegate av_channel_layout_index_from_channel_fptr = (AVChannelLayout* @channel_layout, AVChannel @channel) => - { - av_channel_layout_index_from_channel_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_index_from_channel"); - if (av_channel_layout_index_from_channel_fptr == null) - { - av_channel_layout_index_from_channel_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_index_from_channel")); - }; - } - return av_channel_layout_index_from_channel_fptr(@channel_layout, @channel); - }; - /// Get the index of a given channel in a channel layout. In case multiple channels are found, only the first match will be returned. - /// input channel layout - /// index of channel in channel_layout on success or a negative number if channel is not present in channel_layout. 
- public static int av_channel_layout_index_from_channel(AVChannelLayout* @channel_layout, AVChannel @channel) - { - return av_channel_layout_index_from_channel_fptr(@channel_layout, @channel); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_channel_layout_index_from_string_delegate(AVChannelLayout* @channel_layout, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_channel_layout_index_from_string_delegate av_channel_layout_index_from_string_fptr = (AVChannelLayout* @channel_layout, string @name) => - { - av_channel_layout_index_from_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_index_from_string"); - if (av_channel_layout_index_from_string_fptr == null) - { - av_channel_layout_index_from_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_index_from_string")); - }; - } - return av_channel_layout_index_from_string_fptr(@channel_layout, @name); - }; - /// Get the index in a channel layout of a channel described by the given string. In case multiple channels are found, only the first match will be returned. - /// input channel layout - /// a channel index described by the given string, or a negative AVERROR value. 
- public static int av_channel_layout_index_from_string(AVChannelLayout* @channel_layout, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_channel_layout_index_from_string_fptr(@channel_layout, @name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVChannelLayout* av_channel_layout_standard_delegate(void** @opaque); - private static av_channel_layout_standard_delegate av_channel_layout_standard_fptr = (void** @opaque) => - { - av_channel_layout_standard_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_standard"); - if (av_channel_layout_standard_fptr == null) - { - av_channel_layout_standard_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_standard")); - }; - } - return av_channel_layout_standard_fptr(@opaque); - }; - /// Iterate over all standard channel layouts. - /// a pointer where libavutil will store the iteration state. Must point to NULL to start the iteration. 
- /// the standard channel layout or NULL when the iteration is finished - public static AVChannelLayout* av_channel_layout_standard(void** @opaque) - { - return av_channel_layout_standard_fptr(@opaque); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate ulong av_channel_layout_subset_delegate(AVChannelLayout* @channel_layout, ulong @mask); - private static av_channel_layout_subset_delegate av_channel_layout_subset_fptr = (AVChannelLayout* @channel_layout, ulong @mask) => - { - av_channel_layout_subset_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_subset"); - if (av_channel_layout_subset_fptr == null) - { - av_channel_layout_subset_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_subset")); - }; - } - return av_channel_layout_subset_fptr(@channel_layout, @mask); - }; - /// Find out what channels from a given set are present in a channel layout, without regard for their positions. 
- /// input channel layout - /// a combination of AV_CH_* representing a set of channels - /// a bitfield representing all the channels from mask that are present in channel_layout - public static ulong av_channel_layout_subset(AVChannelLayout* @channel_layout, ulong @mask) - { - return av_channel_layout_subset_fptr(@channel_layout, @mask); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_channel_layout_uninit_delegate(AVChannelLayout* @channel_layout); - private static av_channel_layout_uninit_delegate av_channel_layout_uninit_fptr = (AVChannelLayout* @channel_layout) => - { - av_channel_layout_uninit_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_layout_uninit"); - if (av_channel_layout_uninit_fptr == null) - { - av_channel_layout_uninit_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_layout_uninit")); - }; - } - av_channel_layout_uninit_fptr(@channel_layout); - }; - /// Free any allocated data in the channel layout and reset the channel count to 0. 
- /// the layout structure to be uninitialized - public static void av_channel_layout_uninit(AVChannelLayout* @channel_layout) - { - av_channel_layout_uninit_fptr(@channel_layout); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_channel_name_delegate(byte* @buf, ulong @buf_size, AVChannel @channel); - private static av_channel_name_delegate av_channel_name_fptr = (byte* @buf, ulong @buf_size, AVChannel @channel) => - { - av_channel_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_name"); - if (av_channel_name_fptr == null) - { - av_channel_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_name")); - }; - } - return av_channel_name_fptr(@buf, @buf_size, @channel); - }; - /// Get a human readable string in an abbreviated form describing a given channel. This is the inverse function of av_channel_from_string(). - /// pre-allocated buffer where to put the generated string - /// size in bytes of the buffer. - /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. 
- public static int av_channel_name(byte* @buf, ulong @buf_size, AVChannel @channel) - { - return av_channel_name_fptr(@buf, @buf_size, @channel); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_channel_name_bprint_delegate(AVBPrint* @bp, AVChannel @channel_id); - private static av_channel_name_bprint_delegate av_channel_name_bprint_fptr = (AVBPrint* @bp, AVChannel @channel_id) => - { - av_channel_name_bprint_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_channel_name_bprint"); - if (av_channel_name_bprint_fptr == null) - { - av_channel_name_bprint_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_channel_name_bprint")); - }; - } - av_channel_name_bprint_fptr(@bp, @channel_id); - }; - /// bprint variant of av_channel_name(). - public static void av_channel_name_bprint(AVBPrint* @bp, AVChannel @channel_id) - { - av_channel_name_bprint_fptr(@bp, @channel_id); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_chroma_location_from_name_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_chroma_location_from_name_delegate av_chroma_location_from_name_fptr = (string @name) => - { - av_chroma_location_from_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_chroma_location_from_name"); - if (av_chroma_location_from_name_fptr == null) - { - av_chroma_location_from_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_chroma_location_from_name")); - }; - } - return av_chroma_location_from_name_fptr(@name); - }; - /// Returns the AVChromaLocation value for name or an AVError if not found. 
- /// the AVChromaLocation value for name or an AVError if not found. - public static int av_chroma_location_from_name( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_chroma_location_from_name_fptr(@name); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_chroma_location_name_delegate(AVChromaLocation @location); - private static av_chroma_location_name_delegate av_chroma_location_name_fptr = (AVChromaLocation @location) => - { - av_chroma_location_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_chroma_location_name"); - if (av_chroma_location_name_fptr == null) - { - av_chroma_location_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_chroma_location_name")); - }; - } - return av_chroma_location_name_fptr(@location); - }; - /// Returns the name for provided chroma location or NULL if unknown. - /// the name for provided chroma location or NULL if unknown. 
- public static string av_chroma_location_name(AVChromaLocation @location) - { - return av_chroma_location_name_fptr(@location); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_color_primaries_from_name_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_color_primaries_from_name_delegate av_color_primaries_from_name_fptr = (string @name) => - { - av_color_primaries_from_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_color_primaries_from_name"); - if (av_color_primaries_from_name_fptr == null) - { - av_color_primaries_from_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_color_primaries_from_name")); - }; - } - return av_color_primaries_from_name_fptr(@name); - }; - /// Returns the AVColorPrimaries value for name or an AVError if not found. - /// the AVColorPrimaries value for name or an AVError if not found. 
- public static int av_color_primaries_from_name( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_color_primaries_from_name_fptr(@name); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_color_primaries_name_delegate(AVColorPrimaries @primaries); - private static av_color_primaries_name_delegate av_color_primaries_name_fptr = (AVColorPrimaries @primaries) => - { - av_color_primaries_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_color_primaries_name"); - if (av_color_primaries_name_fptr == null) - { - av_color_primaries_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_color_primaries_name")); - }; - } - return av_color_primaries_name_fptr(@primaries); - }; - /// Returns the name for provided color primaries or NULL if unknown. - /// the name for provided color primaries or NULL if unknown. 
- public static string av_color_primaries_name(AVColorPrimaries @primaries) - { - return av_color_primaries_name_fptr(@primaries); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_color_range_from_name_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_color_range_from_name_delegate av_color_range_from_name_fptr = (string @name) => - { - av_color_range_from_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_color_range_from_name"); - if (av_color_range_from_name_fptr == null) - { - av_color_range_from_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_color_range_from_name")); - }; - } - return av_color_range_from_name_fptr(@name); - }; - /// Returns the AVColorRange value for name or an AVError if not found. - /// the AVColorRange value for name or an AVError if not found. 
- public static int av_color_range_from_name( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_color_range_from_name_fptr(@name); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_color_range_name_delegate(AVColorRange @range); - private static av_color_range_name_delegate av_color_range_name_fptr = (AVColorRange @range) => - { - av_color_range_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_color_range_name"); - if (av_color_range_name_fptr == null) - { - av_color_range_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_color_range_name")); - }; - } - return av_color_range_name_fptr(@range); - }; - /// Returns the name for provided color range or NULL if unknown. - /// the name for provided color range or NULL if unknown. 
- public static string av_color_range_name(AVColorRange @range) - { - return av_color_range_name_fptr(@range); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_color_space_from_name_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_color_space_from_name_delegate av_color_space_from_name_fptr = (string @name) => - { - av_color_space_from_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_color_space_from_name"); - if (av_color_space_from_name_fptr == null) - { - av_color_space_from_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_color_space_from_name")); - }; - } - return av_color_space_from_name_fptr(@name); - }; - /// Returns the AVColorSpace value for name or an AVError if not found. - /// the AVColorSpace value for name or an AVError if not found. 
- public static int av_color_space_from_name( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_color_space_from_name_fptr(@name); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_color_space_name_delegate(AVColorSpace @space); - private static av_color_space_name_delegate av_color_space_name_fptr = (AVColorSpace @space) => - { - av_color_space_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_color_space_name"); - if (av_color_space_name_fptr == null) - { - av_color_space_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_color_space_name")); - }; - } - return av_color_space_name_fptr(@space); - }; - /// Returns the name for provided color space or NULL if unknown. - /// the name for provided color space or NULL if unknown. 
- public static string av_color_space_name(AVColorSpace @space) - { - return av_color_space_name_fptr(@space); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_color_transfer_from_name_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_color_transfer_from_name_delegate av_color_transfer_from_name_fptr = (string @name) => - { - av_color_transfer_from_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_color_transfer_from_name"); - if (av_color_transfer_from_name_fptr == null) - { - av_color_transfer_from_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_color_transfer_from_name")); - }; - } - return av_color_transfer_from_name_fptr(@name); - }; - /// Returns the AVColorTransferCharacteristic value for name or an AVError if not found. - /// the AVColorTransferCharacteristic value for name or an AVError if not found. 
- public static int av_color_transfer_from_name( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_color_transfer_from_name_fptr(@name); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_color_transfer_name_delegate(AVColorTransferCharacteristic @transfer); - private static av_color_transfer_name_delegate av_color_transfer_name_fptr = (AVColorTransferCharacteristic @transfer) => - { - av_color_transfer_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_color_transfer_name"); - if (av_color_transfer_name_fptr == null) - { - av_color_transfer_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_color_transfer_name")); - }; - } - return av_color_transfer_name_fptr(@transfer); - }; - /// Returns the name for provided color transfer or NULL if unknown. - /// the name for provided color transfer or NULL if unknown. 
- public static string av_color_transfer_name(AVColorTransferCharacteristic @transfer) - { - return av_color_transfer_name_fptr(@transfer); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long av_compare_mod_delegate(ulong @a, ulong @b, ulong @mod); - private static av_compare_mod_delegate av_compare_mod_fptr = (ulong @a, ulong @b, ulong @mod) => - { - av_compare_mod_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_compare_mod"); - if (av_compare_mod_fptr == null) - { - av_compare_mod_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_compare_mod")); - }; - } - return av_compare_mod_fptr(@a, @b, @mod); - }; - /// Compare the remainders of two integer operands divided by a common divisor. - /// Divisor; must be a power of 2 - /// - a negative value if `a % mod < b % mod` - a positive value if `a % mod > b % mod` - zero if `a % mod == b % mod` - public static long av_compare_mod(ulong @a, ulong @b, ulong @mod) - { - return av_compare_mod_fptr(@a, @b, @mod); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_compare_ts_delegate(long @ts_a, AVRational @tb_a, long @ts_b, AVRational @tb_b); - private static av_compare_ts_delegate av_compare_ts_fptr = (long @ts_a, AVRational @tb_a, long @ts_b, AVRational @tb_b) => - { - av_compare_ts_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_compare_ts"); - if (av_compare_ts_fptr == null) - { - av_compare_ts_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_compare_ts")); - }; - } - return av_compare_ts_fptr(@ts_a, @tb_a, @ts_b, @tb_b); - }; - /// Compare two timestamps each in its own time base. 
- /// One of the following values: - -1 if `ts_a` is before `ts_b` - 1 if `ts_a` is after `ts_b` - 0 if they represent the same position - public static int av_compare_ts(long @ts_a, AVRational @tb_a, long @ts_b, AVRational @tb_b) - { - return av_compare_ts_fptr(@ts_a, @tb_a, @ts_b, @tb_b); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVContentLightMetadata* av_content_light_metadata_alloc_delegate(ulong* @size); - private static av_content_light_metadata_alloc_delegate av_content_light_metadata_alloc_fptr = (ulong* @size) => - { - av_content_light_metadata_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_content_light_metadata_alloc"); - if (av_content_light_metadata_alloc_fptr == null) - { - av_content_light_metadata_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_content_light_metadata_alloc")); - }; - } - return av_content_light_metadata_alloc_fptr(@size); - }; - /// Allocate an AVContentLightMetadata structure and set its fields to default values. The resulting struct can be freed using av_freep(). - /// An AVContentLightMetadata filled with default values or NULL on failure. 
- public static AVContentLightMetadata* av_content_light_metadata_alloc(ulong* @size) - { - return av_content_light_metadata_alloc_fptr(@size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVContentLightMetadata* av_content_light_metadata_create_side_data_delegate(AVFrame* @frame); - private static av_content_light_metadata_create_side_data_delegate av_content_light_metadata_create_side_data_fptr = (AVFrame* @frame) => - { - av_content_light_metadata_create_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_content_light_metadata_create_side_data"); - if (av_content_light_metadata_create_side_data_fptr == null) - { - av_content_light_metadata_create_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_content_light_metadata_create_side_data")); - }; - } - return av_content_light_metadata_create_side_data_fptr(@frame); - }; - /// Allocate a complete AVContentLightMetadata and add it to the frame. - /// The frame which side data is added to. - /// The AVContentLightMetadata structure to be filled by caller. - public static AVContentLightMetadata* av_content_light_metadata_create_side_data(AVFrame* @frame) - { - return av_content_light_metadata_create_side_data_fptr(@frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_cpu_count_delegate(); - private static av_cpu_count_delegate av_cpu_count_fptr = () => - { - av_cpu_count_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_cpu_count"); - if (av_cpu_count_fptr == null) - { - av_cpu_count_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_cpu_count")); - }; - } - return av_cpu_count_fptr(); - }; - /// Returns the number of logical CPU cores present. - /// the number of logical CPU cores present. 
- public static int av_cpu_count() - { - return av_cpu_count_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_cpu_force_count_delegate(int @count); - private static av_cpu_force_count_delegate av_cpu_force_count_fptr = (int @count) => - { - av_cpu_force_count_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_cpu_force_count"); - if (av_cpu_force_count_fptr == null) - { - av_cpu_force_count_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_cpu_force_count")); - }; - } - av_cpu_force_count_fptr(@count); - }; - /// Overrides cpu count detection and forces the specified count. Count < 1 disables forcing of specific count. - public static void av_cpu_force_count(int @count) - { - av_cpu_force_count_fptr(@count); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate ulong av_cpu_max_align_delegate(); - private static av_cpu_max_align_delegate av_cpu_max_align_fptr = () => - { - av_cpu_max_align_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_cpu_max_align"); - if (av_cpu_max_align_fptr == null) - { - av_cpu_max_align_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_cpu_max_align")); - }; - } - return av_cpu_max_align_fptr(); - }; - /// Get the maximum data alignment that may be required by FFmpeg. 
- public static ulong av_cpu_max_align() - { - return av_cpu_max_align_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_d2q_delegate(double @d, int @max); - private static av_d2q_delegate av_d2q_fptr = (double @d, int @max) => - { - av_d2q_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_d2q"); - if (av_d2q_fptr == null) - { - av_d2q_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_d2q")); - }; - } - return av_d2q_fptr(@d, @max); - }; - /// Convert a double precision floating point number to a rational. - /// `double` to convert - /// Maximum allowed numerator and denominator - /// `d` in AVRational form - public static AVRational av_d2q(double @d, int @max) - { - return av_d2q_fptr(@d, @max); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVClassCategory av_default_get_category_delegate(void* @ptr); - private static av_default_get_category_delegate av_default_get_category_fptr = (void* @ptr) => - { - av_default_get_category_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_default_get_category"); - if (av_default_get_category_fptr == null) - { - av_default_get_category_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_default_get_category")); - }; - } - return av_default_get_category_fptr(@ptr); - }; - public static AVClassCategory av_default_get_category(void* @ptr) - { - return av_default_get_category_fptr(@ptr); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_default_item_name_delegate(void* @ctx); - private static av_default_item_name_delegate av_default_item_name_fptr = (void* @ctx) => - { - 
av_default_item_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_default_item_name"); - if (av_default_item_name_fptr == null) - { - av_default_item_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_default_item_name")); - }; - } - return av_default_item_name_fptr(@ctx); - }; - /// Return the context name - /// The AVClass context - /// The AVClass class_name - public static string av_default_item_name(void* @ctx) - { - return av_default_item_name_fptr(@ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_dict_copy_delegate(AVDictionary** @dst, AVDictionary* @src, int @flags); - private static av_dict_copy_delegate av_dict_copy_fptr = (AVDictionary** @dst, AVDictionary* @src, int @flags) => - { - av_dict_copy_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dict_copy"); - if (av_dict_copy_fptr == null) - { - av_dict_copy_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dict_copy")); - }; - } - return av_dict_copy_fptr(@dst, @src, @flags); - }; - /// Copy entries from one AVDictionary struct into another. - /// pointer to a pointer to a AVDictionary struct. If *dst is NULL, this function will allocate a struct for you and put it in *dst - /// pointer to source AVDictionary struct - /// flags to use when setting entries in *dst - /// 0 on success, negative AVERROR code on failure. If dst was allocated by this function, callers should free the associated memory. 
- public static int av_dict_copy(AVDictionary** @dst, AVDictionary* @src, int @flags) - { - return av_dict_copy_fptr(@dst, @src, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_dict_count_delegate(AVDictionary* @m); - private static av_dict_count_delegate av_dict_count_fptr = (AVDictionary* @m) => - { - av_dict_count_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dict_count"); - if (av_dict_count_fptr == null) - { - av_dict_count_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dict_count")); - }; - } - return av_dict_count_fptr(@m); - }; - /// Get number of entries in dictionary. - /// dictionary - /// number of entries in dictionary - public static int av_dict_count(AVDictionary* @m) - { - return av_dict_count_fptr(@m); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_dict_free_delegate(AVDictionary** @m); - private static av_dict_free_delegate av_dict_free_fptr = (AVDictionary** @m) => - { - av_dict_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dict_free"); - if (av_dict_free_fptr == null) - { - av_dict_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dict_free")); - }; - } - av_dict_free_fptr(@m); - }; - /// Free all the memory allocated for an AVDictionary struct and all keys and values. 
- public static void av_dict_free(AVDictionary** @m) - { - av_dict_free_fptr(@m); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVDictionaryEntry* av_dict_get_delegate(AVDictionary* @m, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key, AVDictionaryEntry* @prev, int @flags); - private static av_dict_get_delegate av_dict_get_fptr = (AVDictionary* @m, string @key, AVDictionaryEntry* @prev, int @flags) => - { - av_dict_get_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dict_get"); - if (av_dict_get_fptr == null) - { - av_dict_get_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dict_get")); - }; - } - return av_dict_get_fptr(@m, @key, @prev, @flags); - }; - /// Get a dictionary entry with matching key. - /// matching key - /// Set to the previous matching element to find the next. If set to NULL the first matching element is returned. 
- /// a collection of AV_DICT_* flags controlling how the entry is retrieved - /// found entry or NULL in case no matching entry was found in the dictionary - public static AVDictionaryEntry* av_dict_get(AVDictionary* @m, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key, AVDictionaryEntry* @prev, int @flags) - { - return av_dict_get_fptr(@m, @key, @prev, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_dict_get_string_delegate(AVDictionary* @m, byte** @buffer, byte @key_val_sep, byte @pairs_sep); - private static av_dict_get_string_delegate av_dict_get_string_fptr = (AVDictionary* @m, byte** @buffer, byte @key_val_sep, byte @pairs_sep) => - { - av_dict_get_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dict_get_string"); - if (av_dict_get_string_fptr == null) - { - av_dict_get_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dict_get_string")); - }; - } - return av_dict_get_string_fptr(@m, @buffer, @key_val_sep, @pairs_sep); - }; - /// Get dictionary entries as a string. - /// dictionary - /// Pointer to buffer that will be allocated with string containg entries. Buffer must be freed by the caller when is no longer needed. 
- /// character used to separate key from value - /// character used to separate two pairs from each other - /// >= 0 on success, negative on error - public static int av_dict_get_string(AVDictionary* @m, byte** @buffer, byte @key_val_sep, byte @pairs_sep) - { - return av_dict_get_string_fptr(@m, @buffer, @key_val_sep, @pairs_sep); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_dict_parse_string_delegate(AVDictionary** @pm, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @str, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key_val_sep, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @pairs_sep, int @flags); - private static av_dict_parse_string_delegate av_dict_parse_string_fptr = (AVDictionary** @pm, string @str, string @key_val_sep, string @pairs_sep, int @flags) => - { - av_dict_parse_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dict_parse_string"); - if (av_dict_parse_string_fptr == null) - { - av_dict_parse_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dict_parse_string")); - }; - } - return av_dict_parse_string_fptr(@pm, @str, @key_val_sep, @pairs_sep, @flags); - }; - /// Parse the key/value pairs list and add the parsed entries to a dictionary. - /// a 0-terminated list of characters used to separate key from value - /// a 0-terminated list of characters used to separate two pairs from each other - /// flags to use when adding to dictionary. 
AV_DICT_DONT_STRDUP_KEY and AV_DICT_DONT_STRDUP_VAL are ignored since the key/value tokens will always be duplicated. - /// 0 on success, negative AVERROR code on failure - public static int av_dict_parse_string(AVDictionary** @pm, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @str, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key_val_sep, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @pairs_sep, int @flags) - { - return av_dict_parse_string_fptr(@pm, @str, @key_val_sep, @pairs_sep, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_dict_set_delegate(AVDictionary** @pm, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @value, int @flags); - private static av_dict_set_delegate av_dict_set_fptr = (AVDictionary** @pm, string @key, string @value, int @flags) => - { - av_dict_set_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dict_set"); - if (av_dict_set_fptr == null) - { - av_dict_set_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dict_set")); - }; - } - return av_dict_set_fptr(@pm, @key, @value, @flags); - }; - /// Set the given entry in *pm, overwriting an existing entry. 
- /// pointer to a pointer to a dictionary struct. If *pm is NULL a dictionary struct is allocated and put in *pm. - /// entry key to add to *pm (will either be av_strduped or added as a new key depending on flags) - /// entry value to add to *pm (will be av_strduped or added as a new key depending on flags). Passing a NULL value will cause an existing entry to be deleted. - /// >= 0 on success otherwise an error code < 0 - public static int av_dict_set(AVDictionary** @pm, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @value, int @flags) - { - return av_dict_set_fptr(@pm, @key, @value, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_dict_set_int_delegate(AVDictionary** @pm, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key, long @value, int @flags); - private static av_dict_set_int_delegate av_dict_set_int_fptr = (AVDictionary** @pm, string @key, long @value, int @flags) => - { - av_dict_set_int_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dict_set_int"); - if (av_dict_set_int_fptr == null) - { - av_dict_set_int_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dict_set_int")); - }; - } - return av_dict_set_int_fptr(@pm, @key, @value, @flags); - }; - /// Convenience wrapper for av_dict_set that converts the value to a string and stores it. 
- public static int av_dict_set_int(AVDictionary** @pm, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key, long @value, int @flags) - { - return av_dict_set_int_fptr(@pm, @key, @value, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_div_q_delegate(AVRational @b, AVRational @c); - private static av_div_q_delegate av_div_q_fptr = (AVRational @b, AVRational @c) => - { - av_div_q_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_div_q"); - if (av_div_q_fptr == null) - { - av_div_q_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_div_q")); - }; - } - return av_div_q_fptr(@b, @c); - }; - /// Divide one rational by another. - /// First rational - /// Second rational - /// b/c - public static AVRational av_div_q(AVRational @b, AVRational @c) - { - return av_div_q_fptr(@b, @c); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVDynamicHDRPlus* av_dynamic_hdr_plus_alloc_delegate(ulong* @size); - private static av_dynamic_hdr_plus_alloc_delegate av_dynamic_hdr_plus_alloc_fptr = (ulong* @size) => - { - av_dynamic_hdr_plus_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dynamic_hdr_plus_alloc"); - if (av_dynamic_hdr_plus_alloc_fptr == null) - { - av_dynamic_hdr_plus_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dynamic_hdr_plus_alloc")); - }; - } - return av_dynamic_hdr_plus_alloc_fptr(@size); - }; - /// Allocate an AVDynamicHDRPlus structure and set its fields to default values. The resulting struct can be freed using av_freep(). - /// An AVDynamicHDRPlus filled with default values or NULL on failure. 
- public static AVDynamicHDRPlus* av_dynamic_hdr_plus_alloc(ulong* @size) - { - return av_dynamic_hdr_plus_alloc_fptr(@size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVDynamicHDRPlus* av_dynamic_hdr_plus_create_side_data_delegate(AVFrame* @frame); - private static av_dynamic_hdr_plus_create_side_data_delegate av_dynamic_hdr_plus_create_side_data_fptr = (AVFrame* @frame) => - { - av_dynamic_hdr_plus_create_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dynamic_hdr_plus_create_side_data"); - if (av_dynamic_hdr_plus_create_side_data_fptr == null) - { - av_dynamic_hdr_plus_create_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dynamic_hdr_plus_create_side_data")); - }; - } - return av_dynamic_hdr_plus_create_side_data_fptr(@frame); - }; - /// Allocate a complete AVDynamicHDRPlus and add it to the frame. - /// The frame which side data is added to. - /// The AVDynamicHDRPlus structure to be filled by caller or NULL on failure. - public static AVDynamicHDRPlus* av_dynamic_hdr_plus_create_side_data(AVFrame* @frame) - { - return av_dynamic_hdr_plus_create_side_data_fptr(@frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_dynarray_add_delegate(void* @tab_ptr, int* @nb_ptr, void* @elem); - private static av_dynarray_add_delegate av_dynarray_add_fptr = (void* @tab_ptr, int* @nb_ptr, void* @elem) => - { - av_dynarray_add_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dynarray_add"); - if (av_dynarray_add_fptr == null) - { - av_dynarray_add_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dynarray_add")); - }; - } - av_dynarray_add_fptr(@tab_ptr, @nb_ptr, @elem); - }; - /// Add the pointer to an element to a dynamic array. 
- /// Pointer to the array to grow - /// Pointer to the number of elements in the array - /// Element to add - public static void av_dynarray_add(void* @tab_ptr, int* @nb_ptr, void* @elem) - { - av_dynarray_add_fptr(@tab_ptr, @nb_ptr, @elem); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_dynarray_add_nofree_delegate(void* @tab_ptr, int* @nb_ptr, void* @elem); - private static av_dynarray_add_nofree_delegate av_dynarray_add_nofree_fptr = (void* @tab_ptr, int* @nb_ptr, void* @elem) => - { - av_dynarray_add_nofree_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dynarray_add_nofree"); - if (av_dynarray_add_nofree_fptr == null) - { - av_dynarray_add_nofree_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dynarray_add_nofree")); - }; - } - return av_dynarray_add_nofree_fptr(@tab_ptr, @nb_ptr, @elem); - }; - /// Add an element to a dynamic array. - /// >=0 on success, negative otherwise - public static int av_dynarray_add_nofree(void* @tab_ptr, int* @nb_ptr, void* @elem) - { - return av_dynarray_add_nofree_fptr(@tab_ptr, @nb_ptr, @elem); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_dynarray2_add_delegate(void** @tab_ptr, int* @nb_ptr, ulong @elem_size, byte* @elem_data); - private static av_dynarray2_add_delegate av_dynarray2_add_fptr = (void** @tab_ptr, int* @nb_ptr, ulong @elem_size, byte* @elem_data) => - { - av_dynarray2_add_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_dynarray2_add"); - if (av_dynarray2_add_fptr == null) - { - av_dynarray2_add_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_dynarray2_add")); - }; - } - return av_dynarray2_add_fptr(@tab_ptr, @nb_ptr, @elem_size, @elem_data); - }; - /// Add an element of size `elem_size` to a dynamic array. 
- /// Pointer to the array to grow - /// Pointer to the number of elements in the array - /// Size in bytes of an element in the array - /// Pointer to the data of the element to add. If `NULL`, the space of the newly added element is allocated but left uninitialized. - /// Pointer to the data of the element to copy in the newly allocated space - public static void* av_dynarray2_add(void** @tab_ptr, int* @nb_ptr, ulong @elem_size, byte* @elem_data) - { - return av_dynarray2_add_fptr(@tab_ptr, @nb_ptr, @elem_size, @elem_data); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_fast_malloc_delegate(void* @ptr, uint* @size, ulong @min_size); - private static av_fast_malloc_delegate av_fast_malloc_fptr = (void* @ptr, uint* @size, ulong @min_size) => - { - av_fast_malloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_fast_malloc"); - if (av_fast_malloc_fptr == null) - { - av_fast_malloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_fast_malloc")); - }; - } - av_fast_malloc_fptr(@ptr, @size, @min_size); - }; - /// Allocate a buffer, reusing the given one if large enough. - /// Pointer to pointer to an already allocated buffer. `*ptr` will be overwritten with pointer to new buffer on success or `NULL` on failure - /// Pointer to the size of buffer `*ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure. 
- /// Desired minimal size of buffer `*ptr` - public static void av_fast_malloc(void* @ptr, uint* @size, ulong @min_size) - { - av_fast_malloc_fptr(@ptr, @size, @min_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_fast_mallocz_delegate(void* @ptr, uint* @size, ulong @min_size); - private static av_fast_mallocz_delegate av_fast_mallocz_fptr = (void* @ptr, uint* @size, ulong @min_size) => - { - av_fast_mallocz_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_fast_mallocz"); - if (av_fast_mallocz_fptr == null) - { - av_fast_mallocz_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_fast_mallocz")); - }; - } - av_fast_mallocz_fptr(@ptr, @size, @min_size); - }; - /// Allocate and clear a buffer, reusing the given one if large enough. - /// Pointer to pointer to an already allocated buffer. `*ptr` will be overwritten with pointer to new buffer on success or `NULL` on failure - /// Pointer to the size of buffer `*ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure. 
- /// Desired minimal size of buffer `*ptr` - public static void av_fast_mallocz(void* @ptr, uint* @size, ulong @min_size) - { - av_fast_mallocz_fptr(@ptr, @size, @min_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_fast_realloc_delegate(void* @ptr, uint* @size, ulong @min_size); - private static av_fast_realloc_delegate av_fast_realloc_fptr = (void* @ptr, uint* @size, ulong @min_size) => - { - av_fast_realloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_fast_realloc"); - if (av_fast_realloc_fptr == null) - { - av_fast_realloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_fast_realloc")); - }; - } - return av_fast_realloc_fptr(@ptr, @size, @min_size); - }; - /// Reallocate the given buffer if it is not large enough, otherwise do nothing. - /// Already allocated buffer, or `NULL` - /// Pointer to the size of buffer `ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure. 
- /// Desired minimal size of buffer `ptr` - /// `ptr` if the buffer is large enough, a pointer to newly reallocated buffer if the buffer was not large enough, or `NULL` in case of error - public static void* av_fast_realloc(void* @ptr, uint* @size, ulong @min_size) - { - return av_fast_realloc_fptr(@ptr, @size, @min_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_file_map_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filename, byte** @bufptr, ulong* @size, int @log_offset, void* @log_ctx); - private static av_file_map_delegate av_file_map_fptr = (string @filename, byte** @bufptr, ulong* @size, int @log_offset, void* @log_ctx) => - { - av_file_map_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_file_map"); - if (av_file_map_fptr == null) - { - av_file_map_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_file_map")); - }; - } - return av_file_map_fptr(@filename, @bufptr, @size, @log_offset, @log_ctx); - }; - /// Read the file with name filename, and put its content in a newly allocated buffer or map it with mmap() when available. In case of success set *bufptr to the read or mmapped buffer, and *size to the size in bytes of the buffer in *bufptr. Unlike mmap this function succeeds with zero sized files, in this case *bufptr will be set to NULL and *size will be set to 0. The returned buffer must be released with av_file_unmap(). 
- /// loglevel offset used for logging - /// context used for logging - /// a non negative number in case of success, a negative value corresponding to an AVERROR error code in case of failure - public static int av_file_map( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @filename, byte** @bufptr, ulong* @size, int @log_offset, void* @log_ctx) - { - return av_file_map_fptr(@filename, @bufptr, @size, @log_offset, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_file_unmap_delegate(byte* @bufptr, ulong @size); - private static av_file_unmap_delegate av_file_unmap_fptr = (byte* @bufptr, ulong @size) => - { - av_file_unmap_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_file_unmap"); - if (av_file_unmap_fptr == null) - { - av_file_unmap_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_file_unmap")); - }; - } - av_file_unmap_fptr(@bufptr, @size); - }; - /// Unmap or free the buffer bufptr created by av_file_map(). 
- /// size in bytes of bufptr, must be the same as returned by av_file_map() - public static void av_file_unmap(byte* @bufptr, ulong @size) - { - av_file_unmap_fptr(@bufptr, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVPixelFormat av_find_best_pix_fmt_of_2_delegate(AVPixelFormat @dst_pix_fmt1, AVPixelFormat @dst_pix_fmt2, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr); - private static av_find_best_pix_fmt_of_2_delegate av_find_best_pix_fmt_of_2_fptr = (AVPixelFormat @dst_pix_fmt1, AVPixelFormat @dst_pix_fmt2, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr) => - { - av_find_best_pix_fmt_of_2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_find_best_pix_fmt_of_2"); - if (av_find_best_pix_fmt_of_2_fptr == null) - { - av_find_best_pix_fmt_of_2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_find_best_pix_fmt_of_2")); - }; - } - return av_find_best_pix_fmt_of_2_fptr(@dst_pix_fmt1, @dst_pix_fmt2, @src_pix_fmt, @has_alpha, @loss_ptr); - }; - /// Compute what kind of losses will occur when converting from one specific pixel format to another. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. These losses can involve loss of chroma, but also loss of resolution, loss of color depth, loss due to the color space conversion, loss of the alpha bits or loss due to color quantization. av_get_fix_fmt_loss() informs you about the various types of losses which will occur when converting from one pixel format to another. - /// source pixel format - /// Whether the source pixel format alpha channel is used. - /// Combination of flags informing you what kind of losses will occur (maximum loss for an invalid dst_pix_fmt). 
- public static AVPixelFormat av_find_best_pix_fmt_of_2(AVPixelFormat @dst_pix_fmt1, AVPixelFormat @dst_pix_fmt2, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr) - { - return av_find_best_pix_fmt_of_2_fptr(@dst_pix_fmt1, @dst_pix_fmt2, @src_pix_fmt, @has_alpha, @loss_ptr); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_find_nearest_q_idx_delegate(AVRational @q, AVRational* @q_list); - private static av_find_nearest_q_idx_delegate av_find_nearest_q_idx_fptr = (AVRational @q, AVRational* @q_list) => - { - av_find_nearest_q_idx_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_find_nearest_q_idx"); - if (av_find_nearest_q_idx_fptr == null) - { - av_find_nearest_q_idx_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_find_nearest_q_idx")); - }; - } - return av_find_nearest_q_idx_fptr(@q, @q_list); - }; - /// Find the value in a list of rationals nearest a given reference rational. 
- /// Reference rational - /// Array of rationals terminated by `{0, 0}` - /// Index of the nearest value found in the array - public static int av_find_nearest_q_idx(AVRational @q, AVRational* @q_list) - { - return av_find_nearest_q_idx_fptr(@q, @q_list); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate _iobuf* av_fopen_utf8_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @path, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @mode); - private static av_fopen_utf8_delegate av_fopen_utf8_fptr = (string @path, string @mode) => - { - av_fopen_utf8_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_fopen_utf8"); - if (av_fopen_utf8_fptr == null) - { - av_fopen_utf8_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_fopen_utf8")); - }; - } - return av_fopen_utf8_fptr(@path, @mode); - }; - /// Open a file using a UTF-8 filename. The API of this function matches POSIX fopen(), errors are returned through errno. - [Obsolete("Avoid using it, as on Windows, the FILE* allocated by this function may be allocated with a different CRT than the caller who uses the FILE*. 
No replacement provided in public API.")] - public static _iobuf* av_fopen_utf8( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @path, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @mode) - { - return av_fopen_utf8_fptr(@path, @mode); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_force_cpu_flags_delegate(int @flags); - private static av_force_cpu_flags_delegate av_force_cpu_flags_fptr = (int @flags) => - { - av_force_cpu_flags_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_force_cpu_flags"); - if (av_force_cpu_flags_fptr == null) - { - av_force_cpu_flags_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_force_cpu_flags")); - }; - } - av_force_cpu_flags_fptr(@flags); - }; - /// Disables cpu detection and forces the specified flags. -1 is a special case that disables forcing of specific flags. 
- public static void av_force_cpu_flags(int @flags) - { - av_force_cpu_flags_fptr(@flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_fourcc_make_string_delegate(byte* @buf, uint @fourcc); - private static av_fourcc_make_string_delegate av_fourcc_make_string_fptr = (byte* @buf, uint @fourcc) => - { - av_fourcc_make_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_fourcc_make_string"); - if (av_fourcc_make_string_fptr == null) - { - av_fourcc_make_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_fourcc_make_string")); - }; - } - return av_fourcc_make_string_fptr(@buf, @fourcc); - }; - /// Fill the provided buffer with a string containing a FourCC (four-character code) representation. - /// a buffer with size in bytes of at least AV_FOURCC_MAX_STRING_SIZE - /// the fourcc to represent - /// the buffer in input - public static byte* av_fourcc_make_string(byte* @buf, uint @fourcc) - { - return av_fourcc_make_string_fptr(@buf, @fourcc); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVFrame* av_frame_alloc_delegate(); - private static av_frame_alloc_delegate av_frame_alloc_fptr = () => - { - av_frame_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_alloc"); - if (av_frame_alloc_fptr == null) - { - av_frame_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_alloc")); - }; - } - return av_frame_alloc_fptr(); - }; - /// Allocate an AVFrame and set its fields to default values. The resulting struct must be freed using av_frame_free(). - /// An AVFrame filled with default values or NULL on failure. 
- public static AVFrame* av_frame_alloc() - { - return av_frame_alloc_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_frame_apply_cropping_delegate(AVFrame* @frame, int @flags); - private static av_frame_apply_cropping_delegate av_frame_apply_cropping_fptr = (AVFrame* @frame, int @flags) => - { - av_frame_apply_cropping_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_apply_cropping"); - if (av_frame_apply_cropping_fptr == null) - { - av_frame_apply_cropping_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_apply_cropping")); - }; - } - return av_frame_apply_cropping_fptr(@frame, @flags); - }; - /// Crop the given video AVFrame according to its crop_left/crop_top/crop_right/ crop_bottom fields. If cropping is successful, the function will adjust the data pointers and the width/height fields, and set the crop fields to 0. - /// the frame which should be cropped - /// Some combination of AV_FRAME_CROP_* flags, or 0. - /// >= 0 on success, a negative AVERROR on error. If the cropping fields were invalid, AVERROR(ERANGE) is returned, and nothing is changed. - public static int av_frame_apply_cropping(AVFrame* @frame, int @flags) - { - return av_frame_apply_cropping_fptr(@frame, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVFrame* av_frame_clone_delegate(AVFrame* @src); - private static av_frame_clone_delegate av_frame_clone_fptr = (AVFrame* @src) => - { - av_frame_clone_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_clone"); - if (av_frame_clone_fptr == null) - { - av_frame_clone_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_clone")); - }; - } - return av_frame_clone_fptr(@src); - }; - /// Create a new frame that references the same data as src. 
- /// newly created AVFrame on success, NULL on error. - public static AVFrame* av_frame_clone(AVFrame* @src) - { - return av_frame_clone_fptr(@src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_frame_copy_delegate(AVFrame* @dst, AVFrame* @src); - private static av_frame_copy_delegate av_frame_copy_fptr = (AVFrame* @dst, AVFrame* @src) => - { - av_frame_copy_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_copy"); - if (av_frame_copy_fptr == null) - { - av_frame_copy_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_copy")); - }; - } - return av_frame_copy_fptr(@dst, @src); - }; - /// Copy the frame data from src to dst. - /// >= 0 on success, a negative AVERROR on error. - public static int av_frame_copy(AVFrame* @dst, AVFrame* @src) - { - return av_frame_copy_fptr(@dst, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_frame_copy_props_delegate(AVFrame* @dst, AVFrame* @src); - private static av_frame_copy_props_delegate av_frame_copy_props_fptr = (AVFrame* @dst, AVFrame* @src) => - { - av_frame_copy_props_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_copy_props"); - if (av_frame_copy_props_fptr == null) - { - av_frame_copy_props_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_copy_props")); - }; - } - return av_frame_copy_props_fptr(@dst, @src); - }; - /// Copy only "metadata" fields from src to dst. 
- public static int av_frame_copy_props(AVFrame* @dst, AVFrame* @src) - { - return av_frame_copy_props_fptr(@dst, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_frame_free_delegate(AVFrame** @frame); - private static av_frame_free_delegate av_frame_free_fptr = (AVFrame** @frame) => - { - av_frame_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_free"); - if (av_frame_free_fptr == null) - { - av_frame_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_free")); - }; - } - av_frame_free_fptr(@frame); - }; - /// Free the frame and any dynamically allocated objects in it, e.g. extended_data. If the frame is reference counted, it will be unreferenced first. - /// frame to be freed. The pointer will be set to NULL. - public static void av_frame_free(AVFrame** @frame) - { - av_frame_free_fptr(@frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_frame_get_buffer_delegate(AVFrame* @frame, int @align); - private static av_frame_get_buffer_delegate av_frame_get_buffer_fptr = (AVFrame* @frame, int @align) => - { - av_frame_get_buffer_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_get_buffer"); - if (av_frame_get_buffer_fptr == null) - { - av_frame_get_buffer_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_get_buffer")); - }; - } - return av_frame_get_buffer_fptr(@frame, @align); - }; - /// Allocate new buffer(s) for audio or video data. - /// frame in which to store the new buffers. - /// Required buffer size alignment. If equal to 0, alignment will be chosen automatically for the current CPU. It is highly recommended to pass 0 here unless you know what you are doing. - /// 0 on success, a negative AVERROR on error. 
- public static int av_frame_get_buffer(AVFrame* @frame, int @align) - { - return av_frame_get_buffer_fptr(@frame, @align); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferRef* av_frame_get_plane_buffer_delegate(AVFrame* @frame, int @plane); - private static av_frame_get_plane_buffer_delegate av_frame_get_plane_buffer_fptr = (AVFrame* @frame, int @plane) => - { - av_frame_get_plane_buffer_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_get_plane_buffer"); - if (av_frame_get_plane_buffer_fptr == null) - { - av_frame_get_plane_buffer_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_get_plane_buffer")); - }; - } - return av_frame_get_plane_buffer_fptr(@frame, @plane); - }; - /// Get the buffer reference a given data plane is stored in. - /// index of the data plane of interest in frame->extended_data. - /// the buffer reference that contains the plane or NULL if the input frame is not valid. 
- public static AVBufferRef* av_frame_get_plane_buffer(AVFrame* @frame, int @plane) - { - return av_frame_get_plane_buffer_fptr(@frame, @plane); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVFrameSideData* av_frame_get_side_data_delegate(AVFrame* @frame, AVFrameSideDataType @type); - private static av_frame_get_side_data_delegate av_frame_get_side_data_fptr = (AVFrame* @frame, AVFrameSideDataType @type) => - { - av_frame_get_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_get_side_data"); - if (av_frame_get_side_data_fptr == null) - { - av_frame_get_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_get_side_data")); - }; - } - return av_frame_get_side_data_fptr(@frame, @type); - }; - /// Returns a pointer to the side data of a given type on success, NULL if there is no side data with such type in this frame. - /// a pointer to the side data of a given type on success, NULL if there is no side data with such type in this frame. - public static AVFrameSideData* av_frame_get_side_data(AVFrame* @frame, AVFrameSideDataType @type) - { - return av_frame_get_side_data_fptr(@frame, @type); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_frame_is_writable_delegate(AVFrame* @frame); - private static av_frame_is_writable_delegate av_frame_is_writable_fptr = (AVFrame* @frame) => - { - av_frame_is_writable_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_is_writable"); - if (av_frame_is_writable_fptr == null) - { - av_frame_is_writable_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_is_writable")); - }; - } - return av_frame_is_writable_fptr(@frame); - }; - /// Check if the frame data is writable. 
- /// A positive value if the frame data is writable (which is true if and only if each of the underlying buffers has only one reference, namely the one stored in this frame). Return 0 otherwise. - public static int av_frame_is_writable(AVFrame* @frame) - { - return av_frame_is_writable_fptr(@frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_frame_make_writable_delegate(AVFrame* @frame); - private static av_frame_make_writable_delegate av_frame_make_writable_fptr = (AVFrame* @frame) => - { - av_frame_make_writable_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_make_writable"); - if (av_frame_make_writable_fptr == null) - { - av_frame_make_writable_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_make_writable")); - }; - } - return av_frame_make_writable_fptr(@frame); - }; - /// Ensure that the frame data is writable, avoiding data copy if possible. - /// 0 on success, a negative AVERROR on error. - public static int av_frame_make_writable(AVFrame* @frame) - { - return av_frame_make_writable_fptr(@frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_frame_move_ref_delegate(AVFrame* @dst, AVFrame* @src); - private static av_frame_move_ref_delegate av_frame_move_ref_fptr = (AVFrame* @dst, AVFrame* @src) => - { - av_frame_move_ref_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_move_ref"); - if (av_frame_move_ref_fptr == null) - { - av_frame_move_ref_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_move_ref")); - }; - } - av_frame_move_ref_fptr(@dst, @src); - }; - /// Move everything contained in src to dst and reset src. 
- public static void av_frame_move_ref(AVFrame* @dst, AVFrame* @src) - { - av_frame_move_ref_fptr(@dst, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVFrameSideData* av_frame_new_side_data_delegate(AVFrame* @frame, AVFrameSideDataType @type, ulong @size); - private static av_frame_new_side_data_delegate av_frame_new_side_data_fptr = (AVFrame* @frame, AVFrameSideDataType @type, ulong @size) => - { - av_frame_new_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_new_side_data"); - if (av_frame_new_side_data_fptr == null) - { - av_frame_new_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_new_side_data")); - }; - } - return av_frame_new_side_data_fptr(@frame, @type, @size); - }; - /// Add a new side data to a frame. - /// a frame to which the side data should be added - /// type of the added side data - /// size of the side data - /// newly added side data on success, NULL on error - public static AVFrameSideData* av_frame_new_side_data(AVFrame* @frame, AVFrameSideDataType @type, ulong @size) - { - return av_frame_new_side_data_fptr(@frame, @type, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVFrameSideData* av_frame_new_side_data_from_buf_delegate(AVFrame* @frame, AVFrameSideDataType @type, AVBufferRef* @buf); - private static av_frame_new_side_data_from_buf_delegate av_frame_new_side_data_from_buf_fptr = (AVFrame* @frame, AVFrameSideDataType @type, AVBufferRef* @buf) => - { - av_frame_new_side_data_from_buf_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_new_side_data_from_buf"); - if (av_frame_new_side_data_from_buf_fptr == null) - { - av_frame_new_side_data_from_buf_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, 
"av_frame_new_side_data_from_buf")); - }; - } - return av_frame_new_side_data_from_buf_fptr(@frame, @type, @buf); - }; - /// Add a new side data to a frame from an existing AVBufferRef - /// a frame to which the side data should be added - /// the type of the added side data - /// an AVBufferRef to add as side data. The ownership of the reference is transferred to the frame. - /// newly added side data on success, NULL on error. On failure the frame is unchanged and the AVBufferRef remains owned by the caller. - public static AVFrameSideData* av_frame_new_side_data_from_buf(AVFrame* @frame, AVFrameSideDataType @type, AVBufferRef* @buf) - { - return av_frame_new_side_data_from_buf_fptr(@frame, @type, @buf); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_frame_ref_delegate(AVFrame* @dst, AVFrame* @src); - private static av_frame_ref_delegate av_frame_ref_fptr = (AVFrame* @dst, AVFrame* @src) => - { - av_frame_ref_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_ref"); - if (av_frame_ref_fptr == null) - { - av_frame_ref_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_ref")); - }; - } - return av_frame_ref_fptr(@dst, @src); - }; - /// Set up a new reference to the data described by the source frame. 
- /// 0 on success, a negative AVERROR on error - public static int av_frame_ref(AVFrame* @dst, AVFrame* @src) - { - return av_frame_ref_fptr(@dst, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_frame_remove_side_data_delegate(AVFrame* @frame, AVFrameSideDataType @type); - private static av_frame_remove_side_data_delegate av_frame_remove_side_data_fptr = (AVFrame* @frame, AVFrameSideDataType @type) => - { - av_frame_remove_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_remove_side_data"); - if (av_frame_remove_side_data_fptr == null) - { - av_frame_remove_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_remove_side_data")); - }; - } - av_frame_remove_side_data_fptr(@frame, @type); - }; - /// Remove and free all side data instances of the given type. - public static void av_frame_remove_side_data(AVFrame* @frame, AVFrameSideDataType @type) - { - av_frame_remove_side_data_fptr(@frame, @type); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_frame_side_data_name_delegate(AVFrameSideDataType @type); - private static av_frame_side_data_name_delegate av_frame_side_data_name_fptr = (AVFrameSideDataType @type) => - { - av_frame_side_data_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_side_data_name"); - if (av_frame_side_data_name_fptr == null) - { - av_frame_side_data_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_side_data_name")); - }; - } - return av_frame_side_data_name_fptr(@type); - }; - /// Returns a string identifying the side data type - /// a string identifying the side data type - public static string 
av_frame_side_data_name(AVFrameSideDataType @type) - { - return av_frame_side_data_name_fptr(@type); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_frame_unref_delegate(AVFrame* @frame); - private static av_frame_unref_delegate av_frame_unref_fptr = (AVFrame* @frame) => - { - av_frame_unref_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_frame_unref"); - if (av_frame_unref_fptr == null) - { - av_frame_unref_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_frame_unref")); - }; - } - av_frame_unref_fptr(@frame); - }; - /// Unreference all the buffers referenced by frame and reset the frame fields. - public static void av_frame_unref(AVFrame* @frame) - { - av_frame_unref_fptr(@frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_free_delegate(void* @ptr); - private static av_free_delegate av_free_fptr = (void* @ptr) => - { - av_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_free"); - if (av_free_fptr == null) - { - av_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_free")); - }; - } - av_free_fptr(@ptr); - }; - /// Free a memory block which has been allocated with a function of av_malloc() or av_realloc() family. - /// Pointer to the memory block which should be freed. 
- public static void av_free(void* @ptr) - { - av_free_fptr(@ptr); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_freep_delegate(void* @ptr); - private static av_freep_delegate av_freep_fptr = (void* @ptr) => - { - av_freep_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_freep"); - if (av_freep_fptr == null) - { - av_freep_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_freep")); - }; - } - av_freep_fptr(@ptr); - }; - /// Free a memory block which has been allocated with a function of av_malloc() or av_realloc() family, and set the pointer pointing to it to `NULL`. - /// Pointer to the pointer to the memory block which should be freed - public static void av_freep(void* @ptr) - { - av_freep_fptr(@ptr); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long av_gcd_delegate(long @a, long @b); - private static av_gcd_delegate av_gcd_fptr = (long @a, long @b) => - { - av_gcd_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_gcd"); - if (av_gcd_fptr == null) - { - av_gcd_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_gcd")); - }; - } - return av_gcd_fptr(@a, @b); - }; - /// Compute the greatest common divisor of two integer operands. - /// GCD of a and b up to sign; if a >= 0 and b >= 0, return value is >= 0; if a == 0 and b == 0, returns 0. 
- public static long av_gcd(long @a, long @b) - { - return av_gcd_fptr(@a, @b); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_gcd_q_delegate(AVRational @a, AVRational @b, int @max_den, AVRational @def); - private static av_gcd_q_delegate av_gcd_q_fptr = (AVRational @a, AVRational @b, int @max_den, AVRational @def) => - { - av_gcd_q_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_gcd_q"); - if (av_gcd_q_fptr == null) - { - av_gcd_q_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_gcd_q")); - }; - } - return av_gcd_q_fptr(@a, @b, @max_den, @def); - }; - /// Return the best rational so that a and b are multiple of it. If the resulting denominator is larger than max_den, return def. - public static AVRational av_gcd_q(AVRational @a, AVRational @b, int @max_den, AVRational @def) - { - return av_gcd_q_fptr(@a, @b, @max_den, @def); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVSampleFormat av_get_alt_sample_fmt_delegate(AVSampleFormat @sample_fmt, int @planar); - private static av_get_alt_sample_fmt_delegate av_get_alt_sample_fmt_fptr = (AVSampleFormat @sample_fmt, int @planar) => - { - av_get_alt_sample_fmt_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_alt_sample_fmt"); - if (av_get_alt_sample_fmt_fptr == null) - { - av_get_alt_sample_fmt_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_alt_sample_fmt")); - }; - } - return av_get_alt_sample_fmt_fptr(@sample_fmt, @planar); - }; - /// Return the planar<->packed alternative form of the given sample format, or AV_SAMPLE_FMT_NONE on error. If the passed sample_fmt is already in the requested planar/packed format, the format returned is the same as the input. 
- public static AVSampleFormat av_get_alt_sample_fmt(AVSampleFormat @sample_fmt, int @planar) - { - return av_get_alt_sample_fmt_fptr(@sample_fmt, @planar); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_bits_per_pixel_delegate(AVPixFmtDescriptor* @pixdesc); - private static av_get_bits_per_pixel_delegate av_get_bits_per_pixel_fptr = (AVPixFmtDescriptor* @pixdesc) => - { - av_get_bits_per_pixel_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_bits_per_pixel"); - if (av_get_bits_per_pixel_fptr == null) - { - av_get_bits_per_pixel_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_bits_per_pixel")); - }; - } - return av_get_bits_per_pixel_fptr(@pixdesc); - }; - /// Return the number of bits per pixel used by the pixel format described by pixdesc. Note that this is not the same as the number of bits per sample. - public static int av_get_bits_per_pixel(AVPixFmtDescriptor* @pixdesc) - { - return av_get_bits_per_pixel_fptr(@pixdesc); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_bytes_per_sample_delegate(AVSampleFormat @sample_fmt); - private static av_get_bytes_per_sample_delegate av_get_bytes_per_sample_fptr = (AVSampleFormat @sample_fmt) => - { - av_get_bytes_per_sample_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_bytes_per_sample"); - if (av_get_bytes_per_sample_fptr == null) - { - av_get_bytes_per_sample_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_bytes_per_sample")); - }; - } - return av_get_bytes_per_sample_fptr(@sample_fmt); - }; - /// Return number of bytes per sample. 
- /// the sample format - /// number of bytes per sample or zero if unknown for the given sample format - public static int av_get_bytes_per_sample(AVSampleFormat @sample_fmt) - { - return av_get_bytes_per_sample_fptr(@sample_fmt); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_get_channel_description_delegate(ulong @channel); - private static av_get_channel_description_delegate av_get_channel_description_fptr = (ulong @channel) => - { - av_get_channel_description_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_channel_description"); - if (av_get_channel_description_fptr == null) - { - av_get_channel_description_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_channel_description")); - }; - } - return av_get_channel_description_fptr(@channel); - }; - /// Get the description of a given channel. 
- /// a channel layout with a single channel - /// channel description on success, NULL on error - [Obsolete("use av_channel_description()")] - public static string av_get_channel_description(ulong @channel) - { - return av_get_channel_description_fptr(@channel); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate ulong av_get_channel_layout_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_get_channel_layout_delegate av_get_channel_layout_fptr = (string @name) => - { - av_get_channel_layout_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_channel_layout"); - if (av_get_channel_layout_fptr == null) - { - av_get_channel_layout_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_channel_layout")); - }; - } - return av_get_channel_layout_fptr(@name); - }; - /// Return a channel layout id that matches name, or 0 if no match is found. 
- [Obsolete("use av_channel_layout_from_string()")] - public static ulong av_get_channel_layout( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_get_channel_layout_fptr(@name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_channel_layout_channel_index_delegate(ulong @channel_layout, ulong @channel); - private static av_get_channel_layout_channel_index_delegate av_get_channel_layout_channel_index_fptr = (ulong @channel_layout, ulong @channel) => - { - av_get_channel_layout_channel_index_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_channel_layout_channel_index"); - if (av_get_channel_layout_channel_index_fptr == null) - { - av_get_channel_layout_channel_index_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_channel_layout_channel_index")); - }; - } - return av_get_channel_layout_channel_index_fptr(@channel_layout, @channel); - }; - /// Get the index of a channel in channel_layout. - /// a channel layout describing exactly one channel which must be present in channel_layout. - /// index of channel in channel_layout on success, a negative AVERROR on error. 
- [Obsolete("use av_channel_layout_index_from_channel()")] - public static int av_get_channel_layout_channel_index(ulong @channel_layout, ulong @channel) - { - return av_get_channel_layout_channel_index_fptr(@channel_layout, @channel); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_channel_layout_nb_channels_delegate(ulong @channel_layout); - private static av_get_channel_layout_nb_channels_delegate av_get_channel_layout_nb_channels_fptr = (ulong @channel_layout) => - { - av_get_channel_layout_nb_channels_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_channel_layout_nb_channels"); - if (av_get_channel_layout_nb_channels_fptr == null) - { - av_get_channel_layout_nb_channels_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_channel_layout_nb_channels")); - }; - } - return av_get_channel_layout_nb_channels_fptr(@channel_layout); - }; - /// Return the number of channels in the channel layout. 
- [Obsolete("use AVChannelLayout.nb_channels")] - public static int av_get_channel_layout_nb_channels(ulong @channel_layout) - { - return av_get_channel_layout_nb_channels_fptr(@channel_layout); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_get_channel_layout_string_delegate(byte* @buf, int @buf_size, int @nb_channels, ulong @channel_layout); - private static av_get_channel_layout_string_delegate av_get_channel_layout_string_fptr = (byte* @buf, int @buf_size, int @nb_channels, ulong @channel_layout) => - { - av_get_channel_layout_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_channel_layout_string"); - if (av_get_channel_layout_string_fptr == null) - { - av_get_channel_layout_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_channel_layout_string")); - }; - } - av_get_channel_layout_string_fptr(@buf, @buf_size, @nb_channels, @channel_layout); - }; - /// Return a description of a channel layout. If nb_channels is <= 0, it is guessed from the channel_layout. 
- /// put here the string containing the channel layout - /// size in bytes of the buffer - [Obsolete("use av_channel_layout_describe()")] - public static void av_get_channel_layout_string(byte* @buf, int @buf_size, int @nb_channels, ulong @channel_layout) - { - av_get_channel_layout_string_fptr(@buf, @buf_size, @nb_channels, @channel_layout); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_get_channel_name_delegate(ulong @channel); - private static av_get_channel_name_delegate av_get_channel_name_fptr = (ulong @channel) => - { - av_get_channel_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_channel_name"); - if (av_get_channel_name_fptr == null) - { - av_get_channel_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_channel_name")); - }; - } - return av_get_channel_name_fptr(@channel); - }; - /// Get the name of a given channel. - /// channel name on success, NULL on error. 
- [Obsolete("use av_channel_name()")] - public static string av_get_channel_name(ulong @channel) - { - return av_get_channel_name_fptr(@channel); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_get_colorspace_name_delegate(AVColorSpace @val); - private static av_get_colorspace_name_delegate av_get_colorspace_name_fptr = (AVColorSpace @val) => - { - av_get_colorspace_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_colorspace_name"); - if (av_get_colorspace_name_fptr == null) - { - av_get_colorspace_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_colorspace_name")); - }; - } - return av_get_colorspace_name_fptr(@val); - }; - /// Get the name of a colorspace. - /// a static string identifying the colorspace; can be NULL. - [Obsolete("use av_color_space_name()")] - public static string av_get_colorspace_name(AVColorSpace @val) - { - return av_get_colorspace_name_fptr(@val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_cpu_flags_delegate(); - private static av_get_cpu_flags_delegate av_get_cpu_flags_fptr = () => - { - av_get_cpu_flags_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_cpu_flags"); - if (av_get_cpu_flags_fptr == null) - { - av_get_cpu_flags_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_cpu_flags")); - }; - } - return av_get_cpu_flags_fptr(); - }; - /// Return the flags which specify extensions supported by the CPU. The returned value is affected by av_force_cpu_flags() if that was used before. So av_get_cpu_flags() can easily be used in an application to detect the enabled cpu flags. 
- public static int av_get_cpu_flags() - { - return av_get_cpu_flags_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long av_get_default_channel_layout_delegate(int @nb_channels); - private static av_get_default_channel_layout_delegate av_get_default_channel_layout_fptr = (int @nb_channels) => - { - av_get_default_channel_layout_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_default_channel_layout"); - if (av_get_default_channel_layout_fptr == null) - { - av_get_default_channel_layout_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_default_channel_layout")); - }; - } - return av_get_default_channel_layout_fptr(@nb_channels); - }; - /// Return default channel layout for a given number of channels. - [Obsolete("use av_channel_layout_default()")] - public static long av_get_default_channel_layout(int @nb_channels) - { - return av_get_default_channel_layout_fptr(@nb_channels); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_extended_channel_layout_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, ulong* @channel_layout, int* @nb_channels); - private static av_get_extended_channel_layout_delegate av_get_extended_channel_layout_fptr = (string @name, ulong* @channel_layout, int* @nb_channels) => - { - av_get_extended_channel_layout_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_extended_channel_layout"); - if (av_get_extended_channel_layout_fptr == null) - { - av_get_extended_channel_layout_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_extended_channel_layout")); - }; - } - return av_get_extended_channel_layout_fptr(@name, 
@channel_layout, @nb_channels); - }; - /// Return a channel layout and the number of channels based on the specified name. - /// channel layout specification string - /// parsed channel layout (0 if unknown) - /// number of channels - /// 0 on success, AVERROR(EINVAL) if the parsing fails. - [Obsolete("use av_channel_layout_from_string()")] - public static int av_get_extended_channel_layout( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, ulong* @channel_layout, int* @nb_channels) - { - return av_get_extended_channel_layout_fptr(@name, @channel_layout, @nb_channels); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_get_media_type_string_delegate(AVMediaType @media_type); - private static av_get_media_type_string_delegate av_get_media_type_string_fptr = (AVMediaType @media_type) => - { - av_get_media_type_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_media_type_string"); - if (av_get_media_type_string_fptr == null) - { - av_get_media_type_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_media_type_string")); - }; - } - return av_get_media_type_string_fptr(@media_type); - }; - /// Return a string describing the media_type enum, NULL if media_type is unknown. 
- public static string av_get_media_type_string(AVMediaType @media_type) - { - return av_get_media_type_string_fptr(@media_type); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVSampleFormat av_get_packed_sample_fmt_delegate(AVSampleFormat @sample_fmt); - private static av_get_packed_sample_fmt_delegate av_get_packed_sample_fmt_fptr = (AVSampleFormat @sample_fmt) => - { - av_get_packed_sample_fmt_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_packed_sample_fmt"); - if (av_get_packed_sample_fmt_fptr == null) - { - av_get_packed_sample_fmt_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_packed_sample_fmt")); - }; - } - return av_get_packed_sample_fmt_fptr(@sample_fmt); - }; - /// Get the packed alternative form of the given sample format. - /// the packed alternative form of the given sample format or AV_SAMPLE_FMT_NONE on error. - public static AVSampleFormat av_get_packed_sample_fmt(AVSampleFormat @sample_fmt) - { - return av_get_packed_sample_fmt_fptr(@sample_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_padded_bits_per_pixel_delegate(AVPixFmtDescriptor* @pixdesc); - private static av_get_padded_bits_per_pixel_delegate av_get_padded_bits_per_pixel_fptr = (AVPixFmtDescriptor* @pixdesc) => - { - av_get_padded_bits_per_pixel_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_padded_bits_per_pixel"); - if (av_get_padded_bits_per_pixel_fptr == null) - { - av_get_padded_bits_per_pixel_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_padded_bits_per_pixel")); - }; - } - return av_get_padded_bits_per_pixel_fptr(@pixdesc); - }; - /// Return the number of bits per pixel for the pixel format described by pixdesc, including any padding or unused bits. 
- public static int av_get_padded_bits_per_pixel(AVPixFmtDescriptor* @pixdesc) - { - return av_get_padded_bits_per_pixel_fptr(@pixdesc); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte av_get_picture_type_char_delegate(AVPictureType @pict_type); - private static av_get_picture_type_char_delegate av_get_picture_type_char_fptr = (AVPictureType @pict_type) => - { - av_get_picture_type_char_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_picture_type_char"); - if (av_get_picture_type_char_fptr == null) - { - av_get_picture_type_char_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_picture_type_char")); - }; - } - return av_get_picture_type_char_fptr(@pict_type); - }; - /// Return a single letter to describe the given picture type pict_type. - /// the picture type - /// a single character representing the picture type, '?' if pict_type is unknown - public static byte av_get_picture_type_char(AVPictureType @pict_type) - { - return av_get_picture_type_char_fptr(@pict_type); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVPixelFormat av_get_pix_fmt_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_get_pix_fmt_delegate av_get_pix_fmt_fptr = (string @name) => - { - av_get_pix_fmt_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_pix_fmt"); - if (av_get_pix_fmt_fptr == null) - { - av_get_pix_fmt_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_pix_fmt")); - }; - } - return av_get_pix_fmt_fptr(@name); - }; - /// Return the pixel format corresponding to name. 
- public static AVPixelFormat av_get_pix_fmt( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_get_pix_fmt_fptr(@name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_pix_fmt_loss_delegate(AVPixelFormat @dst_pix_fmt, AVPixelFormat @src_pix_fmt, int @has_alpha); - private static av_get_pix_fmt_loss_delegate av_get_pix_fmt_loss_fptr = (AVPixelFormat @dst_pix_fmt, AVPixelFormat @src_pix_fmt, int @has_alpha) => - { - av_get_pix_fmt_loss_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_pix_fmt_loss"); - if (av_get_pix_fmt_loss_fptr == null) - { - av_get_pix_fmt_loss_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_pix_fmt_loss")); - }; - } - return av_get_pix_fmt_loss_fptr(@dst_pix_fmt, @src_pix_fmt, @has_alpha); - }; - /// Compute what kind of losses will occur when converting from one specific pixel format to another. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. These losses can involve loss of chroma, but also loss of resolution, loss of color depth, loss due to the color space conversion, loss of the alpha bits or loss due to color quantization. av_get_fix_fmt_loss() informs you about the various types of losses which will occur when converting from one pixel format to another. - /// destination pixel format - /// source pixel format - /// Whether the source pixel format alpha channel is used. - /// Combination of flags informing you what kind of losses will occur (maximum loss for an invalid dst_pix_fmt). 
- public static int av_get_pix_fmt_loss(AVPixelFormat @dst_pix_fmt, AVPixelFormat @src_pix_fmt, int @has_alpha) - { - return av_get_pix_fmt_loss_fptr(@dst_pix_fmt, @src_pix_fmt, @has_alpha); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_get_pix_fmt_name_delegate(AVPixelFormat @pix_fmt); - private static av_get_pix_fmt_name_delegate av_get_pix_fmt_name_fptr = (AVPixelFormat @pix_fmt) => - { - av_get_pix_fmt_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_pix_fmt_name"); - if (av_get_pix_fmt_name_fptr == null) - { - av_get_pix_fmt_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_pix_fmt_name")); - }; - } - return av_get_pix_fmt_name_fptr(@pix_fmt); - }; - /// Return the short name for a pixel format, NULL in case pix_fmt is unknown. - public static string av_get_pix_fmt_name(AVPixelFormat @pix_fmt) - { - return av_get_pix_fmt_name_fptr(@pix_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_get_pix_fmt_string_delegate(byte* @buf, int @buf_size, AVPixelFormat @pix_fmt); - private static av_get_pix_fmt_string_delegate av_get_pix_fmt_string_fptr = (byte* @buf, int @buf_size, AVPixelFormat @pix_fmt) => - { - av_get_pix_fmt_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_pix_fmt_string"); - if (av_get_pix_fmt_string_fptr == null) - { - av_get_pix_fmt_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_pix_fmt_string")); - }; - } - return av_get_pix_fmt_string_fptr(@buf, @buf_size, @pix_fmt); - }; - /// Print in buf the string corresponding to the pixel format with number pix_fmt, or a header if pix_fmt is negative. 
- /// the buffer where to write the string - /// the size of buf - /// the number of the pixel format to print the corresponding info string, or a negative value to print the corresponding header. - public static byte* av_get_pix_fmt_string(byte* @buf, int @buf_size, AVPixelFormat @pix_fmt) - { - return av_get_pix_fmt_string_fptr(@buf, @buf_size, @pix_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVSampleFormat av_get_planar_sample_fmt_delegate(AVSampleFormat @sample_fmt); - private static av_get_planar_sample_fmt_delegate av_get_planar_sample_fmt_fptr = (AVSampleFormat @sample_fmt) => - { - av_get_planar_sample_fmt_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_planar_sample_fmt"); - if (av_get_planar_sample_fmt_fptr == null) - { - av_get_planar_sample_fmt_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_planar_sample_fmt")); - }; - } - return av_get_planar_sample_fmt_fptr(@sample_fmt); - }; - /// Get the planar alternative form of the given sample format. - /// the planar alternative form of the given sample format or AV_SAMPLE_FMT_NONE on error. 
- public static AVSampleFormat av_get_planar_sample_fmt(AVSampleFormat @sample_fmt) - { - return av_get_planar_sample_fmt_fptr(@sample_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVSampleFormat av_get_sample_fmt_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_get_sample_fmt_delegate av_get_sample_fmt_fptr = (string @name) => - { - av_get_sample_fmt_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_sample_fmt"); - if (av_get_sample_fmt_fptr == null) - { - av_get_sample_fmt_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_sample_fmt")); - }; - } - return av_get_sample_fmt_fptr(@name); - }; - /// Return a sample format corresponding to name, or AV_SAMPLE_FMT_NONE on error. - public static AVSampleFormat av_get_sample_fmt( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_get_sample_fmt_fptr(@name); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_get_sample_fmt_name_delegate(AVSampleFormat @sample_fmt); - private static av_get_sample_fmt_name_delegate av_get_sample_fmt_name_fptr = (AVSampleFormat @sample_fmt) => - { - av_get_sample_fmt_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_sample_fmt_name"); - if (av_get_sample_fmt_name_fptr == null) - { - av_get_sample_fmt_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_sample_fmt_name")); - }; - } 
- return av_get_sample_fmt_name_fptr(@sample_fmt); - }; - /// Return the name of sample_fmt, or NULL if sample_fmt is not recognized. - public static string av_get_sample_fmt_name(AVSampleFormat @sample_fmt) - { - return av_get_sample_fmt_name_fptr(@sample_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_get_sample_fmt_string_delegate(byte* @buf, int @buf_size, AVSampleFormat @sample_fmt); - private static av_get_sample_fmt_string_delegate av_get_sample_fmt_string_fptr = (byte* @buf, int @buf_size, AVSampleFormat @sample_fmt) => - { - av_get_sample_fmt_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_sample_fmt_string"); - if (av_get_sample_fmt_string_fptr == null) - { - av_get_sample_fmt_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_sample_fmt_string")); - }; - } - return av_get_sample_fmt_string_fptr(@buf, @buf_size, @sample_fmt); - }; - /// Generate a string corresponding to the sample format with sample_fmt, or a header if sample_fmt is negative. - /// the buffer where to write the string - /// the size of buf - /// the number of the sample format to print the corresponding info string, or a negative value to print the corresponding header. 
- /// the pointer to the filled buffer or NULL if sample_fmt is unknown or in case of other errors - public static byte* av_get_sample_fmt_string(byte* @buf, int @buf_size, AVSampleFormat @sample_fmt) - { - return av_get_sample_fmt_string_fptr(@buf, @buf_size, @sample_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_get_standard_channel_layout_delegate(uint @index, ulong* @layout, byte** @name); - private static av_get_standard_channel_layout_delegate av_get_standard_channel_layout_fptr = (uint @index, ulong* @layout, byte** @name) => - { - av_get_standard_channel_layout_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_standard_channel_layout"); - if (av_get_standard_channel_layout_fptr == null) - { - av_get_standard_channel_layout_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_standard_channel_layout")); - }; - } - return av_get_standard_channel_layout_fptr(@index, @layout, @name); - }; - /// Get the value and name of a standard channel layout. 
- /// index in an internal list, starting at 0 - /// channel layout mask - /// name of the layout - /// 0 if the layout exists, < 0 if index is beyond the limits - [Obsolete("use av_channel_layout_standard()")] - public static int av_get_standard_channel_layout(uint @index, ulong* @layout, byte** @name) - { - return av_get_standard_channel_layout_fptr(@index, @layout, @name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_get_time_base_q_delegate(); - private static av_get_time_base_q_delegate av_get_time_base_q_fptr = () => - { - av_get_time_base_q_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_get_time_base_q"); - if (av_get_time_base_q_fptr == null) - { - av_get_time_base_q_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_get_time_base_q")); - }; - } - return av_get_time_base_q_fptr(); - }; - /// Return the fractional representation of the internal time base. - public static AVRational av_get_time_base_q() - { - return av_get_time_base_q_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long av_gettime_delegate(); - private static av_gettime_delegate av_gettime_fptr = () => - { - av_gettime_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_gettime"); - if (av_gettime_fptr == null) - { - av_gettime_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_gettime")); - }; - } - return av_gettime_fptr(); - }; - /// Get the current time in microseconds. 
- public static long av_gettime() - { - return av_gettime_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long av_gettime_relative_delegate(); - private static av_gettime_relative_delegate av_gettime_relative_fptr = () => - { - av_gettime_relative_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_gettime_relative"); - if (av_gettime_relative_fptr == null) - { - av_gettime_relative_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_gettime_relative")); - }; - } - return av_gettime_relative_fptr(); - }; - /// Get the current time in microseconds since some unspecified starting point. On platforms that support it, the time comes from a monotonic clock This property makes this time source ideal for measuring relative time. The returned values may not be monotonic on platforms where a monotonic clock is not available. - public static long av_gettime_relative() - { - return av_gettime_relative_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_gettime_relative_is_monotonic_delegate(); - private static av_gettime_relative_is_monotonic_delegate av_gettime_relative_is_monotonic_fptr = () => - { - av_gettime_relative_is_monotonic_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_gettime_relative_is_monotonic"); - if (av_gettime_relative_is_monotonic_fptr == null) - { - av_gettime_relative_is_monotonic_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_gettime_relative_is_monotonic")); - }; - } - return av_gettime_relative_is_monotonic_fptr(); - }; - /// Indicates with a boolean result if the av_gettime_relative() time source is monotonic. 
- public static int av_gettime_relative_is_monotonic() - { - return av_gettime_relative_is_monotonic_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferRef* av_hwdevice_ctx_alloc_delegate(AVHWDeviceType @type); - private static av_hwdevice_ctx_alloc_delegate av_hwdevice_ctx_alloc_fptr = (AVHWDeviceType @type) => - { - av_hwdevice_ctx_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwdevice_ctx_alloc"); - if (av_hwdevice_ctx_alloc_fptr == null) - { - av_hwdevice_ctx_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwdevice_ctx_alloc")); - }; - } - return av_hwdevice_ctx_alloc_fptr(@type); - }; - /// Allocate an AVHWDeviceContext for a given hardware type. - /// the type of the hardware device to allocate. - /// a reference to the newly created AVHWDeviceContext on success or NULL on failure. - public static AVBufferRef* av_hwdevice_ctx_alloc(AVHWDeviceType @type) - { - return av_hwdevice_ctx_alloc_fptr(@type); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_hwdevice_ctx_create_delegate(AVBufferRef** @device_ctx, AVHWDeviceType @type, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @device, AVDictionary* @opts, int @flags); - private static av_hwdevice_ctx_create_delegate av_hwdevice_ctx_create_fptr = (AVBufferRef** @device_ctx, AVHWDeviceType @type, string @device, AVDictionary* @opts, int @flags) => - { - av_hwdevice_ctx_create_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwdevice_ctx_create"); - if (av_hwdevice_ctx_create_fptr == null) - { - av_hwdevice_ctx_create_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwdevice_ctx_create")); 
- }; - } - return av_hwdevice_ctx_create_fptr(@device_ctx, @type, @device, @opts, @flags); - }; - /// Open a device of the specified type and create an AVHWDeviceContext for it. - /// On success, a reference to the newly-created device context will be written here. The reference is owned by the caller and must be released with av_buffer_unref() when no longer needed. On failure, NULL will be written to this pointer. - /// The type of the device to create. - /// A type-specific string identifying the device to open. - /// A dictionary of additional (type-specific) options to use in opening the device. The dictionary remains owned by the caller. - /// currently unused - /// 0 on success, a negative AVERROR code on failure. - public static int av_hwdevice_ctx_create(AVBufferRef** @device_ctx, AVHWDeviceType @type, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @device, AVDictionary* @opts, int @flags) - { - return av_hwdevice_ctx_create_fptr(@device_ctx, @type, @device, @opts, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_hwdevice_ctx_create_derived_delegate(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, int @flags); - private static av_hwdevice_ctx_create_derived_delegate av_hwdevice_ctx_create_derived_fptr = (AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, int @flags) => - { - av_hwdevice_ctx_create_derived_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwdevice_ctx_create_derived"); - if (av_hwdevice_ctx_create_derived_fptr == null) - { - av_hwdevice_ctx_create_derived_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwdevice_ctx_create_derived")); - }; - } - return av_hwdevice_ctx_create_derived_fptr(@dst_ctx, @type, @src_ctx, @flags); - 
}; - /// Create a new device of the specified type from an existing device. - /// On success, a reference to the newly-created AVHWDeviceContext. - /// The type of the new device to create. - /// A reference to an existing AVHWDeviceContext which will be used to create the new device. - /// Currently unused; should be set to zero. - /// Zero on success, a negative AVERROR code on failure. - public static int av_hwdevice_ctx_create_derived(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, int @flags) - { - return av_hwdevice_ctx_create_derived_fptr(@dst_ctx, @type, @src_ctx, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_hwdevice_ctx_create_derived_opts_delegate(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, AVDictionary* @options, int @flags); - private static av_hwdevice_ctx_create_derived_opts_delegate av_hwdevice_ctx_create_derived_opts_fptr = (AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, AVDictionary* @options, int @flags) => - { - av_hwdevice_ctx_create_derived_opts_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwdevice_ctx_create_derived_opts"); - if (av_hwdevice_ctx_create_derived_opts_fptr == null) - { - av_hwdevice_ctx_create_derived_opts_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwdevice_ctx_create_derived_opts")); - }; - } - return av_hwdevice_ctx_create_derived_opts_fptr(@dst_ctx, @type, @src_ctx, @options, @flags); - }; - /// Create a new device of the specified type from an existing device. - /// On success, a reference to the newly-created AVHWDeviceContext. - /// The type of the new device to create. - /// A reference to an existing AVHWDeviceContext which will be used to create the new device. - /// Options for the new device to create, same format as in av_hwdevice_ctx_create. - /// Currently unused; should be set to zero. 
- /// Zero on success, a negative AVERROR code on failure. - public static int av_hwdevice_ctx_create_derived_opts(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, AVDictionary* @options, int @flags) - { - return av_hwdevice_ctx_create_derived_opts_fptr(@dst_ctx, @type, @src_ctx, @options, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_hwdevice_ctx_init_delegate(AVBufferRef* @ref); - private static av_hwdevice_ctx_init_delegate av_hwdevice_ctx_init_fptr = (AVBufferRef* @ref) => - { - av_hwdevice_ctx_init_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwdevice_ctx_init"); - if (av_hwdevice_ctx_init_fptr == null) - { - av_hwdevice_ctx_init_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwdevice_ctx_init")); - }; - } - return av_hwdevice_ctx_init_fptr(@ref); - }; - /// Finalize the device context before use. This function must be called after the context is filled with all the required information and before it is used in any way. 
- /// a reference to the AVHWDeviceContext - /// 0 on success, a negative AVERROR code on failure - public static int av_hwdevice_ctx_init(AVBufferRef* @ref) - { - return av_hwdevice_ctx_init_fptr(@ref); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVHWDeviceType av_hwdevice_find_type_by_name_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_hwdevice_find_type_by_name_delegate av_hwdevice_find_type_by_name_fptr = (string @name) => - { - av_hwdevice_find_type_by_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwdevice_find_type_by_name"); - if (av_hwdevice_find_type_by_name_fptr == null) - { - av_hwdevice_find_type_by_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwdevice_find_type_by_name")); - }; - } - return av_hwdevice_find_type_by_name_fptr(@name); - }; - /// Look up an AVHWDeviceType by name. - /// String name of the device type (case-insensitive). - /// The type from enum AVHWDeviceType, or AV_HWDEVICE_TYPE_NONE if not found. 
- public static AVHWDeviceType av_hwdevice_find_type_by_name( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_hwdevice_find_type_by_name_fptr(@name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVHWFramesConstraints* av_hwdevice_get_hwframe_constraints_delegate(AVBufferRef* @ref, void* @hwconfig); - private static av_hwdevice_get_hwframe_constraints_delegate av_hwdevice_get_hwframe_constraints_fptr = (AVBufferRef* @ref, void* @hwconfig) => - { - av_hwdevice_get_hwframe_constraints_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwdevice_get_hwframe_constraints"); - if (av_hwdevice_get_hwframe_constraints_fptr == null) - { - av_hwdevice_get_hwframe_constraints_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwdevice_get_hwframe_constraints")); - }; - } - return av_hwdevice_get_hwframe_constraints_fptr(@ref, @hwconfig); - }; - /// Get the constraints on HW frames given a device and the HW-specific configuration to be used with that device. If no HW-specific configuration is provided, returns the maximum possible capabilities of the device. - /// a reference to the associated AVHWDeviceContext. - /// a filled HW-specific configuration structure, or NULL to return the maximum possible capabilities of the device. - /// AVHWFramesConstraints structure describing the constraints on the device, or NULL if not available. 
- public static AVHWFramesConstraints* av_hwdevice_get_hwframe_constraints(AVBufferRef* @ref, void* @hwconfig) - { - return av_hwdevice_get_hwframe_constraints_fptr(@ref, @hwconfig); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_hwdevice_get_type_name_delegate(AVHWDeviceType @type); - private static av_hwdevice_get_type_name_delegate av_hwdevice_get_type_name_fptr = (AVHWDeviceType @type) => - { - av_hwdevice_get_type_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwdevice_get_type_name"); - if (av_hwdevice_get_type_name_fptr == null) - { - av_hwdevice_get_type_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwdevice_get_type_name")); - }; - } - return av_hwdevice_get_type_name_fptr(@type); - }; - /// Get the string name of an AVHWDeviceType. - /// Type from enum AVHWDeviceType. - /// Pointer to a static string containing the name, or NULL if the type is not valid. 
- public static string av_hwdevice_get_type_name(AVHWDeviceType @type) - { - return av_hwdevice_get_type_name_fptr(@type); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_hwdevice_hwconfig_alloc_delegate(AVBufferRef* @device_ctx); - private static av_hwdevice_hwconfig_alloc_delegate av_hwdevice_hwconfig_alloc_fptr = (AVBufferRef* @device_ctx) => - { - av_hwdevice_hwconfig_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwdevice_hwconfig_alloc"); - if (av_hwdevice_hwconfig_alloc_fptr == null) - { - av_hwdevice_hwconfig_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwdevice_hwconfig_alloc")); - }; - } - return av_hwdevice_hwconfig_alloc_fptr(@device_ctx); - }; - /// Allocate a HW-specific configuration structure for a given HW device. After use, the user must free all members as required by the specific hardware structure being used, then free the structure itself with av_free(). - /// a reference to the associated AVHWDeviceContext. - /// The newly created HW-specific configuration structure on success or NULL on failure. 
- public static void* av_hwdevice_hwconfig_alloc(AVBufferRef* @device_ctx) - { - return av_hwdevice_hwconfig_alloc_fptr(@device_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVHWDeviceType av_hwdevice_iterate_types_delegate(AVHWDeviceType @prev); - private static av_hwdevice_iterate_types_delegate av_hwdevice_iterate_types_fptr = (AVHWDeviceType @prev) => - { - av_hwdevice_iterate_types_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwdevice_iterate_types"); - if (av_hwdevice_iterate_types_fptr == null) - { - av_hwdevice_iterate_types_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwdevice_iterate_types")); - }; - } - return av_hwdevice_iterate_types_fptr(@prev); - }; - /// Iterate over supported device types. - /// The next usable device type from enum AVHWDeviceType, or AV_HWDEVICE_TYPE_NONE if there are no more. - public static AVHWDeviceType av_hwdevice_iterate_types(AVHWDeviceType @prev) - { - return av_hwdevice_iterate_types_fptr(@prev); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_hwframe_constraints_free_delegate(AVHWFramesConstraints** @constraints); - private static av_hwframe_constraints_free_delegate av_hwframe_constraints_free_fptr = (AVHWFramesConstraints** @constraints) => - { - av_hwframe_constraints_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwframe_constraints_free"); - if (av_hwframe_constraints_free_fptr == null) - { - av_hwframe_constraints_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwframe_constraints_free")); - }; - } - av_hwframe_constraints_free_fptr(@constraints); - }; - /// Free an AVHWFrameConstraints structure. - /// The (filled or unfilled) AVHWFrameConstraints structure. 
- public static void av_hwframe_constraints_free(AVHWFramesConstraints** @constraints) - { - av_hwframe_constraints_free_fptr(@constraints); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVBufferRef* av_hwframe_ctx_alloc_delegate(AVBufferRef* @device_ctx); - private static av_hwframe_ctx_alloc_delegate av_hwframe_ctx_alloc_fptr = (AVBufferRef* @device_ctx) => - { - av_hwframe_ctx_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwframe_ctx_alloc"); - if (av_hwframe_ctx_alloc_fptr == null) - { - av_hwframe_ctx_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwframe_ctx_alloc")); - }; - } - return av_hwframe_ctx_alloc_fptr(@device_ctx); - }; - /// Allocate an AVHWFramesContext tied to a given device context. - /// a reference to a AVHWDeviceContext. This function will make a new reference for internal use, the one passed to the function remains owned by the caller. - /// a reference to the newly created AVHWFramesContext on success or NULL on failure. 
- public static AVBufferRef* av_hwframe_ctx_alloc(AVBufferRef* @device_ctx) - { - return av_hwframe_ctx_alloc_fptr(@device_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_hwframe_ctx_create_derived_delegate(AVBufferRef** @derived_frame_ctx, AVPixelFormat @format, AVBufferRef* @derived_device_ctx, AVBufferRef* @source_frame_ctx, int @flags); - private static av_hwframe_ctx_create_derived_delegate av_hwframe_ctx_create_derived_fptr = (AVBufferRef** @derived_frame_ctx, AVPixelFormat @format, AVBufferRef* @derived_device_ctx, AVBufferRef* @source_frame_ctx, int @flags) => - { - av_hwframe_ctx_create_derived_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwframe_ctx_create_derived"); - if (av_hwframe_ctx_create_derived_fptr == null) - { - av_hwframe_ctx_create_derived_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwframe_ctx_create_derived")); - }; - } - return av_hwframe_ctx_create_derived_fptr(@derived_frame_ctx, @format, @derived_device_ctx, @source_frame_ctx, @flags); - }; - /// Create and initialise an AVHWFramesContext as a mapping of another existing AVHWFramesContext on a different device. - /// On success, a reference to the newly created AVHWFramesContext. - /// A reference to the device to create the new AVHWFramesContext on. - /// A reference to an existing AVHWFramesContext which will be mapped to the derived context. - /// Some combination of AV_HWFRAME_MAP_* flags, defining the mapping parameters to apply to frames which are allocated in the derived device. - /// Zero on success, negative AVERROR code on failure. 
- public static int av_hwframe_ctx_create_derived(AVBufferRef** @derived_frame_ctx, AVPixelFormat @format, AVBufferRef* @derived_device_ctx, AVBufferRef* @source_frame_ctx, int @flags) - { - return av_hwframe_ctx_create_derived_fptr(@derived_frame_ctx, @format, @derived_device_ctx, @source_frame_ctx, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_hwframe_ctx_init_delegate(AVBufferRef* @ref); - private static av_hwframe_ctx_init_delegate av_hwframe_ctx_init_fptr = (AVBufferRef* @ref) => - { - av_hwframe_ctx_init_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwframe_ctx_init"); - if (av_hwframe_ctx_init_fptr == null) - { - av_hwframe_ctx_init_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwframe_ctx_init")); - }; - } - return av_hwframe_ctx_init_fptr(@ref); - }; - /// Finalize the context before use. This function must be called after the context is filled with all the required information and before it is attached to any frames. 
- /// a reference to the AVHWFramesContext - /// 0 on success, a negative AVERROR code on failure - public static int av_hwframe_ctx_init(AVBufferRef* @ref) - { - return av_hwframe_ctx_init_fptr(@ref); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_hwframe_get_buffer_delegate(AVBufferRef* @hwframe_ctx, AVFrame* @frame, int @flags); - private static av_hwframe_get_buffer_delegate av_hwframe_get_buffer_fptr = (AVBufferRef* @hwframe_ctx, AVFrame* @frame, int @flags) => - { - av_hwframe_get_buffer_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwframe_get_buffer"); - if (av_hwframe_get_buffer_fptr == null) - { - av_hwframe_get_buffer_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwframe_get_buffer")); - }; - } - return av_hwframe_get_buffer_fptr(@hwframe_ctx, @frame, @flags); - }; - /// Allocate a new frame attached to the given AVHWFramesContext. - /// a reference to an AVHWFramesContext - /// an empty (freshly allocated or unreffed) frame to be filled with newly allocated buffers. 
- /// currently unused, should be set to zero - /// 0 on success, a negative AVERROR code on failure - public static int av_hwframe_get_buffer(AVBufferRef* @hwframe_ctx, AVFrame* @frame, int @flags) - { - return av_hwframe_get_buffer_fptr(@hwframe_ctx, @frame, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_hwframe_map_delegate(AVFrame* @dst, AVFrame* @src, int @flags); - private static av_hwframe_map_delegate av_hwframe_map_fptr = (AVFrame* @dst, AVFrame* @src, int @flags) => - { - av_hwframe_map_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwframe_map"); - if (av_hwframe_map_fptr == null) - { - av_hwframe_map_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwframe_map")); - }; - } - return av_hwframe_map_fptr(@dst, @src, @flags); - }; - /// Map a hardware frame. - /// Destination frame, to contain the mapping. - /// Source frame, to be mapped. - /// Some combination of AV_HWFRAME_MAP_* flags. - /// Zero on success, negative AVERROR code on failure. 
- public static int av_hwframe_map(AVFrame* @dst, AVFrame* @src, int @flags) - { - return av_hwframe_map_fptr(@dst, @src, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_hwframe_transfer_data_delegate(AVFrame* @dst, AVFrame* @src, int @flags); - private static av_hwframe_transfer_data_delegate av_hwframe_transfer_data_fptr = (AVFrame* @dst, AVFrame* @src, int @flags) => - { - av_hwframe_transfer_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwframe_transfer_data"); - if (av_hwframe_transfer_data_fptr == null) - { - av_hwframe_transfer_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwframe_transfer_data")); - }; - } - return av_hwframe_transfer_data_fptr(@dst, @src, @flags); - }; - /// Copy data to or from a hw surface. At least one of dst/src must have an AVHWFramesContext attached. - /// the destination frame. dst is not touched on failure. - /// the source frame. - /// currently unused, should be set to zero - /// 0 on success, a negative AVERROR error code on failure. 
- public static int av_hwframe_transfer_data(AVFrame* @dst, AVFrame* @src, int @flags) - { - return av_hwframe_transfer_data_fptr(@dst, @src, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_hwframe_transfer_get_formats_delegate(AVBufferRef* @hwframe_ctx, AVHWFrameTransferDirection @dir, AVPixelFormat** @formats, int @flags); - private static av_hwframe_transfer_get_formats_delegate av_hwframe_transfer_get_formats_fptr = (AVBufferRef* @hwframe_ctx, AVHWFrameTransferDirection @dir, AVPixelFormat** @formats, int @flags) => - { - av_hwframe_transfer_get_formats_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_hwframe_transfer_get_formats"); - if (av_hwframe_transfer_get_formats_fptr == null) - { - av_hwframe_transfer_get_formats_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_hwframe_transfer_get_formats")); - }; - } - return av_hwframe_transfer_get_formats_fptr(@hwframe_ctx, @dir, @formats, @flags); - }; - /// Get a list of possible source or target formats usable in av_hwframe_transfer_data(). - /// the frame context to obtain the information for - /// the direction of the transfer - /// the pointer to the output format list will be written here. The list is terminated with AV_PIX_FMT_NONE and must be freed by the caller when no longer needed using av_free(). If this function returns successfully, the format list will have at least one item (not counting the terminator). On failure, the contents of this pointer are unspecified. - /// currently unused, should be set to zero - /// 0 on success, a negative AVERROR code on failure. 
- public static int av_hwframe_transfer_get_formats(AVBufferRef* @hwframe_ctx, AVHWFrameTransferDirection @dir, AVPixelFormat** @formats, int @flags) - { - return av_hwframe_transfer_get_formats_fptr(@hwframe_ctx, @dir, @formats, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_image_alloc_delegate(ref byte_ptrArray4 @pointers, ref int_array4 @linesizes, int @w, int @h, AVPixelFormat @pix_fmt, int @align); - private static av_image_alloc_delegate av_image_alloc_fptr = (ref byte_ptrArray4 @pointers, ref int_array4 @linesizes, int @w, int @h, AVPixelFormat @pix_fmt, int @align) => - { - av_image_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_alloc"); - if (av_image_alloc_fptr == null) - { - av_image_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_alloc")); - }; - } - return av_image_alloc_fptr(ref @pointers, ref @linesizes, @w, @h, @pix_fmt, @align); - }; - /// Allocate an image with size w and h and pixel format pix_fmt, and fill pointers and linesizes accordingly. The allocated image buffer has to be freed by using av_freep(&pointers[0]). 
- /// the value to use for buffer size alignment - /// the size in bytes required for the image buffer, a negative error code in case of failure - public static int av_image_alloc(ref byte_ptrArray4 @pointers, ref int_array4 @linesizes, int @w, int @h, AVPixelFormat @pix_fmt, int @align) - { - return av_image_alloc_fptr(ref @pointers, ref @linesizes, @w, @h, @pix_fmt, @align); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_image_check_sar_delegate(uint @w, uint @h, AVRational @sar); - private static av_image_check_sar_delegate av_image_check_sar_fptr = (uint @w, uint @h, AVRational @sar) => - { - av_image_check_sar_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_check_sar"); - if (av_image_check_sar_fptr == null) - { - av_image_check_sar_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_check_sar")); - }; - } - return av_image_check_sar_fptr(@w, @h, @sar); - }; - /// Check if the given sample aspect ratio of an image is valid. 
- /// width of the image - /// height of the image - /// sample aspect ratio of the image - /// 0 if valid, a negative AVERROR code otherwise - public static int av_image_check_sar(uint @w, uint @h, AVRational @sar) - { - return av_image_check_sar_fptr(@w, @h, @sar); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_image_check_size_delegate(uint @w, uint @h, int @log_offset, void* @log_ctx); - private static av_image_check_size_delegate av_image_check_size_fptr = (uint @w, uint @h, int @log_offset, void* @log_ctx) => - { - av_image_check_size_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_check_size"); - if (av_image_check_size_fptr == null) - { - av_image_check_size_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_check_size")); - }; - } - return av_image_check_size_fptr(@w, @h, @log_offset, @log_ctx); - }; - /// Check if the given dimension of an image is valid, meaning that all bytes of the image can be addressed with a signed int. 
- /// the width of the picture - /// the height of the picture - /// the offset to sum to the log level for logging with log_ctx - /// the parent logging context, it may be NULL - /// >= 0 if valid, a negative error code otherwise - public static int av_image_check_size(uint @w, uint @h, int @log_offset, void* @log_ctx) - { - return av_image_check_size_fptr(@w, @h, @log_offset, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_image_check_size2_delegate(uint @w, uint @h, long @max_pixels, AVPixelFormat @pix_fmt, int @log_offset, void* @log_ctx); - private static av_image_check_size2_delegate av_image_check_size2_fptr = (uint @w, uint @h, long @max_pixels, AVPixelFormat @pix_fmt, int @log_offset, void* @log_ctx) => - { - av_image_check_size2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_check_size2"); - if (av_image_check_size2_fptr == null) - { - av_image_check_size2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_check_size2")); - }; - } - return av_image_check_size2_fptr(@w, @h, @max_pixels, @pix_fmt, @log_offset, @log_ctx); - }; - /// Check if the given dimension of an image is valid, meaning that all bytes of a plane of an image with the specified pix_fmt can be addressed with a signed int. - /// the width of the picture - /// the height of the picture - /// the maximum number of pixels the user wants to accept - /// the pixel format, can be AV_PIX_FMT_NONE if unknown. 
- /// the offset to sum to the log level for logging with log_ctx - /// the parent logging context, it may be NULL - /// >= 0 if valid, a negative error code otherwise - public static int av_image_check_size2(uint @w, uint @h, long @max_pixels, AVPixelFormat @pix_fmt, int @log_offset, void* @log_ctx) - { - return av_image_check_size2_fptr(@w, @h, @max_pixels, @pix_fmt, @log_offset, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_image_copy_delegate(ref byte_ptrArray4 @dst_data, ref int_array4 @dst_linesizes, ref byte_ptrArray4 @src_data, int_array4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height); - private static av_image_copy_delegate av_image_copy_fptr = (ref byte_ptrArray4 @dst_data, ref int_array4 @dst_linesizes, ref byte_ptrArray4 @src_data, int_array4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height) => - { - av_image_copy_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_copy"); - if (av_image_copy_fptr == null) - { - av_image_copy_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_copy")); - }; - } - av_image_copy_fptr(ref @dst_data, ref @dst_linesizes, ref @src_data, @src_linesizes, @pix_fmt, @width, @height); - }; - /// Copy image in src_data to dst_data. 
- /// linesizes for the image in dst_data - /// linesizes for the image in src_data - public static void av_image_copy(ref byte_ptrArray4 @dst_data, ref int_array4 @dst_linesizes, ref byte_ptrArray4 @src_data, int_array4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height) - { - av_image_copy_fptr(ref @dst_data, ref @dst_linesizes, ref @src_data, @src_linesizes, @pix_fmt, @width, @height); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_image_copy_plane_delegate(byte* @dst, int @dst_linesize, byte* @src, int @src_linesize, int @bytewidth, int @height); - private static av_image_copy_plane_delegate av_image_copy_plane_fptr = (byte* @dst, int @dst_linesize, byte* @src, int @src_linesize, int @bytewidth, int @height) => - { - av_image_copy_plane_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_copy_plane"); - if (av_image_copy_plane_fptr == null) - { - av_image_copy_plane_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_copy_plane")); - }; - } - av_image_copy_plane_fptr(@dst, @dst_linesize, @src, @src_linesize, @bytewidth, @height); - }; - /// Copy image plane from src to dst. That is, copy "height" number of lines of "bytewidth" bytes each. The first byte of each successive line is separated by *_linesize bytes. 
- /// linesize for the image plane in dst - /// linesize for the image plane in src - public static void av_image_copy_plane(byte* @dst, int @dst_linesize, byte* @src, int @src_linesize, int @bytewidth, int @height) - { - av_image_copy_plane_fptr(@dst, @dst_linesize, @src, @src_linesize, @bytewidth, @height); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_image_copy_plane_uc_from_delegate(byte* @dst, long @dst_linesize, byte* @src, long @src_linesize, long @bytewidth, int @height); - private static av_image_copy_plane_uc_from_delegate av_image_copy_plane_uc_from_fptr = (byte* @dst, long @dst_linesize, byte* @src, long @src_linesize, long @bytewidth, int @height) => - { - av_image_copy_plane_uc_from_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_copy_plane_uc_from"); - if (av_image_copy_plane_uc_from_fptr == null) - { - av_image_copy_plane_uc_from_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_copy_plane_uc_from")); - }; - } - av_image_copy_plane_uc_from_fptr(@dst, @dst_linesize, @src, @src_linesize, @bytewidth, @height); - }; - /// Copy image data located in uncacheable (e.g. GPU mapped) memory. Where available, this function will use special functionality for reading from such memory, which may result in greatly improved performance compared to plain av_image_copy_plane(). 
- public static void av_image_copy_plane_uc_from(byte* @dst, long @dst_linesize, byte* @src, long @src_linesize, long @bytewidth, int @height) - { - av_image_copy_plane_uc_from_fptr(@dst, @dst_linesize, @src, @src_linesize, @bytewidth, @height); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_image_copy_to_buffer_delegate(byte* @dst, int @dst_size, byte_ptrArray4 @src_data, int_array4 @src_linesize, AVPixelFormat @pix_fmt, int @width, int @height, int @align); - private static av_image_copy_to_buffer_delegate av_image_copy_to_buffer_fptr = (byte* @dst, int @dst_size, byte_ptrArray4 @src_data, int_array4 @src_linesize, AVPixelFormat @pix_fmt, int @width, int @height, int @align) => - { - av_image_copy_to_buffer_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_copy_to_buffer"); - if (av_image_copy_to_buffer_fptr == null) - { - av_image_copy_to_buffer_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_copy_to_buffer")); - }; - } - return av_image_copy_to_buffer_fptr(@dst, @dst_size, @src_data, @src_linesize, @pix_fmt, @width, @height, @align); - }; - /// Copy image data from an image into a buffer. 
- /// a buffer into which picture data will be copied - /// the size in bytes of dst - /// pointers containing the source image data - /// linesizes for the image in src_data - /// the pixel format of the source image - /// the width of the source image in pixels - /// the height of the source image in pixels - /// the assumed linesize alignment for dst - /// the number of bytes written to dst, or a negative value (error code) on error - public static int av_image_copy_to_buffer(byte* @dst, int @dst_size, byte_ptrArray4 @src_data, int_array4 @src_linesize, AVPixelFormat @pix_fmt, int @width, int @height, int @align) - { - return av_image_copy_to_buffer_fptr(@dst, @dst_size, @src_data, @src_linesize, @pix_fmt, @width, @height, @align); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_image_copy_uc_from_delegate(ref byte_ptrArray4 @dst_data, long_array4 @dst_linesizes, ref byte_ptrArray4 @src_data, long_array4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height); - private static av_image_copy_uc_from_delegate av_image_copy_uc_from_fptr = (ref byte_ptrArray4 @dst_data, long_array4 @dst_linesizes, ref byte_ptrArray4 @src_data, long_array4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height) => - { - av_image_copy_uc_from_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_copy_uc_from"); - if (av_image_copy_uc_from_fptr == null) - { - av_image_copy_uc_from_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_copy_uc_from")); - }; - } - av_image_copy_uc_from_fptr(ref @dst_data, @dst_linesizes, ref @src_data, @src_linesizes, @pix_fmt, @width, @height); - }; - /// Copy image data located in uncacheable (e.g. GPU mapped) memory. Where available, this function will use special functionality for reading from such memory, which may result in greatly improved performance compared to plain av_image_copy(). 
- public static void av_image_copy_uc_from(ref byte_ptrArray4 @dst_data, long_array4 @dst_linesizes, ref byte_ptrArray4 @src_data, long_array4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height) - { - av_image_copy_uc_from_fptr(ref @dst_data, @dst_linesizes, ref @src_data, @src_linesizes, @pix_fmt, @width, @height); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_image_fill_arrays_delegate(ref byte_ptrArray4 @dst_data, ref int_array4 @dst_linesize, byte* @src, AVPixelFormat @pix_fmt, int @width, int @height, int @align); - private static av_image_fill_arrays_delegate av_image_fill_arrays_fptr = (ref byte_ptrArray4 @dst_data, ref int_array4 @dst_linesize, byte* @src, AVPixelFormat @pix_fmt, int @width, int @height, int @align) => - { - av_image_fill_arrays_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_fill_arrays"); - if (av_image_fill_arrays_fptr == null) - { - av_image_fill_arrays_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_fill_arrays")); - }; - } - return av_image_fill_arrays_fptr(ref @dst_data, ref @dst_linesize, @src, @pix_fmt, @width, @height, @align); - }; - /// Setup the data pointers and linesizes based on the specified image parameters and the provided array. 
- /// data pointers to be filled in - /// linesizes for the image in dst_data to be filled in - /// buffer which will contain or contains the actual image data, can be NULL - /// the pixel format of the image - /// the width of the image in pixels - /// the height of the image in pixels - /// the value used in src for linesize alignment - /// the size in bytes required for src, a negative error code in case of failure - public static int av_image_fill_arrays(ref byte_ptrArray4 @dst_data, ref int_array4 @dst_linesize, byte* @src, AVPixelFormat @pix_fmt, int @width, int @height, int @align) - { - return av_image_fill_arrays_fptr(ref @dst_data, ref @dst_linesize, @src, @pix_fmt, @width, @height, @align); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_image_fill_black_delegate(ref byte_ptrArray4 @dst_data, long_array4 @dst_linesize, AVPixelFormat @pix_fmt, AVColorRange @range, int @width, int @height); - private static av_image_fill_black_delegate av_image_fill_black_fptr = (ref byte_ptrArray4 @dst_data, long_array4 @dst_linesize, AVPixelFormat @pix_fmt, AVColorRange @range, int @width, int @height) => - { - av_image_fill_black_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_fill_black"); - if (av_image_fill_black_fptr == null) - { - av_image_fill_black_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_fill_black")); - }; - } - return av_image_fill_black_fptr(ref @dst_data, @dst_linesize, @pix_fmt, @range, @width, @height); - }; - /// Overwrite the image data with black. This is suitable for filling a sub-rectangle of an image, meaning the padding between the right most pixel and the left most pixel on the next line will not be overwritten. For some formats, the image size might be rounded up due to inherent alignment. 
- /// data pointers to destination image - /// linesizes for the destination image - /// the pixel format of the image - /// the color range of the image (important for colorspaces such as YUV) - /// the width of the image in pixels - /// the height of the image in pixels - /// 0 if the image data was cleared, a negative AVERROR code otherwise - public static int av_image_fill_black(ref byte_ptrArray4 @dst_data, long_array4 @dst_linesize, AVPixelFormat @pix_fmt, AVColorRange @range, int @width, int @height) - { - return av_image_fill_black_fptr(ref @dst_data, @dst_linesize, @pix_fmt, @range, @width, @height); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_image_fill_linesizes_delegate(ref int_array4 @linesizes, AVPixelFormat @pix_fmt, int @width); - private static av_image_fill_linesizes_delegate av_image_fill_linesizes_fptr = (ref int_array4 @linesizes, AVPixelFormat @pix_fmt, int @width) => - { - av_image_fill_linesizes_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_fill_linesizes"); - if (av_image_fill_linesizes_fptr == null) - { - av_image_fill_linesizes_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_fill_linesizes")); - }; - } - return av_image_fill_linesizes_fptr(ref @linesizes, @pix_fmt, @width); - }; - /// Fill plane linesizes for an image with pixel format pix_fmt and width width. 
- /// array to be filled with the linesize for each plane - /// >= 0 in case of success, a negative error code otherwise - public static int av_image_fill_linesizes(ref int_array4 @linesizes, AVPixelFormat @pix_fmt, int @width) - { - return av_image_fill_linesizes_fptr(ref @linesizes, @pix_fmt, @width); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_image_fill_max_pixsteps_delegate(int_array4 @max_pixsteps, ref int_array4 @max_pixstep_comps, AVPixFmtDescriptor* @pixdesc); - private static av_image_fill_max_pixsteps_delegate av_image_fill_max_pixsteps_fptr = (int_array4 @max_pixsteps, ref int_array4 @max_pixstep_comps, AVPixFmtDescriptor* @pixdesc) => - { - av_image_fill_max_pixsteps_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_fill_max_pixsteps"); - if (av_image_fill_max_pixsteps_fptr == null) - { - av_image_fill_max_pixsteps_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_fill_max_pixsteps")); - }; - } - av_image_fill_max_pixsteps_fptr(@max_pixsteps, ref @max_pixstep_comps, @pixdesc); - }; - /// Compute the max pixel step for each plane of an image with a format described by pixdesc. - /// an array which is filled with the max pixel step for each plane. Since a plane may contain different pixel components, the computed max_pixsteps[plane] is relative to the component in the plane with the max pixel step. - /// an array which is filled with the component for each plane which has the max pixel step. May be NULL. 
- public static void av_image_fill_max_pixsteps(int_array4 @max_pixsteps, ref int_array4 @max_pixstep_comps, AVPixFmtDescriptor* @pixdesc) - { - av_image_fill_max_pixsteps_fptr(@max_pixsteps, ref @max_pixstep_comps, @pixdesc); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_image_fill_plane_sizes_delegate(ulong_array4 @size, AVPixelFormat @pix_fmt, int @height, long_array4 @linesizes); - private static av_image_fill_plane_sizes_delegate av_image_fill_plane_sizes_fptr = (ulong_array4 @size, AVPixelFormat @pix_fmt, int @height, long_array4 @linesizes) => - { - av_image_fill_plane_sizes_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_fill_plane_sizes"); - if (av_image_fill_plane_sizes_fptr == null) - { - av_image_fill_plane_sizes_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_fill_plane_sizes")); - }; - } - return av_image_fill_plane_sizes_fptr(@size, @pix_fmt, @height, @linesizes); - }; - /// Fill plane sizes for an image with pixel format pix_fmt and height height. 
- /// the array to be filled with the size of each image plane - /// the array containing the linesize for each plane, should be filled by av_image_fill_linesizes() - /// >= 0 in case of success, a negative error code otherwise - public static int av_image_fill_plane_sizes(ulong_array4 @size, AVPixelFormat @pix_fmt, int @height, long_array4 @linesizes) - { - return av_image_fill_plane_sizes_fptr(@size, @pix_fmt, @height, @linesizes); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_image_fill_pointers_delegate(byte_ptrArray4 @data, AVPixelFormat @pix_fmt, int @height, byte* @ptr, int_array4 @linesizes); - private static av_image_fill_pointers_delegate av_image_fill_pointers_fptr = (byte_ptrArray4 @data, AVPixelFormat @pix_fmt, int @height, byte* @ptr, int_array4 @linesizes) => - { - av_image_fill_pointers_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_fill_pointers"); - if (av_image_fill_pointers_fptr == null) - { - av_image_fill_pointers_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_fill_pointers")); - }; - } - return av_image_fill_pointers_fptr(@data, @pix_fmt, @height, @ptr, @linesizes); - }; - /// Fill plane data pointers for an image with pixel format pix_fmt and height height. 
- /// pointers array to be filled with the pointer for each image plane - /// the pointer to a buffer which will contain the image - /// the array containing the linesize for each plane, should be filled by av_image_fill_linesizes() - /// the size in bytes required for the image buffer, a negative error code in case of failure - public static int av_image_fill_pointers(byte_ptrArray4 @data, AVPixelFormat @pix_fmt, int @height, byte* @ptr, int_array4 @linesizes) - { - return av_image_fill_pointers_fptr(@data, @pix_fmt, @height, @ptr, @linesizes); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_image_get_buffer_size_delegate(AVPixelFormat @pix_fmt, int @width, int @height, int @align); - private static av_image_get_buffer_size_delegate av_image_get_buffer_size_fptr = (AVPixelFormat @pix_fmt, int @width, int @height, int @align) => - { - av_image_get_buffer_size_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_get_buffer_size"); - if (av_image_get_buffer_size_fptr == null) - { - av_image_get_buffer_size_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_get_buffer_size")); - }; - } - return av_image_get_buffer_size_fptr(@pix_fmt, @width, @height, @align); - }; - /// Return the size in bytes of the amount of data required to store an image with the given parameters. 
- /// the pixel format of the image - /// the width of the image in pixels - /// the height of the image in pixels - /// the assumed linesize alignment - /// the buffer size in bytes, a negative error code in case of failure - public static int av_image_get_buffer_size(AVPixelFormat @pix_fmt, int @width, int @height, int @align) - { - return av_image_get_buffer_size_fptr(@pix_fmt, @width, @height, @align); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_image_get_linesize_delegate(AVPixelFormat @pix_fmt, int @width, int @plane); - private static av_image_get_linesize_delegate av_image_get_linesize_fptr = (AVPixelFormat @pix_fmt, int @width, int @plane) => - { - av_image_get_linesize_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_image_get_linesize"); - if (av_image_get_linesize_fptr == null) - { - av_image_get_linesize_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_image_get_linesize")); - }; - } - return av_image_get_linesize_fptr(@pix_fmt, @width, @plane); - }; - /// Compute the size of an image line with format pix_fmt and width width for the plane plane. 
- /// the computed size in bytes - public static int av_image_get_linesize(AVPixelFormat @pix_fmt, int @width, int @plane) - { - return av_image_get_linesize_fptr(@pix_fmt, @width, @plane); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint av_int_list_length_for_size_delegate(uint @elsize, void* @list, ulong @term); - private static av_int_list_length_for_size_delegate av_int_list_length_for_size_fptr = (uint @elsize, void* @list, ulong @term) => - { - av_int_list_length_for_size_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_int_list_length_for_size"); - if (av_int_list_length_for_size_fptr == null) - { - av_int_list_length_for_size_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_int_list_length_for_size")); - }; - } - return av_int_list_length_for_size_fptr(@elsize, @list, @term); - }; - /// Compute the length of an integer list. - /// size in bytes of each list element (only 1, 2, 4 or 8) - /// pointer to the list - /// list terminator (usually 0 or -1) - /// length of the list, in elements, not counting the terminator - public static uint av_int_list_length_for_size(uint @elsize, void* @list, ulong @term) - { - return av_int_list_length_for_size_fptr(@elsize, @list, @term); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_log_delegate(void* @avcl, int @level, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt); - private static av_log_delegate av_log_fptr = (void* @avcl, int @level, string @fmt) => - { - av_log_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_log"); - if (av_log_fptr == null) - { - av_log_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, 
"av_log")); - }; - } - av_log_fptr(@avcl, @level, @fmt); - }; - /// Send the specified message to the log if the level is less than or equal to the current av_log_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function. - /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct or NULL if general log. - /// The importance level of the message expressed using a "Logging Constant". - /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. - public static void av_log(void* @avcl, int @level, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt) - { - av_log_fptr(@avcl, @level, @fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_log_default_callback_delegate(void* @avcl, int @level, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt, byte* @vl); - private static av_log_default_callback_delegate av_log_default_callback_fptr = (void* @avcl, int @level, string @fmt, byte* @vl) => - { - av_log_default_callback_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_log_default_callback"); - if (av_log_default_callback_fptr == null) - { - av_log_default_callback_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_log_default_callback")); - }; - } - av_log_default_callback_fptr(@avcl, @level, @fmt, @vl); - }; - /// Default logging callback - /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. 
- /// The importance level of the message expressed using a "Logging Constant". - /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. - /// The arguments referenced by the format string. - public static void av_log_default_callback(void* @avcl, int @level, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt, byte* @vl) - { - av_log_default_callback_fptr(@avcl, @level, @fmt, @vl); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_log_format_line_delegate(void* @ptr, int @level, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix); - private static av_log_format_line_delegate av_log_format_line_fptr = (void* @ptr, int @level, string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix) => - { - av_log_format_line_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_log_format_line"); - if (av_log_format_line_fptr == null) - { - av_log_format_line_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_log_format_line")); - }; - } - av_log_format_line_fptr(@ptr, @level, @fmt, @vl, @line, @line_size, @print_prefix); - }; - /// Format a line of log the same way as the default callback. 
- /// buffer to receive the formatted line - /// size of the buffer - /// used to store whether the prefix must be printed; must point to a persistent integer initially set to 1 - public static void av_log_format_line(void* @ptr, int @level, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix) - { - av_log_format_line_fptr(@ptr, @level, @fmt, @vl, @line, @line_size, @print_prefix); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_log_format_line2_delegate(void* @ptr, int @level, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix); - private static av_log_format_line2_delegate av_log_format_line2_fptr = (void* @ptr, int @level, string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix) => - { - av_log_format_line2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_log_format_line2"); - if (av_log_format_line2_fptr == null) - { - av_log_format_line2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_log_format_line2")); - }; - } - return av_log_format_line2_fptr(@ptr, @level, @fmt, @vl, @line, @line_size, @print_prefix); - }; - /// Format a line of log the same way as the default callback. 
- /// buffer to receive the formatted line; may be NULL if line_size is 0 - /// size of the buffer; at most line_size-1 characters will be written to the buffer, plus one null terminator - /// used to store whether the prefix must be printed; must point to a persistent integer initially set to 1 - /// Returns a negative value if an error occurred, otherwise returns the number of characters that would have been written for a sufficiently large buffer, not including the terminating null character. If the return value is not less than line_size, it means that the log message was truncated to fit the buffer. - public static int av_log_format_line2(void* @ptr, int @level, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix) - { - return av_log_format_line2_fptr(@ptr, @level, @fmt, @vl, @line, @line_size, @print_prefix); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_log_get_flags_delegate(); - private static av_log_get_flags_delegate av_log_get_flags_fptr = () => - { - av_log_get_flags_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_log_get_flags"); - if (av_log_get_flags_fptr == null) - { - av_log_get_flags_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_log_get_flags")); - }; - } - return av_log_get_flags_fptr(); - }; - public static int av_log_get_flags() - { - return av_log_get_flags_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_log_get_level_delegate(); - private static av_log_get_level_delegate av_log_get_level_fptr = () => - { - av_log_get_level_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_log_get_level"); - if (av_log_get_level_fptr == null) - 
{ - av_log_get_level_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_log_get_level")); - }; - } - return av_log_get_level_fptr(); - }; - /// Get the current log level - /// Current log level - public static int av_log_get_level() - { - return av_log_get_level_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_log_once_delegate(void* @avcl, int @initial_level, int @subsequent_level, int* @state, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt); - private static av_log_once_delegate av_log_once_fptr = (void* @avcl, int @initial_level, int @subsequent_level, int* @state, string @fmt) => - { - av_log_once_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_log_once"); - if (av_log_once_fptr == null) - { - av_log_once_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_log_once")); - }; - } - av_log_once_fptr(@avcl, @initial_level, @subsequent_level, @state, @fmt); - }; - /// Send the specified message to the log once with the initial_level and then with the subsequent_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function. - /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct or NULL if general log. - /// importance level of the message expressed using a "Logging Constant" for the first occurance. - /// importance level of the message expressed using a "Logging Constant" after the first occurance. - /// a variable to keep trak of if a message has already been printed this must be initialized to 0 before the first use. The same state must not be accessed by 2 Threads simultaneously. 
- /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. - public static void av_log_once(void* @avcl, int @initial_level, int @subsequent_level, int* @state, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt) - { - av_log_once_fptr(@avcl, @initial_level, @subsequent_level, @state, @fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_log_set_callback_delegate(av_log_set_callback_callback_func @callback); - private static av_log_set_callback_delegate av_log_set_callback_fptr = (av_log_set_callback_callback_func @callback) => - { - av_log_set_callback_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_log_set_callback"); - if (av_log_set_callback_fptr == null) - { - av_log_set_callback_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_log_set_callback")); - }; - } - av_log_set_callback_fptr(@callback); - }; - /// Set the logging callback - /// A logging function with a compatible signature. 
- public static void av_log_set_callback(av_log_set_callback_callback_func @callback) - { - av_log_set_callback_fptr(@callback); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_log_set_flags_delegate(int @arg); - private static av_log_set_flags_delegate av_log_set_flags_fptr = (int @arg) => - { - av_log_set_flags_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_log_set_flags"); - if (av_log_set_flags_fptr == null) - { - av_log_set_flags_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_log_set_flags")); - }; - } - av_log_set_flags_fptr(@arg); - }; - public static void av_log_set_flags(int @arg) - { - av_log_set_flags_fptr(@arg); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_log_set_level_delegate(int @level); - private static av_log_set_level_delegate av_log_set_level_fptr = (int @level) => - { - av_log_set_level_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_log_set_level"); - if (av_log_set_level_fptr == null) - { - av_log_set_level_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_log_set_level")); - }; - } - av_log_set_level_fptr(@level); - }; - /// Set the log level - /// Logging level - public static void av_log_set_level(int @level) - { - av_log_set_level_fptr(@level); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_log2_delegate(uint @v); - private static av_log2_delegate av_log2_fptr = (uint @v) => - { - av_log2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_log2"); - if (av_log2_fptr == null) - { - av_log2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_log2")); - }; - } - return av_log2_fptr(@v); - }; - public static int av_log2(uint 
@v) - { - return av_log2_fptr(@v); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_log2_16bit_delegate(uint @v); - private static av_log2_16bit_delegate av_log2_16bit_fptr = (uint @v) => - { - av_log2_16bit_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_log2_16bit"); - if (av_log2_16bit_fptr == null) - { - av_log2_16bit_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_log2_16bit")); - }; - } - return av_log2_16bit_fptr(@v); - }; - public static int av_log2_16bit(uint @v) - { - return av_log2_16bit_fptr(@v); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_malloc_delegate(ulong @size); - private static av_malloc_delegate av_malloc_fptr = (ulong @size) => - { - av_malloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_malloc"); - if (av_malloc_fptr == null) - { - av_malloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_malloc")); - }; - } - return av_malloc_fptr(@size); - }; - /// Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU). 
- /// Size in bytes for the memory block to be allocated - /// Pointer to the allocated block, or `NULL` if the block cannot be allocated - public static void* av_malloc(ulong @size) - { - return av_malloc_fptr(@size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_malloc_array_delegate(ulong @nmemb, ulong @size); - private static av_malloc_array_delegate av_malloc_array_fptr = (ulong @nmemb, ulong @size) => - { - av_malloc_array_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_malloc_array"); - if (av_malloc_array_fptr == null) - { - av_malloc_array_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_malloc_array")); - }; - } - return av_malloc_array_fptr(@nmemb, @size); - }; - /// Allocate a memory block for an array with av_malloc(). - /// Number of element - /// Size of a single element - /// Pointer to the allocated block, or `NULL` if the block cannot be allocated - public static void* av_malloc_array(ulong @nmemb, ulong @size) - { - return av_malloc_array_fptr(@nmemb, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_mallocz_delegate(ulong @size); - private static av_mallocz_delegate av_mallocz_fptr = (ulong @size) => - { - av_mallocz_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_mallocz"); - if (av_mallocz_fptr == null) - { - av_mallocz_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_mallocz")); - }; - } - return av_mallocz_fptr(@size); - }; - /// Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU) and zero all the bytes of the block. 
- /// Size in bytes for the memory block to be allocated - /// Pointer to the allocated block, or `NULL` if it cannot be allocated - public static void* av_mallocz(ulong @size) - { - return av_mallocz_fptr(@size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_mallocz_array_delegate(ulong @nmemb, ulong @size); - private static av_mallocz_array_delegate av_mallocz_array_fptr = (ulong @nmemb, ulong @size) => - { - av_mallocz_array_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_mallocz_array"); - if (av_mallocz_array_fptr == null) - { - av_mallocz_array_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_mallocz_array")); - }; - } - return av_mallocz_array_fptr(@nmemb, @size); - }; - [Obsolete("use av_calloc()")] - public static void* av_mallocz_array(ulong @nmemb, ulong @size) - { - return av_mallocz_array_fptr(@nmemb, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVMasteringDisplayMetadata* av_mastering_display_metadata_alloc_delegate(); - private static av_mastering_display_metadata_alloc_delegate av_mastering_display_metadata_alloc_fptr = () => - { - av_mastering_display_metadata_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_mastering_display_metadata_alloc"); - if (av_mastering_display_metadata_alloc_fptr == null) - { - av_mastering_display_metadata_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_mastering_display_metadata_alloc")); - }; - } - return av_mastering_display_metadata_alloc_fptr(); - }; - /// Allocate an AVMasteringDisplayMetadata structure and set its fields to default values. The resulting struct can be freed using av_freep(). - /// An AVMasteringDisplayMetadata filled with default values or NULL on failure. 
- public static AVMasteringDisplayMetadata* av_mastering_display_metadata_alloc() - { - return av_mastering_display_metadata_alloc_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVMasteringDisplayMetadata* av_mastering_display_metadata_create_side_data_delegate(AVFrame* @frame); - private static av_mastering_display_metadata_create_side_data_delegate av_mastering_display_metadata_create_side_data_fptr = (AVFrame* @frame) => - { - av_mastering_display_metadata_create_side_data_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_mastering_display_metadata_create_side_data"); - if (av_mastering_display_metadata_create_side_data_fptr == null) - { - av_mastering_display_metadata_create_side_data_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_mastering_display_metadata_create_side_data")); - }; - } - return av_mastering_display_metadata_create_side_data_fptr(@frame); - }; - /// Allocate a complete AVMasteringDisplayMetadata and add it to the frame. - /// The frame which side data is added to. - /// The AVMasteringDisplayMetadata structure to be filled by caller. - public static AVMasteringDisplayMetadata* av_mastering_display_metadata_create_side_data(AVFrame* @frame) - { - return av_mastering_display_metadata_create_side_data_fptr(@frame); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_max_alloc_delegate(ulong @max); - private static av_max_alloc_delegate av_max_alloc_fptr = (ulong @max) => - { - av_max_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_max_alloc"); - if (av_max_alloc_fptr == null) - { - av_max_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_max_alloc")); - }; - } - av_max_alloc_fptr(@max); - }; - /// Set the maximum size that may be allocated in one block. 
- /// Value to be set as the new maximum size - public static void av_max_alloc(ulong @max) - { - av_max_alloc_fptr(@max); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_memcpy_backptr_delegate(byte* @dst, int @back, int @cnt); - private static av_memcpy_backptr_delegate av_memcpy_backptr_fptr = (byte* @dst, int @back, int @cnt) => - { - av_memcpy_backptr_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_memcpy_backptr"); - if (av_memcpy_backptr_fptr == null) - { - av_memcpy_backptr_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_memcpy_backptr")); - }; - } - av_memcpy_backptr_fptr(@dst, @back, @cnt); - }; - /// Overlapping memcpy() implementation. - /// Destination buffer - /// Number of bytes back to start copying (i.e. the initial size of the overlapping window); must be > 0 - /// Number of bytes to copy; must be >= 0 - public static void av_memcpy_backptr(byte* @dst, int @back, int @cnt) - { - av_memcpy_backptr_fptr(@dst, @back, @cnt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_memdup_delegate(void* @p, ulong @size); - private static av_memdup_delegate av_memdup_fptr = (void* @p, ulong @size) => - { - av_memdup_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_memdup"); - if (av_memdup_fptr == null) - { - av_memdup_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_memdup")); - }; - } - return av_memdup_fptr(@p, @size); - }; - /// Duplicate a buffer with av_malloc(). 
- /// Buffer to be duplicated - /// Size in bytes of the buffer copied - /// Pointer to a newly allocated buffer containing a copy of `p` or `NULL` if the buffer cannot be allocated - public static void* av_memdup(void* @p, ulong @size) - { - return av_memdup_fptr(@p, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_mul_q_delegate(AVRational @b, AVRational @c); - private static av_mul_q_delegate av_mul_q_fptr = (AVRational @b, AVRational @c) => - { - av_mul_q_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_mul_q"); - if (av_mul_q_fptr == null) - { - av_mul_q_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_mul_q")); - }; - } - return av_mul_q_fptr(@b, @c); - }; - /// Multiply two rationals. - /// First rational - /// Second rational - /// b*c - public static AVRational av_mul_q(AVRational @b, AVRational @c) - { - return av_mul_q_fptr(@b, @c); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_nearer_q_delegate(AVRational @q, AVRational @q1, AVRational @q2); - private static av_nearer_q_delegate av_nearer_q_fptr = (AVRational @q, AVRational @q1, AVRational @q2) => - { - av_nearer_q_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_nearer_q"); - if (av_nearer_q_fptr == null) - { - av_nearer_q_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_nearer_q")); - }; - } - return av_nearer_q_fptr(@q, @q1, @q2); - }; - /// Find which of the two rationals is closer to another rational. 
- /// Rational to be compared against - /// One of the following values: - 1 if `q1` is nearer to `q` than `q2` - -1 if `q2` is nearer to `q` than `q1` - 0 if they have the same distance - public static int av_nearer_q(AVRational @q, AVRational @q1, AVRational @q2) - { - return av_nearer_q_fptr(@q, @q1, @q2); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVClass* av_opt_child_class_iterate_delegate(AVClass* @parent, void** @iter); - private static av_opt_child_class_iterate_delegate av_opt_child_class_iterate_fptr = (AVClass* @parent, void** @iter) => - { - av_opt_child_class_iterate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_child_class_iterate"); - if (av_opt_child_class_iterate_fptr == null) - { - av_opt_child_class_iterate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_child_class_iterate")); - }; - } - return av_opt_child_class_iterate_fptr(@parent, @iter); - }; - /// Iterate over potential AVOptions-enabled children of parent. - /// a pointer where iteration state is stored. 
- /// AVClass corresponding to next potential child or NULL - public static AVClass* av_opt_child_class_iterate(AVClass* @parent, void** @iter) - { - return av_opt_child_class_iterate_fptr(@parent, @iter); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_opt_child_next_delegate(void* @obj, void* @prev); - private static av_opt_child_next_delegate av_opt_child_next_fptr = (void* @obj, void* @prev) => - { - av_opt_child_next_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_child_next"); - if (av_opt_child_next_fptr == null) - { - av_opt_child_next_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_child_next")); - }; - } - return av_opt_child_next_fptr(@obj, @prev); - }; - /// Iterate over AVOptions-enabled children of obj. - /// result of a previous call to this function or NULL - /// next AVOptions-enabled child or NULL - public static void* av_opt_child_next(void* @obj, void* @prev) - { - return av_opt_child_next_fptr(@obj, @prev); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_copy_delegate(void* @dest, void* @src); - private static av_opt_copy_delegate av_opt_copy_fptr = (void* @dest, void* @src) => - { - av_opt_copy_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_copy"); - if (av_opt_copy_fptr == null) - { - av_opt_copy_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_copy")); - }; - } - return av_opt_copy_fptr(@dest, @src); - }; - /// Copy options from src object into dest object. 
- /// Object to copy from - /// Object to copy into - /// 0 on success, negative on error - public static int av_opt_copy(void* @dest, void* @src) - { - return av_opt_copy_fptr(@dest, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_eval_double_delegate(void* @obj, AVOption* @o, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, double* @double_out); - private static av_opt_eval_double_delegate av_opt_eval_double_fptr = (void* @obj, AVOption* @o, string @val, double* @double_out) => - { - av_opt_eval_double_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_eval_double"); - if (av_opt_eval_double_fptr == null) - { - av_opt_eval_double_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_eval_double")); - }; - } - return av_opt_eval_double_fptr(@obj, @o, @val, @double_out); - }; - public static int av_opt_eval_double(void* @obj, AVOption* @o, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, double* @double_out) - { - return av_opt_eval_double_fptr(@obj, @o, @val, @double_out); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_eval_flags_delegate(void* @obj, AVOption* @o, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, int* @flags_out); - private static av_opt_eval_flags_delegate av_opt_eval_flags_fptr = (void* @obj, AVOption* @o, string @val, int* @flags_out) => - { - av_opt_eval_flags_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), 
"av_opt_eval_flags"); - if (av_opt_eval_flags_fptr == null) - { - av_opt_eval_flags_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_eval_flags")); - }; - } - return av_opt_eval_flags_fptr(@obj, @o, @val, @flags_out); - }; - /// @{ This group of functions can be used to evaluate option strings and get numbers out of them. They do the same thing as av_opt_set(), except the result is written into the caller-supplied pointer. - /// a struct whose first element is a pointer to AVClass. - /// an option for which the string is to be evaluated. - /// string to be evaluated. - /// 0 on success, a negative number on failure. - public static int av_opt_eval_flags(void* @obj, AVOption* @o, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, int* @flags_out) - { - return av_opt_eval_flags_fptr(@obj, @o, @val, @flags_out); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_eval_float_delegate(void* @obj, AVOption* @o, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, float* @float_out); - private static av_opt_eval_float_delegate av_opt_eval_float_fptr = (void* @obj, AVOption* @o, string @val, float* @float_out) => - { - av_opt_eval_float_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_eval_float"); - if (av_opt_eval_float_fptr == null) - { - av_opt_eval_float_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_eval_float")); - }; - } - return av_opt_eval_float_fptr(@obj, @o, @val, @float_out); - }; - public static int av_opt_eval_float(void* @obj, AVOption* @o, - #if NETSTANDARD2_1_OR_GREATER - 
[MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, float* @float_out) - { - return av_opt_eval_float_fptr(@obj, @o, @val, @float_out); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_eval_int_delegate(void* @obj, AVOption* @o, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, int* @int_out); - private static av_opt_eval_int_delegate av_opt_eval_int_fptr = (void* @obj, AVOption* @o, string @val, int* @int_out) => - { - av_opt_eval_int_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_eval_int"); - if (av_opt_eval_int_fptr == null) - { - av_opt_eval_int_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_eval_int")); - }; - } - return av_opt_eval_int_fptr(@obj, @o, @val, @int_out); - }; - public static int av_opt_eval_int(void* @obj, AVOption* @o, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, int* @int_out) - { - return av_opt_eval_int_fptr(@obj, @o, @val, @int_out); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_eval_int64_delegate(void* @obj, AVOption* @o, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, long* @int64_out); - private static av_opt_eval_int64_delegate av_opt_eval_int64_fptr = (void* @obj, AVOption* @o, string @val, long* @int64_out) => - { - av_opt_eval_int64_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), 
"av_opt_eval_int64"); - if (av_opt_eval_int64_fptr == null) - { - av_opt_eval_int64_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_eval_int64")); - }; - } - return av_opt_eval_int64_fptr(@obj, @o, @val, @int64_out); - }; - public static int av_opt_eval_int64(void* @obj, AVOption* @o, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, long* @int64_out) - { - return av_opt_eval_int64_fptr(@obj, @o, @val, @int64_out); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_eval_q_delegate(void* @obj, AVOption* @o, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, AVRational* @q_out); - private static av_opt_eval_q_delegate av_opt_eval_q_fptr = (void* @obj, AVOption* @o, string @val, AVRational* @q_out) => - { - av_opt_eval_q_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_eval_q"); - if (av_opt_eval_q_fptr == null) - { - av_opt_eval_q_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_eval_q")); - }; - } - return av_opt_eval_q_fptr(@obj, @o, @val, @q_out); - }; - public static int av_opt_eval_q(void* @obj, AVOption* @o, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, AVRational* @q_out) - { - return av_opt_eval_q_fptr(@obj, @o, @val, @q_out); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVOption* av_opt_find_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - 
[MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @unit, int @opt_flags, int @search_flags); - private static av_opt_find_delegate av_opt_find_fptr = (void* @obj, string @name, string @unit, int @opt_flags, int @search_flags) => - { - av_opt_find_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_find"); - if (av_opt_find_fptr == null) - { - av_opt_find_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_find")); - }; - } - return av_opt_find_fptr(@obj, @name, @unit, @opt_flags, @search_flags); - }; - /// Look for an option in an object. Consider only options which have all the specified flags set. - /// A pointer to a struct whose first element is a pointer to an AVClass. Alternatively a double pointer to an AVClass, if AV_OPT_SEARCH_FAKE_OBJ search flag is set. - /// The name of the option to look for. - /// When searching for named constants, name of the unit it belongs to. - /// Find only options with all the specified flags set (AV_OPT_FLAG). - /// A combination of AV_OPT_SEARCH_*. - /// A pointer to the option found, or NULL if no option was found. 
- public static AVOption* av_opt_find(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @unit, int @opt_flags, int @search_flags) - { - return av_opt_find_fptr(@obj, @name, @unit, @opt_flags, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVOption* av_opt_find2_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @unit, int @opt_flags, int @search_flags, void** @target_obj); - private static av_opt_find2_delegate av_opt_find2_fptr = (void* @obj, string @name, string @unit, int @opt_flags, int @search_flags, void** @target_obj) => - { - av_opt_find2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_find2"); - if (av_opt_find2_fptr == null) - { - av_opt_find2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_find2")); - }; - } - return av_opt_find2_fptr(@obj, @name, @unit, @opt_flags, @search_flags, @target_obj); - }; - /// Look for an option in an object. Consider only options which have all the specified flags set. - /// A pointer to a struct whose first element is a pointer to an AVClass. Alternatively a double pointer to an AVClass, if AV_OPT_SEARCH_FAKE_OBJ search flag is set. - /// The name of the option to look for. 
- /// When searching for named constants, name of the unit it belongs to. - /// Find only options with all the specified flags set (AV_OPT_FLAG). - /// A combination of AV_OPT_SEARCH_*. - /// if non-NULL, an object to which the option belongs will be written here. It may be different from obj if AV_OPT_SEARCH_CHILDREN is present in search_flags. This parameter is ignored if search_flags contain AV_OPT_SEARCH_FAKE_OBJ. - /// A pointer to the option found, or NULL if no option was found. - public static AVOption* av_opt_find2(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @unit, int @opt_flags, int @search_flags, void** @target_obj) - { - return av_opt_find2_fptr(@obj, @name, @unit, @opt_flags, @search_flags, @target_obj); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_flag_is_set_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @field_name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @flag_name); - private static av_opt_flag_is_set_delegate av_opt_flag_is_set_fptr = (void* @obj, string @field_name, string @flag_name) => - { - av_opt_flag_is_set_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_flag_is_set"); - if (av_opt_flag_is_set_fptr == null) - { - av_opt_flag_is_set_fptr = delegate - { - throw new 
PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_flag_is_set")); - }; - } - return av_opt_flag_is_set_fptr(@obj, @field_name, @flag_name); - }; - /// Check whether a particular flag is set in a flags field. - /// the name of the flag field option - /// the name of the flag to check - /// non-zero if the flag is set, zero if the flag isn't set, isn't of the right type, or the flags field doesn't exist. - public static int av_opt_flag_is_set(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @field_name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @flag_name) - { - return av_opt_flag_is_set_fptr(@obj, @field_name, @flag_name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_opt_free_delegate(void* @obj); - private static av_opt_free_delegate av_opt_free_fptr = (void* @obj) => - { - av_opt_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_free"); - if (av_opt_free_fptr == null) - { - av_opt_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_free")); - }; - } - av_opt_free_fptr(@obj); - }; - /// Free all allocated objects in obj. 
- public static void av_opt_free(void* @obj) - { - av_opt_free_fptr(@obj); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_opt_freep_ranges_delegate(AVOptionRanges** @ranges); - private static av_opt_freep_ranges_delegate av_opt_freep_ranges_fptr = (AVOptionRanges** @ranges) => - { - av_opt_freep_ranges_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_freep_ranges"); - if (av_opt_freep_ranges_fptr == null) - { - av_opt_freep_ranges_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_freep_ranges")); - }; - } - av_opt_freep_ranges_fptr(@ranges); - }; - /// Free an AVOptionRanges struct and set it to NULL. - public static void av_opt_freep_ranges(AVOptionRanges** @ranges) - { - av_opt_freep_ranges_fptr(@ranges); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_get_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, byte** @out_val); - private static av_opt_get_delegate av_opt_get_fptr = (void* @obj, string @name, int @search_flags, byte** @out_val) => - { - av_opt_get_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_get"); - if (av_opt_get_fptr == null) - { - av_opt_get_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_get")); - }; - } - return av_opt_get_fptr(@obj, @name, @search_flags, @out_val); - }; - /// @{ Those functions get a value of the option with the given name from an object. - /// a struct whose first element is a pointer to an AVClass. - /// name of the option to get. - /// flags passed to av_opt_find2. I.e. 
if AV_OPT_SEARCH_CHILDREN is passed here, then the option may be found in a child of obj. - /// value of the option will be written here - /// >=0 on success, a negative error code otherwise - public static int av_opt_get(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, byte** @out_val) - { - return av_opt_get_fptr(@obj, @name, @search_flags, @out_val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_get_channel_layout_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, long* @ch_layout); - private static av_opt_get_channel_layout_delegate av_opt_get_channel_layout_fptr = (void* @obj, string @name, int @search_flags, long* @ch_layout) => - { - av_opt_get_channel_layout_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_get_channel_layout"); - if (av_opt_get_channel_layout_fptr == null) - { - av_opt_get_channel_layout_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_get_channel_layout")); - }; - } - return av_opt_get_channel_layout_fptr(@obj, @name, @search_flags, @ch_layout); - }; - [Obsolete("")] - public static int av_opt_get_channel_layout(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, long* @ch_layout) - { - return av_opt_get_channel_layout_fptr(@obj, @name, @search_flags, @ch_layout); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int 
av_opt_get_chlayout_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, AVChannelLayout* @layout); - private static av_opt_get_chlayout_delegate av_opt_get_chlayout_fptr = (void* @obj, string @name, int @search_flags, AVChannelLayout* @layout) => - { - av_opt_get_chlayout_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_get_chlayout"); - if (av_opt_get_chlayout_fptr == null) - { - av_opt_get_chlayout_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_get_chlayout")); - }; - } - return av_opt_get_chlayout_fptr(@obj, @name, @search_flags, @layout); - }; - public static int av_opt_get_chlayout(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, AVChannelLayout* @layout) - { - return av_opt_get_chlayout_fptr(@obj, @name, @search_flags, @layout); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_get_dict_val_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, AVDictionary** @out_val); - private static av_opt_get_dict_val_delegate av_opt_get_dict_val_fptr = (void* @obj, string @name, int @search_flags, AVDictionary** @out_val) => - { - av_opt_get_dict_val_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_get_dict_val"); - if (av_opt_get_dict_val_fptr == null) - { - av_opt_get_dict_val_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, 
"av_opt_get_dict_val")); - }; - } - return av_opt_get_dict_val_fptr(@obj, @name, @search_flags, @out_val); - }; - /// The returned dictionary is a copy of the actual value and must be freed with av_dict_free() by the caller - public static int av_opt_get_dict_val(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, AVDictionary** @out_val) - { - return av_opt_get_dict_val_fptr(@obj, @name, @search_flags, @out_val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_get_double_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, double* @out_val); - private static av_opt_get_double_delegate av_opt_get_double_fptr = (void* @obj, string @name, int @search_flags, double* @out_val) => - { - av_opt_get_double_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_get_double"); - if (av_opt_get_double_fptr == null) - { - av_opt_get_double_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_get_double")); - }; - } - return av_opt_get_double_fptr(@obj, @name, @search_flags, @out_val); - }; - public static int av_opt_get_double(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, double* @out_val) - { - return av_opt_get_double_fptr(@obj, @name, @search_flags, @out_val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_get_image_size_delegate(void* @obj, - #if 
NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, int* @w_out, int* @h_out); - private static av_opt_get_image_size_delegate av_opt_get_image_size_fptr = (void* @obj, string @name, int @search_flags, int* @w_out, int* @h_out) => - { - av_opt_get_image_size_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_get_image_size"); - if (av_opt_get_image_size_fptr == null) - { - av_opt_get_image_size_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_get_image_size")); - }; - } - return av_opt_get_image_size_fptr(@obj, @name, @search_flags, @w_out, @h_out); - }; - public static int av_opt_get_image_size(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, int* @w_out, int* @h_out) - { - return av_opt_get_image_size_fptr(@obj, @name, @search_flags, @w_out, @h_out); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_get_int_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, long* @out_val); - private static av_opt_get_int_delegate av_opt_get_int_fptr = (void* @obj, string @name, int @search_flags, long* @out_val) => - { - av_opt_get_int_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_get_int"); - if (av_opt_get_int_fptr == null) - { - av_opt_get_int_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_get_int")); - }; - } - return av_opt_get_int_fptr(@obj, @name, 
@search_flags, @out_val); - }; - public static int av_opt_get_int(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, long* @out_val) - { - return av_opt_get_int_fptr(@obj, @name, @search_flags, @out_val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_get_key_value_delegate(byte** @ropts, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key_val_sep, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @pairs_sep, uint @flags, byte** @rkey, byte** @rval); - private static av_opt_get_key_value_delegate av_opt_get_key_value_fptr = (byte** @ropts, string @key_val_sep, string @pairs_sep, uint @flags, byte** @rkey, byte** @rval) => - { - av_opt_get_key_value_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_get_key_value"); - if (av_opt_get_key_value_fptr == null) - { - av_opt_get_key_value_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_get_key_value")); - }; - } - return av_opt_get_key_value_fptr(@ropts, @key_val_sep, @pairs_sep, @flags, @rkey, @rval); - }; - /// Extract a key-value pair from the beginning of a string. 
- /// pointer to the options string, will be updated to point to the rest of the string (one of the pairs_sep or the final NUL) - /// a 0-terminated list of characters used to separate key from value, for example '=' - /// a 0-terminated list of characters used to separate two pairs from each other, for example ':' or ',' - /// flags; see the AV_OPT_FLAG_* values below - /// parsed key; must be freed using av_free() - /// parsed value; must be freed using av_free() - /// >=0 for success, or a negative value corresponding to an AVERROR code in case of error; in particular: AVERROR(EINVAL) if no key is present - public static int av_opt_get_key_value(byte** @ropts, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key_val_sep, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @pairs_sep, uint @flags, byte** @rkey, byte** @rval) - { - return av_opt_get_key_value_fptr(@ropts, @key_val_sep, @pairs_sep, @flags, @rkey, @rval); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_get_pixel_fmt_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, AVPixelFormat* @out_fmt); - private static av_opt_get_pixel_fmt_delegate av_opt_get_pixel_fmt_fptr = (void* @obj, string @name, int @search_flags, AVPixelFormat* @out_fmt) => - { - av_opt_get_pixel_fmt_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_get_pixel_fmt"); - if (av_opt_get_pixel_fmt_fptr == null) - { - av_opt_get_pixel_fmt_fptr = delegate - { - throw new 
PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_get_pixel_fmt")); - }; - } - return av_opt_get_pixel_fmt_fptr(@obj, @name, @search_flags, @out_fmt); - }; - public static int av_opt_get_pixel_fmt(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, AVPixelFormat* @out_fmt) - { - return av_opt_get_pixel_fmt_fptr(@obj, @name, @search_flags, @out_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_get_q_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, AVRational* @out_val); - private static av_opt_get_q_delegate av_opt_get_q_fptr = (void* @obj, string @name, int @search_flags, AVRational* @out_val) => - { - av_opt_get_q_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_get_q"); - if (av_opt_get_q_fptr == null) - { - av_opt_get_q_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_get_q")); - }; - } - return av_opt_get_q_fptr(@obj, @name, @search_flags, @out_val); - }; - public static int av_opt_get_q(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, AVRational* @out_val) - { - return av_opt_get_q_fptr(@obj, @name, @search_flags, @out_val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_get_sample_fmt_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - 
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, AVSampleFormat* @out_fmt); - private static av_opt_get_sample_fmt_delegate av_opt_get_sample_fmt_fptr = (void* @obj, string @name, int @search_flags, AVSampleFormat* @out_fmt) => - { - av_opt_get_sample_fmt_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_get_sample_fmt"); - if (av_opt_get_sample_fmt_fptr == null) - { - av_opt_get_sample_fmt_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_get_sample_fmt")); - }; - } - return av_opt_get_sample_fmt_fptr(@obj, @name, @search_flags, @out_fmt); - }; - public static int av_opt_get_sample_fmt(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, AVSampleFormat* @out_fmt) - { - return av_opt_get_sample_fmt_fptr(@obj, @name, @search_flags, @out_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_get_video_rate_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, AVRational* @out_val); - private static av_opt_get_video_rate_delegate av_opt_get_video_rate_fptr = (void* @obj, string @name, int @search_flags, AVRational* @out_val) => - { - av_opt_get_video_rate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_get_video_rate"); - if (av_opt_get_video_rate_fptr == null) - { - av_opt_get_video_rate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_get_video_rate")); - }; - } - return av_opt_get_video_rate_fptr(@obj, @name, @search_flags, 
@out_val); - }; - public static int av_opt_get_video_rate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags, AVRational* @out_val) - { - return av_opt_get_video_rate_fptr(@obj, @name, @search_flags, @out_val); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_is_set_to_default_delegate(void* @obj, AVOption* @o); - private static av_opt_is_set_to_default_delegate av_opt_is_set_to_default_fptr = (void* @obj, AVOption* @o) => - { - av_opt_is_set_to_default_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_is_set_to_default"); - if (av_opt_is_set_to_default_fptr == null) - { - av_opt_is_set_to_default_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_is_set_to_default")); - }; - } - return av_opt_is_set_to_default_fptr(@obj, @o); - }; - /// Check if given option is set to its default value. 
- /// AVClass object to check option on - /// option to be checked - /// >0 when option is set to its default, 0 when option is not set its default, < 0 on error - public static int av_opt_is_set_to_default(void* @obj, AVOption* @o) - { - return av_opt_is_set_to_default_fptr(@obj, @o); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_is_set_to_default_by_name_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags); - private static av_opt_is_set_to_default_by_name_delegate av_opt_is_set_to_default_by_name_fptr = (void* @obj, string @name, int @search_flags) => - { - av_opt_is_set_to_default_by_name_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_is_set_to_default_by_name"); - if (av_opt_is_set_to_default_by_name_fptr == null) - { - av_opt_is_set_to_default_by_name_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_is_set_to_default_by_name")); - }; - } - return av_opt_is_set_to_default_by_name_fptr(@obj, @name, @search_flags); - }; - /// Check if given option is set to its default value. 
- /// AVClass object to check option on - /// option name - /// combination of AV_OPT_SEARCH_* - /// >0 when option is set to its default, 0 when option is not set its default, < 0 on error - public static int av_opt_is_set_to_default_by_name(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @search_flags) - { - return av_opt_is_set_to_default_by_name_fptr(@obj, @name, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVOption* av_opt_next_delegate(void* @obj, AVOption* @prev); - private static av_opt_next_delegate av_opt_next_fptr = (void* @obj, AVOption* @prev) => - { - av_opt_next_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_next"); - if (av_opt_next_fptr == null) - { - av_opt_next_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_next")); - }; - } - return av_opt_next_fptr(@obj, @prev); - }; - /// Iterate over all AVOptions belonging to obj. - /// an AVOptions-enabled struct or a double pointer to an AVClass describing it. 
- /// result of the previous call to av_opt_next() on this object or NULL - /// next AVOption or NULL - public static AVOption* av_opt_next(void* @obj, AVOption* @prev) - { - return av_opt_next_fptr(@obj, @prev); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_opt_ptr_delegate(AVClass* @avclass, void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name); - private static av_opt_ptr_delegate av_opt_ptr_fptr = (AVClass* @avclass, void* @obj, string @name) => - { - av_opt_ptr_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_ptr"); - if (av_opt_ptr_fptr == null) - { - av_opt_ptr_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_ptr")); - }; - } - return av_opt_ptr_fptr(@avclass, @obj, @name); - }; - /// @} - public static void* av_opt_ptr(AVClass* @avclass, void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name) - { - return av_opt_ptr_fptr(@avclass, @obj, @name); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_query_ranges_delegate(AVOptionRanges** @p0, void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key, int @flags); - private static av_opt_query_ranges_delegate av_opt_query_ranges_fptr = (AVOptionRanges** @p0, void* @obj, string @key, int @flags) => - { - av_opt_query_ranges_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_query_ranges"); - if (av_opt_query_ranges_fptr == null) - { - av_opt_query_ranges_fptr = 
delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_query_ranges")); - }; - } - return av_opt_query_ranges_fptr(@p0, @obj, @key, @flags); - }; - /// Get a list of allowed ranges for the given option. - /// is a bitmask of flags, undefined flags should not be set and should be ignored AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to a AVClass instead of a full instance AV_OPT_MULTI_COMPONENT_RANGE indicates that function may return more than one component, - /// number of compontents returned on success, a negative errro code otherwise - public static int av_opt_query_ranges(AVOptionRanges** @p0, void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key, int @flags) - { - return av_opt_query_ranges_fptr(@p0, @obj, @key, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_query_ranges_default_delegate(AVOptionRanges** @p0, void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key, int @flags); - private static av_opt_query_ranges_default_delegate av_opt_query_ranges_default_fptr = (AVOptionRanges** @p0, void* @obj, string @key, int @flags) => - { - av_opt_query_ranges_default_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_query_ranges_default"); - if (av_opt_query_ranges_default_fptr == null) - { - av_opt_query_ranges_default_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_query_ranges_default")); - }; - } - return av_opt_query_ranges_default_fptr(@p0, @obj, @key, @flags); - }; - /// Get a default list of allowed ranges for the given option. 
- /// is a bitmask of flags, undefined flags should not be set and should be ignored AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to a AVClass instead of a full instance AV_OPT_MULTI_COMPONENT_RANGE indicates that function may return more than one component, - /// number of compontents returned on success, a negative errro code otherwise - public static int av_opt_query_ranges_default(AVOptionRanges** @p0, void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key, int @flags) - { - return av_opt_query_ranges_default_fptr(@p0, @obj, @key, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_serialize_delegate(void* @obj, int @opt_flags, int @flags, byte** @buffer, byte @key_val_sep, byte @pairs_sep); - private static av_opt_serialize_delegate av_opt_serialize_fptr = (void* @obj, int @opt_flags, int @flags, byte** @buffer, byte @key_val_sep, byte @pairs_sep) => - { - av_opt_serialize_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_serialize"); - if (av_opt_serialize_fptr == null) - { - av_opt_serialize_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_serialize")); - }; - } - return av_opt_serialize_fptr(@obj, @opt_flags, @flags, @buffer, @key_val_sep, @pairs_sep); - }; - /// Serialize object's options. - /// AVClass object to serialize - /// serialize options with all the specified flags set (AV_OPT_FLAG) - /// combination of AV_OPT_SERIALIZE_* flags - /// Pointer to buffer that will be allocated with string containg serialized options. Buffer must be freed by the caller when is no longer needed. 
- /// character used to separate key from value - /// character used to separate two pairs from each other - /// >= 0 on success, negative on error - public static int av_opt_serialize(void* @obj, int @opt_flags, int @flags, byte** @buffer, byte @key_val_sep, byte @pairs_sep) - { - return av_opt_serialize_fptr(@obj, @opt_flags, @flags, @buffer, @key_val_sep, @pairs_sep); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, int @search_flags); - private static av_opt_set_delegate av_opt_set_fptr = (void* @obj, string @name, string @val, int @search_flags) => - { - av_opt_set_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set"); - if (av_opt_set_fptr == null) - { - av_opt_set_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set")); - }; - } - return av_opt_set_fptr(@obj, @name, @val, @search_flags); - }; - /// @{ Those functions set the field of obj with the given name to value. - /// A struct whose first element is a pointer to an AVClass. - /// the name of the field to set - /// The value to set. In case of av_opt_set() if the field is not of a string type, then the given string is parsed. SI postfixes and some named scalars are supported. If the field is of a numeric type, it has to be a numeric or named scalar. Behavior with more than one scalar and +- infix operators is undefined. If the field is of a flags type, it has to be a sequence of numeric scalars or named flags separated by '+' or '-'. 
Prefixing a flag with '+' causes it to be set without affecting the other flags; similarly, '-' unsets a flag. If the field is of a dictionary type, it has to be a ':' separated list of key=value parameters. Values containing ':' special characters must be escaped. - /// flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN is passed here, then the option may be set on a child of obj. - /// 0 if the value has been set, or an AVERROR code in case of error: AVERROR_OPTION_NOT_FOUND if no matching option exists AVERROR(ERANGE) if the value is out of range AVERROR(EINVAL) if the value is not valid - public static int av_opt_set(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @val, int @search_flags) - { - return av_opt_set_fptr(@obj, @name, @val, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_bin_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, byte* @val, int @size, int @search_flags); - private static av_opt_set_bin_delegate av_opt_set_bin_fptr = (void* @obj, string @name, byte* @val, int @size, int @search_flags) => - { - av_opt_set_bin_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_bin"); - if (av_opt_set_bin_fptr == null) - { - av_opt_set_bin_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_bin")); - }; - } - return av_opt_set_bin_fptr(@obj, @name, @val, @size, @search_flags); - }; - public 
static int av_opt_set_bin(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, byte* @val, int @size, int @search_flags) - { - return av_opt_set_bin_fptr(@obj, @name, @val, @size, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_channel_layout_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, long @ch_layout, int @search_flags); - private static av_opt_set_channel_layout_delegate av_opt_set_channel_layout_fptr = (void* @obj, string @name, long @ch_layout, int @search_flags) => - { - av_opt_set_channel_layout_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_channel_layout"); - if (av_opt_set_channel_layout_fptr == null) - { - av_opt_set_channel_layout_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_channel_layout")); - }; - } - return av_opt_set_channel_layout_fptr(@obj, @name, @ch_layout, @search_flags); - }; - [Obsolete("")] - public static int av_opt_set_channel_layout(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, long @ch_layout, int @search_flags) - { - return av_opt_set_channel_layout_fptr(@obj, @name, @ch_layout, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_chlayout_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = 
typeof(UTF8Marshaler))] - #endif - string @name, AVChannelLayout* @layout, int @search_flags); - private static av_opt_set_chlayout_delegate av_opt_set_chlayout_fptr = (void* @obj, string @name, AVChannelLayout* @layout, int @search_flags) => - { - av_opt_set_chlayout_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_chlayout"); - if (av_opt_set_chlayout_fptr == null) - { - av_opt_set_chlayout_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_chlayout")); - }; - } - return av_opt_set_chlayout_fptr(@obj, @name, @layout, @search_flags); - }; - public static int av_opt_set_chlayout(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, AVChannelLayout* @layout, int @search_flags) - { - return av_opt_set_chlayout_fptr(@obj, @name, @layout, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_opt_set_defaults_delegate(void* @s); - private static av_opt_set_defaults_delegate av_opt_set_defaults_fptr = (void* @s) => - { - av_opt_set_defaults_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_defaults"); - if (av_opt_set_defaults_fptr == null) - { - av_opt_set_defaults_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_defaults")); - }; - } - av_opt_set_defaults_fptr(@s); - }; - /// Set the values of all AVOption fields to their default values. 
- /// an AVOption-enabled struct (its first member must be a pointer to AVClass) - public static void av_opt_set_defaults(void* @s) - { - av_opt_set_defaults_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_opt_set_defaults2_delegate(void* @s, int @mask, int @flags); - private static av_opt_set_defaults2_delegate av_opt_set_defaults2_fptr = (void* @s, int @mask, int @flags) => - { - av_opt_set_defaults2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_defaults2"); - if (av_opt_set_defaults2_fptr == null) - { - av_opt_set_defaults2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_defaults2")); - }; - } - av_opt_set_defaults2_fptr(@s, @mask, @flags); - }; - /// Set the values of all AVOption fields to their default values. Only these AVOption fields for which (opt->flags & mask) == flags will have their default applied to s. - /// an AVOption-enabled struct (its first member must be a pointer to AVClass) - /// combination of AV_OPT_FLAG_* - /// combination of AV_OPT_FLAG_* - public static void av_opt_set_defaults2(void* @s, int @mask, int @flags) - { - av_opt_set_defaults2_fptr(@s, @mask, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_dict_delegate(void* @obj, AVDictionary** @options); - private static av_opt_set_dict_delegate av_opt_set_dict_fptr = (void* @obj, AVDictionary** @options) => - { - av_opt_set_dict_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_dict"); - if (av_opt_set_dict_fptr == null) - { - av_opt_set_dict_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_dict")); - }; - } - return av_opt_set_dict_fptr(@obj, @options); - }; - /// Set all the options from a given dictionary on an object. 
- /// a struct whose first element is a pointer to AVClass - /// options to process. This dictionary will be freed and replaced by a new one containing all options not found in obj. Of course this new dictionary needs to be freed by caller with av_dict_free(). - /// 0 on success, a negative AVERROR if some option was found in obj, but could not be set. - public static int av_opt_set_dict(void* @obj, AVDictionary** @options) - { - return av_opt_set_dict_fptr(@obj, @options); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_dict_val_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, AVDictionary* @val, int @search_flags); - private static av_opt_set_dict_val_delegate av_opt_set_dict_val_fptr = (void* @obj, string @name, AVDictionary* @val, int @search_flags) => - { - av_opt_set_dict_val_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_dict_val"); - if (av_opt_set_dict_val_fptr == null) - { - av_opt_set_dict_val_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_dict_val")); - }; - } - return av_opt_set_dict_val_fptr(@obj, @name, @val, @search_flags); - }; - public static int av_opt_set_dict_val(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, AVDictionary* @val, int @search_flags) - { - return av_opt_set_dict_val_fptr(@obj, @name, @val, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_dict2_delegate(void* @obj, AVDictionary** @options, int @search_flags); - private static av_opt_set_dict2_delegate av_opt_set_dict2_fptr 
= (void* @obj, AVDictionary** @options, int @search_flags) => - { - av_opt_set_dict2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_dict2"); - if (av_opt_set_dict2_fptr == null) - { - av_opt_set_dict2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_dict2")); - }; - } - return av_opt_set_dict2_fptr(@obj, @options, @search_flags); - }; - /// Set all the options from a given dictionary on an object. - /// a struct whose first element is a pointer to AVClass - /// options to process. This dictionary will be freed and replaced by a new one containing all options not found in obj. Of course this new dictionary needs to be freed by caller with av_dict_free(). - /// A combination of AV_OPT_SEARCH_*. - /// 0 on success, a negative AVERROR if some option was found in obj, but could not be set. - public static int av_opt_set_dict2(void* @obj, AVDictionary** @options, int @search_flags) - { - return av_opt_set_dict2_fptr(@obj, @options, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_double_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, double @val, int @search_flags); - private static av_opt_set_double_delegate av_opt_set_double_fptr = (void* @obj, string @name, double @val, int @search_flags) => - { - av_opt_set_double_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_double"); - if (av_opt_set_double_fptr == null) - { - av_opt_set_double_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_double")); - }; - } - return av_opt_set_double_fptr(@obj, @name, @val, @search_flags); - }; - public static int av_opt_set_double(void* @obj, - #if 
NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, double @val, int @search_flags) - { - return av_opt_set_double_fptr(@obj, @name, @val, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_from_string_delegate(void* @ctx, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @opts, byte** @shorthand, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key_val_sep, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @pairs_sep); - private static av_opt_set_from_string_delegate av_opt_set_from_string_fptr = (void* @ctx, string @opts, byte** @shorthand, string @key_val_sep, string @pairs_sep) => - { - av_opt_set_from_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_from_string"); - if (av_opt_set_from_string_fptr == null) - { - av_opt_set_from_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_from_string")); - }; - } - return av_opt_set_from_string_fptr(@ctx, @opts, @shorthand, @key_val_sep, @pairs_sep); - }; - /// Parse the key-value pairs list in opts. For each key=value pair found, set the value of the corresponding option in ctx. 
- /// the AVClass object to set options on - /// the options string, key-value pairs separated by a delimiter - /// a NULL-terminated array of options names for shorthand notation: if the first field in opts has no key part, the key is taken from the first element of shorthand; then again for the second, etc., until either opts is finished, shorthand is finished or a named option is found; after that, all options must be named - /// a 0-terminated list of characters used to separate key from value, for example '=' - /// a 0-terminated list of characters used to separate two pairs from each other, for example ':' or ',' - /// the number of successfully set key=value pairs, or a negative value corresponding to an AVERROR code in case of error: AVERROR(EINVAL) if opts cannot be parsed, the error code issued by av_set_string3() if a key/value pair cannot be set - public static int av_opt_set_from_string(void* @ctx, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @opts, byte** @shorthand, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key_val_sep, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @pairs_sep) - { - return av_opt_set_from_string_fptr(@ctx, @opts, @shorthand, @key_val_sep, @pairs_sep); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_image_size_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @w, int @h, int @search_flags); - 
private static av_opt_set_image_size_delegate av_opt_set_image_size_fptr = (void* @obj, string @name, int @w, int @h, int @search_flags) => - { - av_opt_set_image_size_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_image_size"); - if (av_opt_set_image_size_fptr == null) - { - av_opt_set_image_size_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_image_size")); - }; - } - return av_opt_set_image_size_fptr(@obj, @name, @w, @h, @search_flags); - }; - public static int av_opt_set_image_size(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @w, int @h, int @search_flags) - { - return av_opt_set_image_size_fptr(@obj, @name, @w, @h, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_int_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, long @val, int @search_flags); - private static av_opt_set_int_delegate av_opt_set_int_fptr = (void* @obj, string @name, long @val, int @search_flags) => - { - av_opt_set_int_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_int"); - if (av_opt_set_int_fptr == null) - { - av_opt_set_int_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_int")); - }; - } - return av_opt_set_int_fptr(@obj, @name, @val, @search_flags); - }; - public static int av_opt_set_int(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, long @val, int 
@search_flags) - { - return av_opt_set_int_fptr(@obj, @name, @val, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_pixel_fmt_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, AVPixelFormat @fmt, int @search_flags); - private static av_opt_set_pixel_fmt_delegate av_opt_set_pixel_fmt_fptr = (void* @obj, string @name, AVPixelFormat @fmt, int @search_flags) => - { - av_opt_set_pixel_fmt_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_pixel_fmt"); - if (av_opt_set_pixel_fmt_fptr == null) - { - av_opt_set_pixel_fmt_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_pixel_fmt")); - }; - } - return av_opt_set_pixel_fmt_fptr(@obj, @name, @fmt, @search_flags); - }; - public static int av_opt_set_pixel_fmt(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, AVPixelFormat @fmt, int @search_flags) - { - return av_opt_set_pixel_fmt_fptr(@obj, @name, @fmt, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_q_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, AVRational @val, int @search_flags); - private static av_opt_set_q_delegate av_opt_set_q_fptr = (void* @obj, string @name, AVRational @val, int @search_flags) => - { - av_opt_set_q_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_q"); - if (av_opt_set_q_fptr == null) - { - av_opt_set_q_fptr 
= delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_q")); - }; - } - return av_opt_set_q_fptr(@obj, @name, @val, @search_flags); - }; - public static int av_opt_set_q(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, AVRational @val, int @search_flags) - { - return av_opt_set_q_fptr(@obj, @name, @val, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_sample_fmt_delegate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, AVSampleFormat @fmt, int @search_flags); - private static av_opt_set_sample_fmt_delegate av_opt_set_sample_fmt_fptr = (void* @obj, string @name, AVSampleFormat @fmt, int @search_flags) => - { - av_opt_set_sample_fmt_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_sample_fmt"); - if (av_opt_set_sample_fmt_fptr == null) - { - av_opt_set_sample_fmt_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_sample_fmt")); - }; - } - return av_opt_set_sample_fmt_fptr(@obj, @name, @fmt, @search_flags); - }; - public static int av_opt_set_sample_fmt(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, AVSampleFormat @fmt, int @search_flags) - { - return av_opt_set_sample_fmt_fptr(@obj, @name, @fmt, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_set_video_rate_delegate(void* @obj, - #if 
NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, AVRational @val, int @search_flags); - private static av_opt_set_video_rate_delegate av_opt_set_video_rate_fptr = (void* @obj, string @name, AVRational @val, int @search_flags) => - { - av_opt_set_video_rate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_set_video_rate"); - if (av_opt_set_video_rate_fptr == null) - { - av_opt_set_video_rate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_set_video_rate")); - }; - } - return av_opt_set_video_rate_fptr(@obj, @name, @val, @search_flags); - }; - public static int av_opt_set_video_rate(void* @obj, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, AVRational @val, int @search_flags) - { - return av_opt_set_video_rate_fptr(@obj, @name, @val, @search_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_opt_show2_delegate(void* @obj, void* @av_log_obj, int @req_flags, int @rej_flags); - private static av_opt_show2_delegate av_opt_show2_fptr = (void* @obj, void* @av_log_obj, int @req_flags, int @rej_flags) => - { - av_opt_show2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_opt_show2"); - if (av_opt_show2_fptr == null) - { - av_opt_show2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_opt_show2")); - }; - } - return av_opt_show2_fptr(@obj, @av_log_obj, @req_flags, @rej_flags); - }; - /// Show the obj options. - /// log context to use for showing the options - /// requested flags for the options to show. Show only the options for which it is opt->flags & req_flags. 
- /// rejected flags for the options to show. Show only the options for which it is !(opt->flags & req_flags). - public static int av_opt_show2(void* @obj, void* @av_log_obj, int @req_flags, int @rej_flags) - { - return av_opt_show2_fptr(@obj, @av_log_obj, @req_flags, @rej_flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_parse_cpu_caps_delegate(uint* @flags, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @s); - private static av_parse_cpu_caps_delegate av_parse_cpu_caps_fptr = (uint* @flags, string @s) => - { - av_parse_cpu_caps_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_parse_cpu_caps"); - if (av_parse_cpu_caps_fptr == null) - { - av_parse_cpu_caps_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_parse_cpu_caps")); - }; - } - return av_parse_cpu_caps_fptr(@flags, @s); - }; - /// Parse CPU caps from a string and update the given AV_CPU_* flags based on that. - /// negative on error. 
- public static int av_parse_cpu_caps(uint* @flags, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @s) - { - return av_parse_cpu_caps_fptr(@flags, @s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_pix_fmt_count_planes_delegate(AVPixelFormat @pix_fmt); - private static av_pix_fmt_count_planes_delegate av_pix_fmt_count_planes_fptr = (AVPixelFormat @pix_fmt) => - { - av_pix_fmt_count_planes_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_pix_fmt_count_planes"); - if (av_pix_fmt_count_planes_fptr == null) - { - av_pix_fmt_count_planes_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_pix_fmt_count_planes")); - }; - } - return av_pix_fmt_count_planes_fptr(@pix_fmt); - }; - /// Returns number of planes in pix_fmt, a negative AVERROR if pix_fmt is not a valid pixel format. - /// number of planes in pix_fmt, a negative AVERROR if pix_fmt is not a valid pixel format. 
- public static int av_pix_fmt_count_planes(AVPixelFormat @pix_fmt) - { - return av_pix_fmt_count_planes_fptr(@pix_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVPixFmtDescriptor* av_pix_fmt_desc_get_delegate(AVPixelFormat @pix_fmt); - private static av_pix_fmt_desc_get_delegate av_pix_fmt_desc_get_fptr = (AVPixelFormat @pix_fmt) => - { - av_pix_fmt_desc_get_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_pix_fmt_desc_get"); - if (av_pix_fmt_desc_get_fptr == null) - { - av_pix_fmt_desc_get_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_pix_fmt_desc_get")); - }; - } - return av_pix_fmt_desc_get_fptr(@pix_fmt); - }; - /// Returns a pixel format descriptor for provided pixel format or NULL if this pixel format is unknown. - /// a pixel format descriptor for provided pixel format or NULL if this pixel format is unknown. - public static AVPixFmtDescriptor* av_pix_fmt_desc_get(AVPixelFormat @pix_fmt) - { - return av_pix_fmt_desc_get_fptr(@pix_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVPixelFormat av_pix_fmt_desc_get_id_delegate(AVPixFmtDescriptor* @desc); - private static av_pix_fmt_desc_get_id_delegate av_pix_fmt_desc_get_id_fptr = (AVPixFmtDescriptor* @desc) => - { - av_pix_fmt_desc_get_id_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_pix_fmt_desc_get_id"); - if (av_pix_fmt_desc_get_id_fptr == null) - { - av_pix_fmt_desc_get_id_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_pix_fmt_desc_get_id")); - }; - } - return av_pix_fmt_desc_get_id_fptr(@desc); - }; - /// Returns an AVPixelFormat id described by desc, or AV_PIX_FMT_NONE if desc is not a valid pointer to a pixel format descriptor. 
- /// an AVPixelFormat id described by desc, or AV_PIX_FMT_NONE if desc is not a valid pointer to a pixel format descriptor. - public static AVPixelFormat av_pix_fmt_desc_get_id(AVPixFmtDescriptor* @desc) - { - return av_pix_fmt_desc_get_id_fptr(@desc); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVPixFmtDescriptor* av_pix_fmt_desc_next_delegate(AVPixFmtDescriptor* @prev); - private static av_pix_fmt_desc_next_delegate av_pix_fmt_desc_next_fptr = (AVPixFmtDescriptor* @prev) => - { - av_pix_fmt_desc_next_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_pix_fmt_desc_next"); - if (av_pix_fmt_desc_next_fptr == null) - { - av_pix_fmt_desc_next_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_pix_fmt_desc_next")); - }; - } - return av_pix_fmt_desc_next_fptr(@prev); - }; - /// Iterate over all pixel format descriptors known to libavutil. - /// previous descriptor. NULL to get the first descriptor. 
- /// next descriptor or NULL after the last descriptor - public static AVPixFmtDescriptor* av_pix_fmt_desc_next(AVPixFmtDescriptor* @prev) - { - return av_pix_fmt_desc_next_fptr(@prev); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_pix_fmt_get_chroma_sub_sample_delegate(AVPixelFormat @pix_fmt, int* @h_shift, int* @v_shift); - private static av_pix_fmt_get_chroma_sub_sample_delegate av_pix_fmt_get_chroma_sub_sample_fptr = (AVPixelFormat @pix_fmt, int* @h_shift, int* @v_shift) => - { - av_pix_fmt_get_chroma_sub_sample_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_pix_fmt_get_chroma_sub_sample"); - if (av_pix_fmt_get_chroma_sub_sample_fptr == null) - { - av_pix_fmt_get_chroma_sub_sample_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_pix_fmt_get_chroma_sub_sample")); - }; - } - return av_pix_fmt_get_chroma_sub_sample_fptr(@pix_fmt, @h_shift, @v_shift); - }; - /// Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor. 
- /// the pixel format - /// store log2_chroma_w (horizontal/width shift) - /// store log2_chroma_h (vertical/height shift) - /// 0 on success, AVERROR(ENOSYS) on invalid or unknown pixel format - public static int av_pix_fmt_get_chroma_sub_sample(AVPixelFormat @pix_fmt, int* @h_shift, int* @v_shift) - { - return av_pix_fmt_get_chroma_sub_sample_fptr(@pix_fmt, @h_shift, @v_shift); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVPixelFormat av_pix_fmt_swap_endianness_delegate(AVPixelFormat @pix_fmt); - private static av_pix_fmt_swap_endianness_delegate av_pix_fmt_swap_endianness_fptr = (AVPixelFormat @pix_fmt) => - { - av_pix_fmt_swap_endianness_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_pix_fmt_swap_endianness"); - if (av_pix_fmt_swap_endianness_fptr == null) - { - av_pix_fmt_swap_endianness_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_pix_fmt_swap_endianness")); - }; - } - return av_pix_fmt_swap_endianness_fptr(@pix_fmt); - }; - /// Utility function to swap the endianness of a pixel format. 
- /// the pixel format - /// pixel format with swapped endianness if it exists, otherwise AV_PIX_FMT_NONE - public static AVPixelFormat av_pix_fmt_swap_endianness(AVPixelFormat @pix_fmt) - { - return av_pix_fmt_swap_endianness_fptr(@pix_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint av_q2intfloat_delegate(AVRational @q); - private static av_q2intfloat_delegate av_q2intfloat_fptr = (AVRational @q) => - { - av_q2intfloat_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_q2intfloat"); - if (av_q2intfloat_fptr == null) - { - av_q2intfloat_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_q2intfloat")); - }; - } - return av_q2intfloat_fptr(@q); - }; - /// Convert an AVRational to a IEEE 32-bit `float` expressed in fixed-point format. - /// Rational to be converted - /// Equivalent floating-point value, expressed as an unsigned 32-bit integer. - public static uint av_q2intfloat(AVRational @q) - { - return av_q2intfloat_fptr(@q); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_read_image_line_delegate(ushort* @dst, ref byte_ptrArray4 @data, int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component); - private static av_read_image_line_delegate av_read_image_line_fptr = (ushort* @dst, ref byte_ptrArray4 @data, int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component) => - { - av_read_image_line_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_read_image_line"); - if (av_read_image_line_fptr == null) - { - av_read_image_line_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_read_image_line")); - }; - } - av_read_image_line_fptr(@dst, ref @data, @linesize, @desc, @x, @y, @c, @w, @read_pal_component); - 
}; - public static void av_read_image_line(ushort* @dst, ref byte_ptrArray4 @data, int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component) - { - av_read_image_line_fptr(@dst, ref @data, @linesize, @desc, @x, @y, @c, @w, @read_pal_component); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_read_image_line2_delegate(void* @dst, ref byte_ptrArray4 @data, int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component, int @dst_element_size); - private static av_read_image_line2_delegate av_read_image_line2_fptr = (void* @dst, ref byte_ptrArray4 @data, int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component, int @dst_element_size) => - { - av_read_image_line2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_read_image_line2"); - if (av_read_image_line2_fptr == null) - { - av_read_image_line2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_read_image_line2")); - }; - } - av_read_image_line2_fptr(@dst, ref @data, @linesize, @desc, @x, @y, @c, @w, @read_pal_component, @dst_element_size); - }; - /// Read a line from an image, and write the values of the pixel format component c to dst. - /// the array containing the pointers to the planes of the image - /// the array containing the linesizes of the image - /// the pixel format descriptor for the image - /// the horizontal coordinate of the first pixel to read - /// the vertical coordinate of the first pixel to read - /// the width of the line to read, that is the number of values to write to dst - /// if not zero and the format is a paletted format writes the values corresponding to the palette component c in data[1] to dst, rather than the palette indexes in data[0]. The behavior is undefined if the format is not paletted. 
- /// size of elements in dst array (2 or 4 byte) - public static void av_read_image_line2(void* @dst, ref byte_ptrArray4 @data, int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component, int @dst_element_size) - { - av_read_image_line2_fptr(@dst, ref @data, @linesize, @desc, @x, @y, @c, @w, @read_pal_component, @dst_element_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_realloc_delegate(void* @ptr, ulong @size); - private static av_realloc_delegate av_realloc_fptr = (void* @ptr, ulong @size) => - { - av_realloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_realloc"); - if (av_realloc_fptr == null) - { - av_realloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_realloc")); - }; - } - return av_realloc_fptr(@ptr, @size); - }; - /// Allocate, reallocate, or free a block of memory. - /// Pointer to a memory block already allocated with av_realloc() or `NULL` - /// Size in bytes of the memory block to be allocated or reallocated - /// Pointer to a newly-reallocated block or `NULL` if the block cannot be reallocated - public static void* av_realloc(void* @ptr, ulong @size) - { - return av_realloc_fptr(@ptr, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_realloc_array_delegate(void* @ptr, ulong @nmemb, ulong @size); - private static av_realloc_array_delegate av_realloc_array_fptr = (void* @ptr, ulong @nmemb, ulong @size) => - { - av_realloc_array_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_realloc_array"); - if (av_realloc_array_fptr == null) - { - av_realloc_array_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_realloc_array")); - }; - } - return av_realloc_array_fptr(@ptr, @nmemb, @size); - }; - /// Allocate, 
reallocate, or free an array. - /// Pointer to a memory block already allocated with av_realloc() or `NULL` - /// Number of elements in the array - /// Size of the single element of the array - /// Pointer to a newly-reallocated block or NULL if the block cannot be reallocated - public static void* av_realloc_array(void* @ptr, ulong @nmemb, ulong @size) - { - return av_realloc_array_fptr(@ptr, @nmemb, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_realloc_f_delegate(void* @ptr, ulong @nelem, ulong @elsize); - private static av_realloc_f_delegate av_realloc_f_fptr = (void* @ptr, ulong @nelem, ulong @elsize) => - { - av_realloc_f_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_realloc_f"); - if (av_realloc_f_fptr == null) - { - av_realloc_f_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_realloc_f")); - }; - } - return av_realloc_f_fptr(@ptr, @nelem, @elsize); - }; - /// Allocate, reallocate, or free a block of memory. - public static void* av_realloc_f(void* @ptr, ulong @nelem, ulong @elsize) - { - return av_realloc_f_fptr(@ptr, @nelem, @elsize); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_reallocp_delegate(void* @ptr, ulong @size); - private static av_reallocp_delegate av_reallocp_fptr = (void* @ptr, ulong @size) => - { - av_reallocp_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_reallocp"); - if (av_reallocp_fptr == null) - { - av_reallocp_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_reallocp")); - }; - } - return av_reallocp_fptr(@ptr, @size); - }; - /// Allocate, reallocate, or free a block of memory through a pointer to a pointer. - /// Pointer to a pointer to a memory block already allocated with av_realloc(), or a pointer to `NULL`. 
The pointer is updated on success, or freed on failure. - /// Size in bytes for the memory block to be allocated or reallocated - /// Zero on success, an AVERROR error code on failure - public static int av_reallocp(void* @ptr, ulong @size) - { - return av_reallocp_fptr(@ptr, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_reallocp_array_delegate(void* @ptr, ulong @nmemb, ulong @size); - private static av_reallocp_array_delegate av_reallocp_array_fptr = (void* @ptr, ulong @nmemb, ulong @size) => - { - av_reallocp_array_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_reallocp_array"); - if (av_reallocp_array_fptr == null) - { - av_reallocp_array_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_reallocp_array")); - }; - } - return av_reallocp_array_fptr(@ptr, @nmemb, @size); - }; - /// Allocate, reallocate an array through a pointer to a pointer. - /// Pointer to a pointer to a memory block already allocated with av_realloc(), or a pointer to `NULL`. The pointer is updated on success, or freed on failure. 
- /// Number of elements - /// Size of the single element - /// Zero on success, an AVERROR error code on failure - public static int av_reallocp_array(void* @ptr, ulong @nmemb, ulong @size) - { - return av_reallocp_array_fptr(@ptr, @nmemb, @size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_reduce_delegate(int* @dst_num, int* @dst_den, long @num, long @den, long @max); - private static av_reduce_delegate av_reduce_fptr = (int* @dst_num, int* @dst_den, long @num, long @den, long @max) => - { - av_reduce_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_reduce"); - if (av_reduce_fptr == null) - { - av_reduce_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_reduce")); - }; - } - return av_reduce_fptr(@dst_num, @dst_den, @num, @den, @max); - }; - /// Reduce a fraction. - /// Destination numerator - /// Destination denominator - /// Source numerator - /// Source denominator - /// Maximum allowed values for `dst_num` & `dst_den` - /// 1 if the operation is exact, 0 otherwise - public static int av_reduce(int* @dst_num, int* @dst_den, long @num, long @den, long @max) - { - return av_reduce_fptr(@dst_num, @dst_den, @num, @den, @max); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long av_rescale_delegate(long @a, long @b, long @c); - private static av_rescale_delegate av_rescale_fptr = (long @a, long @b, long @c) => - { - av_rescale_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_rescale"); - if (av_rescale_fptr == null) - { - av_rescale_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_rescale")); - }; - } - return av_rescale_fptr(@a, @b, @c); - }; - /// Rescale a 64-bit integer with rounding to nearest. 
- public static long av_rescale(long @a, long @b, long @c) - { - return av_rescale_fptr(@a, @b, @c); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long av_rescale_delta_delegate(AVRational @in_tb, long @in_ts, AVRational @fs_tb, int @duration, long* @last, AVRational @out_tb); - private static av_rescale_delta_delegate av_rescale_delta_fptr = (AVRational @in_tb, long @in_ts, AVRational @fs_tb, int @duration, long* @last, AVRational @out_tb) => - { - av_rescale_delta_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_rescale_delta"); - if (av_rescale_delta_fptr == null) - { - av_rescale_delta_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_rescale_delta")); - }; - } - return av_rescale_delta_fptr(@in_tb, @in_ts, @fs_tb, @duration, @last, @out_tb); - }; - /// Rescale a timestamp while preserving known durations. - /// Input time base - /// Input timestamp - /// Duration time base; typically this is finer-grained (greater) than `in_tb` and `out_tb` - /// Duration till the next call to this function (i.e. 
duration of the current packet/frame) - /// Pointer to a timestamp expressed in terms of `fs_tb`, acting as a state variable - /// Output timebase - /// Timestamp expressed in terms of `out_tb` - public static long av_rescale_delta(AVRational @in_tb, long @in_ts, AVRational @fs_tb, int @duration, long* @last, AVRational @out_tb) - { - return av_rescale_delta_fptr(@in_tb, @in_ts, @fs_tb, @duration, @last, @out_tb); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long av_rescale_q_delegate(long @a, AVRational @bq, AVRational @cq); - private static av_rescale_q_delegate av_rescale_q_fptr = (long @a, AVRational @bq, AVRational @cq) => - { - av_rescale_q_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_rescale_q"); - if (av_rescale_q_fptr == null) - { - av_rescale_q_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_rescale_q")); - }; - } - return av_rescale_q_fptr(@a, @bq, @cq); - }; - /// Rescale a 64-bit integer by 2 rational numbers. - public static long av_rescale_q(long @a, AVRational @bq, AVRational @cq) - { - return av_rescale_q_fptr(@a, @bq, @cq); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long av_rescale_q_rnd_delegate(long @a, AVRational @bq, AVRational @cq, AVRounding @rnd); - private static av_rescale_q_rnd_delegate av_rescale_q_rnd_fptr = (long @a, AVRational @bq, AVRational @cq, AVRounding @rnd) => - { - av_rescale_q_rnd_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_rescale_q_rnd"); - if (av_rescale_q_rnd_fptr == null) - { - av_rescale_q_rnd_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_rescale_q_rnd")); - }; - } - return av_rescale_q_rnd_fptr(@a, @bq, @cq, @rnd); - }; - /// Rescale a 64-bit integer by 2 rational numbers with specified rounding. 
- public static long av_rescale_q_rnd(long @a, AVRational @bq, AVRational @cq, AVRounding @rnd) - { - return av_rescale_q_rnd_fptr(@a, @bq, @cq, @rnd); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long av_rescale_rnd_delegate(long @a, long @b, long @c, AVRounding @rnd); - private static av_rescale_rnd_delegate av_rescale_rnd_fptr = (long @a, long @b, long @c, AVRounding @rnd) => - { - av_rescale_rnd_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_rescale_rnd"); - if (av_rescale_rnd_fptr == null) - { - av_rescale_rnd_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_rescale_rnd")); - }; - } - return av_rescale_rnd_fptr(@a, @b, @c, @rnd); - }; - /// Rescale a 64-bit integer with specified rounding. - public static long av_rescale_rnd(long @a, long @b, long @c, AVRounding @rnd) - { - return av_rescale_rnd_fptr(@a, @b, @c, @rnd); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_sample_fmt_is_planar_delegate(AVSampleFormat @sample_fmt); - private static av_sample_fmt_is_planar_delegate av_sample_fmt_is_planar_fptr = (AVSampleFormat @sample_fmt) => - { - av_sample_fmt_is_planar_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_sample_fmt_is_planar"); - if (av_sample_fmt_is_planar_fptr == null) - { - av_sample_fmt_is_planar_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_sample_fmt_is_planar")); - }; - } - return av_sample_fmt_is_planar_fptr(@sample_fmt); - }; - /// Check if the sample format is planar. 
- /// the sample format to inspect - /// 1 if the sample format is planar, 0 if it is interleaved - public static int av_sample_fmt_is_planar(AVSampleFormat @sample_fmt) - { - return av_sample_fmt_is_planar_fptr(@sample_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_samples_alloc_delegate(byte** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); - private static av_samples_alloc_delegate av_samples_alloc_fptr = (byte** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => - { - av_samples_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_samples_alloc"); - if (av_samples_alloc_fptr == null) - { - av_samples_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_samples_alloc")); - }; - } - return av_samples_alloc_fptr(@audio_data, @linesize, @nb_channels, @nb_samples, @sample_fmt, @align); - }; - /// Allocate a samples buffer for nb_samples samples, and fill data pointers and linesize accordingly. The allocated samples buffer can be freed by using av_freep(&audio_data[0]) Allocated data will be initialized to silence. 
- /// array to be filled with the pointer for each channel - /// aligned size for audio buffer(s), may be NULL - /// number of audio channels - /// number of samples per channel - /// buffer size alignment (0 = default, 1 = no alignment) - /// >=0 on success or a negative error code on failure - public static int av_samples_alloc(byte** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) - { - return av_samples_alloc_fptr(@audio_data, @linesize, @nb_channels, @nb_samples, @sample_fmt, @align); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_samples_alloc_array_and_samples_delegate(byte*** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); - private static av_samples_alloc_array_and_samples_delegate av_samples_alloc_array_and_samples_fptr = (byte*** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => - { - av_samples_alloc_array_and_samples_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_samples_alloc_array_and_samples"); - if (av_samples_alloc_array_and_samples_fptr == null) - { - av_samples_alloc_array_and_samples_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_samples_alloc_array_and_samples")); - }; - } - return av_samples_alloc_array_and_samples_fptr(@audio_data, @linesize, @nb_channels, @nb_samples, @sample_fmt, @align); - }; - /// Allocate a data pointers array, samples buffer for nb_samples samples, and fill data pointers and linesize accordingly. 
- public static int av_samples_alloc_array_and_samples(byte*** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) - { - return av_samples_alloc_array_and_samples_fptr(@audio_data, @linesize, @nb_channels, @nb_samples, @sample_fmt, @align); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_samples_copy_delegate(byte** @dst, byte** @src, int @dst_offset, int @src_offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt); - private static av_samples_copy_delegate av_samples_copy_fptr = (byte** @dst, byte** @src, int @dst_offset, int @src_offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt) => - { - av_samples_copy_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_samples_copy"); - if (av_samples_copy_fptr == null) - { - av_samples_copy_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_samples_copy")); - }; - } - return av_samples_copy_fptr(@dst, @src, @dst_offset, @src_offset, @nb_samples, @nb_channels, @sample_fmt); - }; - /// Copy samples from src to dst. 
- /// destination array of pointers to data planes - /// source array of pointers to data planes - /// offset in samples at which the data will be written to dst - /// offset in samples at which the data will be read from src - /// number of samples to be copied - /// number of audio channels - /// audio sample format - public static int av_samples_copy(byte** @dst, byte** @src, int @dst_offset, int @src_offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt) - { - return av_samples_copy_fptr(@dst, @src, @dst_offset, @src_offset, @nb_samples, @nb_channels, @sample_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_samples_fill_arrays_delegate(byte** @audio_data, int* @linesize, byte* @buf, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); - private static av_samples_fill_arrays_delegate av_samples_fill_arrays_fptr = (byte** @audio_data, int* @linesize, byte* @buf, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => - { - av_samples_fill_arrays_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_samples_fill_arrays"); - if (av_samples_fill_arrays_fptr == null) - { - av_samples_fill_arrays_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_samples_fill_arrays")); - }; - } - return av_samples_fill_arrays_fptr(@audio_data, @linesize, @buf, @nb_channels, @nb_samples, @sample_fmt, @align); - }; - /// Fill plane data pointers and linesize for samples with sample format sample_fmt. 
- /// array to be filled with the pointer for each channel - /// calculated linesize, may be NULL - /// the pointer to a buffer containing the samples - /// the number of channels - /// the number of samples in a single channel - /// the sample format - /// buffer size alignment (0 = default, 1 = no alignment) - /// minimum size in bytes required for the buffer on success, or a negative error code on failure - public static int av_samples_fill_arrays(byte** @audio_data, int* @linesize, byte* @buf, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) - { - return av_samples_fill_arrays_fptr(@audio_data, @linesize, @buf, @nb_channels, @nb_samples, @sample_fmt, @align); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_samples_get_buffer_size_delegate(int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); - private static av_samples_get_buffer_size_delegate av_samples_get_buffer_size_fptr = (int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => - { - av_samples_get_buffer_size_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_samples_get_buffer_size"); - if (av_samples_get_buffer_size_fptr == null) - { - av_samples_get_buffer_size_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_samples_get_buffer_size")); - }; - } - return av_samples_get_buffer_size_fptr(@linesize, @nb_channels, @nb_samples, @sample_fmt, @align); - }; - /// Get the required buffer size for the given audio parameters. 
- /// calculated linesize, may be NULL - /// the number of channels - /// the number of samples in a single channel - /// the sample format - /// buffer size alignment (0 = default, 1 = no alignment) - /// required buffer size, or negative error code on failure - public static int av_samples_get_buffer_size(int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) - { - return av_samples_get_buffer_size_fptr(@linesize, @nb_channels, @nb_samples, @sample_fmt, @align); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_samples_set_silence_delegate(byte** @audio_data, int @offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt); - private static av_samples_set_silence_delegate av_samples_set_silence_fptr = (byte** @audio_data, int @offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt) => - { - av_samples_set_silence_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_samples_set_silence"); - if (av_samples_set_silence_fptr == null) - { - av_samples_set_silence_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_samples_set_silence")); - }; - } - return av_samples_set_silence_fptr(@audio_data, @offset, @nb_samples, @nb_channels, @sample_fmt); - }; - /// Fill an audio buffer with silence. 
- /// array of pointers to data planes - /// offset in samples at which to start filling - /// number of samples to fill - /// number of audio channels - /// audio sample format - public static int av_samples_set_silence(byte** @audio_data, int @offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt) - { - return av_samples_set_silence_fptr(@audio_data, @offset, @nb_samples, @nb_channels, @sample_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_set_options_string_delegate(void* @ctx, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @opts, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key_val_sep, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @pairs_sep); - private static av_set_options_string_delegate av_set_options_string_fptr = (void* @ctx, string @opts, string @key_val_sep, string @pairs_sep) => - { - av_set_options_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_set_options_string"); - if (av_set_options_string_fptr == null) - { - av_set_options_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_set_options_string")); - }; - } - return av_set_options_string_fptr(@ctx, @opts, @key_val_sep, @pairs_sep); - }; - /// Parse the key/value pairs list in opts. For each key/value pair found, stores the value in the field in ctx that is named like the key. ctx must be an AVClass context, storing is done using AVOptions. 
- /// options string to parse, may be NULL - /// a 0-terminated list of characters used to separate key from value - /// a 0-terminated list of characters used to separate two pairs from each other - /// the number of successfully set key/value pairs, or a negative value corresponding to an AVERROR code in case of error: AVERROR(EINVAL) if opts cannot be parsed, the error code issued by av_opt_set() if a key/value pair cannot be set - public static int av_set_options_string(void* @ctx, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @opts, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @key_val_sep, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @pairs_sep) - { - return av_set_options_string_fptr(@ctx, @opts, @key_val_sep, @pairs_sep); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_size_mult_delegate(ulong @a, ulong @b, ulong* @r); - private static av_size_mult_delegate av_size_mult_fptr = (ulong @a, ulong @b, ulong* @r) => - { - av_size_mult_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_size_mult"); - if (av_size_mult_fptr == null) - { - av_size_mult_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_size_mult")); - }; - } - return av_size_mult_fptr(@a, @b, @r); - }; - /// Multiply two `size_t` values checking for overflow. 
- /// Pointer to the result of the operation - /// 0 on success, AVERROR(EINVAL) on overflow - public static int av_size_mult(ulong @a, ulong @b, ulong* @r) - { - return av_size_mult_fptr(@a, @b, @r); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_strdup_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @s); - private static av_strdup_delegate av_strdup_fptr = (string @s) => - { - av_strdup_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_strdup"); - if (av_strdup_fptr == null) - { - av_strdup_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_strdup")); - }; - } - return av_strdup_fptr(@s); - }; - /// Duplicate a string. - /// String to be duplicated - /// Pointer to a newly-allocated string containing a copy of `s` or `NULL` if the string cannot be allocated - public static byte* av_strdup( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @s) - { - return av_strdup_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_strerror_delegate(int @errnum, byte* @errbuf, ulong @errbuf_size); - private static av_strerror_delegate av_strerror_fptr = (int @errnum, byte* @errbuf, ulong @errbuf_size) => - { - av_strerror_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_strerror"); - if (av_strerror_fptr == null) - { - av_strerror_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_strerror")); - }; - } - return av_strerror_fptr(@errnum, @errbuf, @errbuf_size); - }; - /// Put a description of the AVERROR code errnum in errbuf. 
In case of failure the global variable errno is set to indicate the error. Even in case of failure av_strerror() will print a generic error message indicating the errnum provided to errbuf. - /// error code to describe - /// buffer to which description is written - /// the size in bytes of errbuf - /// 0 on success, a negative value if a description for errnum cannot be found - public static int av_strerror(int @errnum, byte* @errbuf, ulong @errbuf_size) - { - return av_strerror_fptr(@errnum, @errbuf, @errbuf_size); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_strndup_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @s, ulong @len); - private static av_strndup_delegate av_strndup_fptr = (string @s, ulong @len) => - { - av_strndup_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_strndup"); - if (av_strndup_fptr == null) - { - av_strndup_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_strndup")); - }; - } - return av_strndup_fptr(@s, @len); - }; - /// Duplicate a substring of a string. 
- /// String to be duplicated - /// Maximum length of the resulting string (not counting the terminating byte) - /// Pointer to a newly-allocated string containing a substring of `s` or `NULL` if the string cannot be allocated - public static byte* av_strndup( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @s, ulong @len) - { - return av_strndup_fptr(@s, @len); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVRational av_sub_q_delegate(AVRational @b, AVRational @c); - private static av_sub_q_delegate av_sub_q_fptr = (AVRational @b, AVRational @c) => - { - av_sub_q_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_sub_q"); - if (av_sub_q_fptr == null) - { - av_sub_q_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_sub_q")); - }; - } - return av_sub_q_fptr(@b, @c); - }; - /// Subtract one rational from another. 
- /// First rational - /// Second rational - /// b-c - public static AVRational av_sub_q(AVRational @b, AVRational @c) - { - return av_sub_q_fptr(@b, @c); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_tempfile_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @prefix, byte** @filename, int @log_offset, void* @log_ctx); - private static av_tempfile_delegate av_tempfile_fptr = (string @prefix, byte** @filename, int @log_offset, void* @log_ctx) => - { - av_tempfile_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_tempfile"); - if (av_tempfile_fptr == null) - { - av_tempfile_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_tempfile")); - }; - } - return av_tempfile_fptr(@prefix, @filename, @log_offset, @log_ctx); - }; - /// Wrapper to work around the lack of mkstemp() on mingw. Also, tries to create file in /tmp first, if possible. *prefix can be a character constant; *filename will be allocated internally. - /// file descriptor of opened file (or negative value corresponding to an AVERROR code on error) and opened file name in **filename. 
- [Obsolete("as fd numbers cannot be passed saftely between libs on some platforms")] - public static int av_tempfile( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @prefix, byte** @filename, int @log_offset, void* @log_ctx) - { - return av_tempfile_fptr(@prefix, @filename, @log_offset, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_timecode_adjust_ntsc_framenum2_delegate(int @framenum, int @fps); - private static av_timecode_adjust_ntsc_framenum2_delegate av_timecode_adjust_ntsc_framenum2_fptr = (int @framenum, int @fps) => - { - av_timecode_adjust_ntsc_framenum2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_timecode_adjust_ntsc_framenum2"); - if (av_timecode_adjust_ntsc_framenum2_fptr == null) - { - av_timecode_adjust_ntsc_framenum2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_timecode_adjust_ntsc_framenum2")); - }; - } - return av_timecode_adjust_ntsc_framenum2_fptr(@framenum, @fps); - }; - /// Adjust frame number for NTSC drop frame time code. 
- /// frame number to adjust - /// frame per second, multiples of 30 - /// adjusted frame number - public static int av_timecode_adjust_ntsc_framenum2(int @framenum, int @fps) - { - return av_timecode_adjust_ntsc_framenum2_fptr(@framenum, @fps); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_timecode_check_frame_rate_delegate(AVRational @rate); - private static av_timecode_check_frame_rate_delegate av_timecode_check_frame_rate_fptr = (AVRational @rate) => - { - av_timecode_check_frame_rate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_timecode_check_frame_rate"); - if (av_timecode_check_frame_rate_fptr == null) - { - av_timecode_check_frame_rate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_timecode_check_frame_rate")); - }; - } - return av_timecode_check_frame_rate_fptr(@rate); - }; - /// Check if the timecode feature is available for the given frame rate - /// 0 if supported, < 0 otherwise - public static int av_timecode_check_frame_rate(AVRational @rate) - { - return av_timecode_check_frame_rate_fptr(@rate); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint av_timecode_get_smpte_delegate(AVRational @rate, int @drop, int @hh, int @mm, int @ss, int @ff); - private static av_timecode_get_smpte_delegate av_timecode_get_smpte_fptr = (AVRational @rate, int @drop, int @hh, int @mm, int @ss, int @ff) => - { - av_timecode_get_smpte_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_timecode_get_smpte"); - if (av_timecode_get_smpte_fptr == null) - { - av_timecode_get_smpte_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_timecode_get_smpte")); - }; - } - return av_timecode_get_smpte_fptr(@rate, @drop, @hh, @mm, @ss, @ff); - }; - /// Convert sei info to SMPTE 12M binary representation. 
- /// frame rate in rational form - /// drop flag - /// hour - /// minute - /// second - /// frame number - /// the SMPTE binary representation - public static uint av_timecode_get_smpte(AVRational @rate, int @drop, int @hh, int @mm, int @ss, int @ff) - { - return av_timecode_get_smpte_fptr(@rate, @drop, @hh, @mm, @ss, @ff); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint av_timecode_get_smpte_from_framenum_delegate(AVTimecode* @tc, int @framenum); - private static av_timecode_get_smpte_from_framenum_delegate av_timecode_get_smpte_from_framenum_fptr = (AVTimecode* @tc, int @framenum) => - { - av_timecode_get_smpte_from_framenum_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_timecode_get_smpte_from_framenum"); - if (av_timecode_get_smpte_from_framenum_fptr == null) - { - av_timecode_get_smpte_from_framenum_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_timecode_get_smpte_from_framenum")); - }; - } - return av_timecode_get_smpte_from_framenum_fptr(@tc, @framenum); - }; - /// Convert frame number to SMPTE 12M binary representation. 
- /// timecode data correctly initialized - /// frame number - /// the SMPTE binary representation - public static uint av_timecode_get_smpte_from_framenum(AVTimecode* @tc, int @framenum) - { - return av_timecode_get_smpte_from_framenum_fptr(@tc, @framenum); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_timecode_init_delegate(AVTimecode* @tc, AVRational @rate, int @flags, int @frame_start, void* @log_ctx); - private static av_timecode_init_delegate av_timecode_init_fptr = (AVTimecode* @tc, AVRational @rate, int @flags, int @frame_start, void* @log_ctx) => - { - av_timecode_init_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_timecode_init"); - if (av_timecode_init_fptr == null) - { - av_timecode_init_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_timecode_init")); - }; - } - return av_timecode_init_fptr(@tc, @rate, @flags, @frame_start, @log_ctx); - }; - /// Init a timecode struct with the passed parameters. - /// pointer to an allocated AVTimecode - /// frame rate in rational form - /// miscellaneous flags such as drop frame, +24 hours, ... 
(see AVTimecodeFlag) - /// the first frame number - /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log) - /// 0 on success, AVERROR otherwise - public static int av_timecode_init(AVTimecode* @tc, AVRational @rate, int @flags, int @frame_start, void* @log_ctx) - { - return av_timecode_init_fptr(@tc, @rate, @flags, @frame_start, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_timecode_init_from_components_delegate(AVTimecode* @tc, AVRational @rate, int @flags, int @hh, int @mm, int @ss, int @ff, void* @log_ctx); - private static av_timecode_init_from_components_delegate av_timecode_init_from_components_fptr = (AVTimecode* @tc, AVRational @rate, int @flags, int @hh, int @mm, int @ss, int @ff, void* @log_ctx) => - { - av_timecode_init_from_components_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_timecode_init_from_components"); - if (av_timecode_init_from_components_fptr == null) - { - av_timecode_init_from_components_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_timecode_init_from_components")); - }; - } - return av_timecode_init_from_components_fptr(@tc, @rate, @flags, @hh, @mm, @ss, @ff, @log_ctx); - }; - /// Init a timecode struct from the passed timecode components. - /// pointer to an allocated AVTimecode - /// frame rate in rational form - /// miscellaneous flags such as drop frame, +24 hours, ... 
(see AVTimecodeFlag) - /// hours - /// minutes - /// seconds - /// frames - /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log) - /// 0 on success, AVERROR otherwise - public static int av_timecode_init_from_components(AVTimecode* @tc, AVRational @rate, int @flags, int @hh, int @mm, int @ss, int @ff, void* @log_ctx) - { - return av_timecode_init_from_components_fptr(@tc, @rate, @flags, @hh, @mm, @ss, @ff, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_timecode_init_from_string_delegate(AVTimecode* @tc, AVRational @rate, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @str, void* @log_ctx); - private static av_timecode_init_from_string_delegate av_timecode_init_from_string_fptr = (AVTimecode* @tc, AVRational @rate, string @str, void* @log_ctx) => - { - av_timecode_init_from_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_timecode_init_from_string"); - if (av_timecode_init_from_string_fptr == null) - { - av_timecode_init_from_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_timecode_init_from_string")); - }; - } - return av_timecode_init_from_string_fptr(@tc, @rate, @str, @log_ctx); - }; - /// Parse timecode representation (hh:mm:ss[:;.]ff). - /// pointer to an allocated AVTimecode - /// frame rate in rational form - /// timecode string which will determine the frame start - /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log). 
- /// 0 on success, AVERROR otherwise - public static int av_timecode_init_from_string(AVTimecode* @tc, AVRational @rate, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @str, void* @log_ctx) - { - return av_timecode_init_from_string_fptr(@tc, @rate, @str, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_timecode_make_mpeg_tc_string_delegate(byte* @buf, uint @tc25bit); - private static av_timecode_make_mpeg_tc_string_delegate av_timecode_make_mpeg_tc_string_fptr = (byte* @buf, uint @tc25bit) => - { - av_timecode_make_mpeg_tc_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_timecode_make_mpeg_tc_string"); - if (av_timecode_make_mpeg_tc_string_fptr == null) - { - av_timecode_make_mpeg_tc_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_timecode_make_mpeg_tc_string")); - }; - } - return av_timecode_make_mpeg_tc_string_fptr(@buf, @tc25bit); - }; - /// Get the timecode string from the 25-bit timecode format (MPEG GOP format). 
- /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long - /// the 25-bits timecode - /// the buf parameter - public static byte* av_timecode_make_mpeg_tc_string(byte* @buf, uint @tc25bit) - { - return av_timecode_make_mpeg_tc_string_fptr(@buf, @tc25bit); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_timecode_make_smpte_tc_string_delegate(byte* @buf, uint @tcsmpte, int @prevent_df); - private static av_timecode_make_smpte_tc_string_delegate av_timecode_make_smpte_tc_string_fptr = (byte* @buf, uint @tcsmpte, int @prevent_df) => - { - av_timecode_make_smpte_tc_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_timecode_make_smpte_tc_string"); - if (av_timecode_make_smpte_tc_string_fptr == null) - { - av_timecode_make_smpte_tc_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_timecode_make_smpte_tc_string")); - }; - } - return av_timecode_make_smpte_tc_string_fptr(@buf, @tcsmpte, @prevent_df); - }; - /// Get the timecode string from the SMPTE timecode format. 
- /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long - /// the 32-bit SMPTE timecode - /// prevent the use of a drop flag when it is known the DF bit is arbitrary - /// the buf parameter - public static byte* av_timecode_make_smpte_tc_string(byte* @buf, uint @tcsmpte, int @prevent_df) - { - return av_timecode_make_smpte_tc_string_fptr(@buf, @tcsmpte, @prevent_df); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_timecode_make_smpte_tc_string2_delegate(byte* @buf, AVRational @rate, uint @tcsmpte, int @prevent_df, int @skip_field); - private static av_timecode_make_smpte_tc_string2_delegate av_timecode_make_smpte_tc_string2_fptr = (byte* @buf, AVRational @rate, uint @tcsmpte, int @prevent_df, int @skip_field) => - { - av_timecode_make_smpte_tc_string2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_timecode_make_smpte_tc_string2"); - if (av_timecode_make_smpte_tc_string2_fptr == null) - { - av_timecode_make_smpte_tc_string2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_timecode_make_smpte_tc_string2")); - }; - } - return av_timecode_make_smpte_tc_string2_fptr(@buf, @rate, @tcsmpte, @prevent_df, @skip_field); - }; - /// Get the timecode string from the SMPTE timecode format. - /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long - /// frame rate of the timecode - /// the 32-bit SMPTE timecode - /// prevent the use of a drop flag when it is known the DF bit is arbitrary - /// prevent the use of a field flag when it is known the field bit is arbitrary (e.g. 
because it is used as PC flag) - /// the buf parameter - public static byte* av_timecode_make_smpte_tc_string2(byte* @buf, AVRational @rate, uint @tcsmpte, int @prevent_df, int @skip_field) - { - return av_timecode_make_smpte_tc_string2_fptr(@buf, @rate, @tcsmpte, @prevent_df, @skip_field); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate byte* av_timecode_make_string_delegate(AVTimecode* @tc, byte* @buf, int @framenum); - private static av_timecode_make_string_delegate av_timecode_make_string_fptr = (AVTimecode* @tc, byte* @buf, int @framenum) => - { - av_timecode_make_string_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_timecode_make_string"); - if (av_timecode_make_string_fptr == null) - { - av_timecode_make_string_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_timecode_make_string")); - }; - } - return av_timecode_make_string_fptr(@tc, @buf, @framenum); - }; - /// Load timecode string in buf. 
- /// timecode data correctly initialized - /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long - /// frame number - /// the buf parameter - public static byte* av_timecode_make_string(AVTimecode* @tc, byte* @buf, int @framenum) - { - return av_timecode_make_string_fptr(@tc, @buf, @framenum); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_tree_destroy_delegate(AVTreeNode* @t); - private static av_tree_destroy_delegate av_tree_destroy_fptr = (AVTreeNode* @t) => - { - av_tree_destroy_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_tree_destroy"); - if (av_tree_destroy_fptr == null) - { - av_tree_destroy_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_tree_destroy")); - }; - } - av_tree_destroy_fptr(@t); - }; - public static void av_tree_destroy(AVTreeNode* @t) - { - av_tree_destroy_fptr(@t); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_tree_enumerate_delegate(AVTreeNode* @t, void* @opaque, av_tree_enumerate_cmp_func @cmp, av_tree_enumerate_enu_func @enu); - private static av_tree_enumerate_delegate av_tree_enumerate_fptr = (AVTreeNode* @t, void* @opaque, av_tree_enumerate_cmp_func @cmp, av_tree_enumerate_enu_func @enu) => - { - av_tree_enumerate_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_tree_enumerate"); - if (av_tree_enumerate_fptr == null) - { - av_tree_enumerate_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_tree_enumerate")); - }; - } - av_tree_enumerate_fptr(@t, @opaque, @cmp, @enu); - }; - /// Apply enu(opaque, &elem) to all the elements in the tree in a given range. 
- /// a comparison function that returns < 0 for an element below the range, > 0 for an element above the range and == 0 for an element inside the range - public static void av_tree_enumerate(AVTreeNode* @t, void* @opaque, av_tree_enumerate_cmp_func @cmp, av_tree_enumerate_enu_func @enu) - { - av_tree_enumerate_fptr(@t, @opaque, @cmp, @enu); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_tree_find_delegate(AVTreeNode* @root, void* @key, av_tree_find_cmp_func @cmp, void_ptrArray2 @next); - private static av_tree_find_delegate av_tree_find_fptr = (AVTreeNode* @root, void* @key, av_tree_find_cmp_func @cmp, void_ptrArray2 @next) => - { - av_tree_find_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_tree_find"); - if (av_tree_find_fptr == null) - { - av_tree_find_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_tree_find")); - }; - } - return av_tree_find_fptr(@root, @key, @cmp, @next); - }; - /// Find an element. - /// a pointer to the root node of the tree - /// compare function used to compare elements in the tree, API identical to that of Standard C's qsort It is guaranteed that the first and only the first argument to cmp() will be the key parameter to av_tree_find(), thus it could if the user wants, be a different type (like an opaque context). - /// If next is not NULL, then next[0] will contain the previous element and next[1] the next element. If either does not exist, then the corresponding entry in next is unchanged. - /// An element with cmp(key, elem) == 0 or NULL if no such element exists in the tree. 
- public static void* av_tree_find(AVTreeNode* @root, void* @key, av_tree_find_cmp_func @cmp, void_ptrArray2 @next) - { - return av_tree_find_fptr(@root, @key, @cmp, @next); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* av_tree_insert_delegate(AVTreeNode** @rootp, void* @key, av_tree_insert_cmp_func @cmp, AVTreeNode** @next); - private static av_tree_insert_delegate av_tree_insert_fptr = (AVTreeNode** @rootp, void* @key, av_tree_insert_cmp_func @cmp, AVTreeNode** @next) => - { - av_tree_insert_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_tree_insert"); - if (av_tree_insert_fptr == null) - { - av_tree_insert_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_tree_insert")); - }; - } - return av_tree_insert_fptr(@rootp, @key, @cmp, @next); - }; - /// Insert or remove an element. - /// A pointer to a pointer to the root node of the tree; note that the root node can change during insertions, this is required to keep the tree balanced. - /// pointer to the element key to insert in the tree - /// compare function used to compare elements in the tree, API identical to that of Standard C's qsort - /// Used to allocate and free AVTreeNodes. For insertion the user must set it to an allocated and zeroed object of at least av_tree_node_size bytes size. av_tree_insert() will set it to NULL if it has been consumed. For deleting elements *next is set to NULL by the user and av_tree_insert() will set it to the AVTreeNode which was used for the removed element. This allows the use of flat arrays, which have lower overhead compared to many malloced elements. You might want to define a function like: - /// If no insertion happened, the found element; if an insertion or removal happened, then either key or NULL will be returned. Which one it is depends on the tree state and the implementation. 
You should make no assumptions that it's one or the other in the code. - public static void* av_tree_insert(AVTreeNode** @rootp, void* @key, av_tree_insert_cmp_func @cmp, AVTreeNode** @next) - { - return av_tree_insert_fptr(@rootp, @key, @cmp, @next); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVTreeNode* av_tree_node_alloc_delegate(); - private static av_tree_node_alloc_delegate av_tree_node_alloc_fptr = () => - { - av_tree_node_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_tree_node_alloc"); - if (av_tree_node_alloc_fptr == null) - { - av_tree_node_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_tree_node_alloc")); - }; - } - return av_tree_node_alloc_fptr(); - }; - /// Allocate an AVTreeNode. - public static AVTreeNode* av_tree_node_alloc() - { - return av_tree_node_alloc_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int av_usleep_delegate(uint @usec); - private static av_usleep_delegate av_usleep_fptr = (uint @usec) => - { - av_usleep_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_usleep"); - if (av_usleep_fptr == null) - { - av_usleep_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_usleep")); - }; - } - return av_usleep_fptr(@usec); - }; - /// Sleep for a period of time. Although the duration is expressed in microseconds, the actual delay may be rounded to the precision of the system timer. - /// Number of microseconds to sleep. - /// zero on success or (negative) error code. 
- public static int av_usleep(uint @usec) - { - return av_usleep_fptr(@usec); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string av_version_info_delegate(); - private static av_version_info_delegate av_version_info_fptr = () => - { - av_version_info_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_version_info"); - if (av_version_info_fptr == null) - { - av_version_info_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_version_info")); - }; - } - return av_version_info_fptr(); - }; - /// Return an informative version string. This usually is the actual release version number or a git commit description. This string has no fixed format and can change any time. It should never be parsed by code. - public static string av_version_info() - { - return av_version_info_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_vlog_delegate(void* @avcl, int @level, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt, byte* @vl); - private static av_vlog_delegate av_vlog_fptr = (void* @avcl, int @level, string @fmt, byte* @vl) => - { - av_vlog_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_vlog"); - if (av_vlog_fptr == null) - { - av_vlog_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_vlog")); - }; - } - av_vlog_fptr(@avcl, @level, @fmt, @vl); - }; - /// Send the specified message to the log if the level is less than or equal to the current av_log_level. By default, all logging messages are sent to stderr. 
This behavior can be altered by setting a different logging callback function. - /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. - /// The importance level of the message expressed using a "Logging Constant". - /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. - /// The arguments referenced by the format string. - public static void av_vlog(void* @avcl, int @level, - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @fmt, byte* @vl) - { - av_vlog_fptr(@avcl, @level, @fmt, @vl); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_write_image_line_delegate(ushort* @src, ref byte_ptrArray4 @data, int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w); - private static av_write_image_line_delegate av_write_image_line_fptr = (ushort* @src, ref byte_ptrArray4 @data, int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w) => - { - av_write_image_line_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_write_image_line"); - if (av_write_image_line_fptr == null) - { - av_write_image_line_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_write_image_line")); - }; - } - av_write_image_line_fptr(@src, ref @data, @linesize, @desc, @x, @y, @c, @w); - }; - public static void av_write_image_line(ushort* @src, ref byte_ptrArray4 @data, int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w) - { - av_write_image_line_fptr(@src, ref @data, @linesize, @desc, @x, @y, @c, @w); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void av_write_image_line2_delegate(void* @src, ref byte_ptrArray4 
@data, int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @src_element_size); - private static av_write_image_line2_delegate av_write_image_line2_fptr = (void* @src, ref byte_ptrArray4 @data, int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @src_element_size) => - { - av_write_image_line2_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "av_write_image_line2"); - if (av_write_image_line2_fptr == null) - { - av_write_image_line2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "av_write_image_line2")); - }; - } - av_write_image_line2_fptr(@src, ref @data, @linesize, @desc, @x, @y, @c, @w, @src_element_size); - }; - /// Write the values from src to the pixel format component c of an image line. - /// array containing the values to write - /// the array containing the pointers to the planes of the image to write into. It is supposed to be zeroed. - /// the array containing the linesizes of the image - /// the pixel format descriptor for the image - /// the horizontal coordinate of the first pixel to write - /// the vertical coordinate of the first pixel to write - /// the width of the line to write, that is the number of values to write to the image line - /// size of elements in src array (2 or 4 byte) - public static void av_write_image_line2(void* @src, ref byte_ptrArray4 @data, int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @src_element_size) - { - av_write_image_line2_fptr(@src, ref @data, @linesize, @desc, @x, @y, @c, @w, @src_element_size); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avutil_configuration_delegate(); - private static avutil_configuration_delegate avutil_configuration_fptr = () => - { - 
avutil_configuration_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "avutil_configuration"); - if (avutil_configuration_fptr == null) - { - avutil_configuration_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avutil_configuration")); - }; - } - return avutil_configuration_fptr(); - }; - /// Return the libavutil build-time configuration. - public static string avutil_configuration() - { - return avutil_configuration_fptr(); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string avutil_license_delegate(); - private static avutil_license_delegate avutil_license_fptr = () => - { - avutil_license_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "avutil_license"); - if (avutil_license_fptr == null) - { - avutil_license_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avutil_license")); - }; - } - return avutil_license_fptr(); - }; - /// Return the libavutil license. - public static string avutil_license() - { - return avutil_license_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint avutil_version_delegate(); - private static avutil_version_delegate avutil_version_fptr = () => - { - avutil_version_fptr = GetFunctionDelegate(GetOrLoadLibrary("avutil"), "avutil_version"); - if (avutil_version_fptr == null) - { - avutil_version_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "avutil_version")); - }; - } - return avutil_version_fptr(); - }; - /// Return the LIBAVUTIL_VERSION_INT constant. 
- public static uint avutil_version() - { - return avutil_version_fptr(); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string postproc_configuration_delegate(); - private static postproc_configuration_delegate postproc_configuration_fptr = () => - { - postproc_configuration_fptr = GetFunctionDelegate(GetOrLoadLibrary("postproc"), "postproc_configuration"); - if (postproc_configuration_fptr == null) - { - postproc_configuration_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "postproc_configuration")); - }; - } - return postproc_configuration_fptr(); - }; - /// Return the libpostproc build-time configuration. - public static string postproc_configuration() - { - return postproc_configuration_fptr(); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string postproc_license_delegate(); - private static postproc_license_delegate postproc_license_fptr = () => - { - postproc_license_fptr = GetFunctionDelegate(GetOrLoadLibrary("postproc"), "postproc_license"); - if (postproc_license_fptr == null) - { - postproc_license_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "postproc_license")); - }; - } - return postproc_license_fptr(); - }; - /// Return the libpostproc license. 
- public static string postproc_license() - { - return postproc_license_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint postproc_version_delegate(); - private static postproc_version_delegate postproc_version_fptr = () => - { - postproc_version_fptr = GetFunctionDelegate(GetOrLoadLibrary("postproc"), "postproc_version"); - if (postproc_version_fptr == null) - { - postproc_version_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "postproc_version")); - }; - } - return postproc_version_fptr(); - }; - /// Return the LIBPOSTPROC_VERSION_INT constant. - public static uint postproc_version() - { - return postproc_version_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void pp_free_context_delegate(void* @ppContext); - private static pp_free_context_delegate pp_free_context_fptr = (void* @ppContext) => - { - pp_free_context_fptr = GetFunctionDelegate(GetOrLoadLibrary("postproc"), "pp_free_context"); - if (pp_free_context_fptr == null) - { - pp_free_context_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "pp_free_context")); - }; - } - pp_free_context_fptr(@ppContext); - }; - public static void pp_free_context(void* @ppContext) - { - pp_free_context_fptr(@ppContext); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void pp_free_mode_delegate(void* @mode); - private static pp_free_mode_delegate pp_free_mode_fptr = (void* @mode) => - { - pp_free_mode_fptr = GetFunctionDelegate(GetOrLoadLibrary("postproc"), "pp_free_mode"); - if (pp_free_mode_fptr == null) - { - pp_free_mode_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "pp_free_mode")); - }; - } - pp_free_mode_fptr(@mode); - }; - public static void 
pp_free_mode(void* @mode) - { - pp_free_mode_fptr(@mode); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* pp_get_context_delegate(int @width, int @height, int @flags); - private static pp_get_context_delegate pp_get_context_fptr = (int @width, int @height, int @flags) => - { - pp_get_context_fptr = GetFunctionDelegate(GetOrLoadLibrary("postproc"), "pp_get_context"); - if (pp_get_context_fptr == null) - { - pp_get_context_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "pp_get_context")); - }; - } - return pp_get_context_fptr(@width, @height, @flags); - }; - public static void* pp_get_context(int @width, int @height, int @flags) - { - return pp_get_context_fptr(@width, @height, @flags); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void* pp_get_mode_by_name_and_quality_delegate( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @quality); - private static pp_get_mode_by_name_and_quality_delegate pp_get_mode_by_name_and_quality_fptr = (string @name, int @quality) => - { - pp_get_mode_by_name_and_quality_fptr = GetFunctionDelegate(GetOrLoadLibrary("postproc"), "pp_get_mode_by_name_and_quality"); - if (pp_get_mode_by_name_and_quality_fptr == null) - { - pp_get_mode_by_name_and_quality_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "pp_get_mode_by_name_and_quality")); - }; - } - return pp_get_mode_by_name_and_quality_fptr(@name, @quality); - }; - /// Return a pp_mode or NULL if an error occurred. 
- /// the string after "-pp" on the command line - /// a number from 0 to PP_QUALITY_MAX - public static void* pp_get_mode_by_name_and_quality( - #if NETSTANDARD2_1_OR_GREATER - [MarshalAs(UnmanagedType.LPUTF8Str)] - #else - [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] - #endif - string @name, int @quality) - { - return pp_get_mode_by_name_and_quality_fptr(@name, @quality); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void pp_postprocess_delegate(byte_ptrArray3 @src, int_array3 @srcStride, ref byte_ptrArray3 @dst, int_array3 @dstStride, int @horizontalSize, int @verticalSize, sbyte* @QP_store, int @QP_stride, void* @mode, void* @ppContext, int @pict_type); - private static pp_postprocess_delegate pp_postprocess_fptr = (byte_ptrArray3 @src, int_array3 @srcStride, ref byte_ptrArray3 @dst, int_array3 @dstStride, int @horizontalSize, int @verticalSize, sbyte* @QP_store, int @QP_stride, void* @mode, void* @ppContext, int @pict_type) => - { - pp_postprocess_fptr = GetFunctionDelegate(GetOrLoadLibrary("postproc"), "pp_postprocess"); - if (pp_postprocess_fptr == null) - { - pp_postprocess_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "pp_postprocess")); - }; - } - pp_postprocess_fptr(@src, @srcStride, ref @dst, @dstStride, @horizontalSize, @verticalSize, @QP_store, @QP_stride, @mode, @ppContext, @pict_type); - }; - public static void pp_postprocess(byte_ptrArray3 @src, int_array3 @srcStride, ref byte_ptrArray3 @dst, int_array3 @dstStride, int @horizontalSize, int @verticalSize, sbyte* @QP_store, int @QP_stride, void* @mode, void* @ppContext, int @pict_type) - { - pp_postprocess_fptr(@src, @srcStride, ref @dst, @dstStride, @horizontalSize, @verticalSize, @QP_store, @QP_stride, @mode, @ppContext, @pict_type); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate 
SwrContext* swr_alloc_delegate(); - private static swr_alloc_delegate swr_alloc_fptr = () => - { - swr_alloc_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_alloc"); - if (swr_alloc_fptr == null) - { - swr_alloc_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_alloc")); - }; - } - return swr_alloc_fptr(); - }; - /// Allocate SwrContext. - /// NULL on error, allocated context otherwise - public static SwrContext* swr_alloc() - { - return swr_alloc_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate SwrContext* swr_alloc_set_opts_delegate(SwrContext* @s, long @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, long @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx); - private static swr_alloc_set_opts_delegate swr_alloc_set_opts_fptr = (SwrContext* @s, long @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, long @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx) => - { - swr_alloc_set_opts_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_alloc_set_opts"); - if (swr_alloc_set_opts_fptr == null) - { - swr_alloc_set_opts_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_alloc_set_opts")); - }; - } - return swr_alloc_set_opts_fptr(@s, @out_ch_layout, @out_sample_fmt, @out_sample_rate, @in_ch_layout, @in_sample_fmt, @in_sample_rate, @log_offset, @log_ctx); - }; - /// Allocate SwrContext if needed and set/reset common parameters. - /// existing Swr context if available, or NULL if not - /// output channel layout (AV_CH_LAYOUT_*) - /// output sample format (AV_SAMPLE_FMT_*). - /// output sample rate (frequency in Hz) - /// input channel layout (AV_CH_LAYOUT_*) - /// input sample format (AV_SAMPLE_FMT_*). 
- /// input sample rate (frequency in Hz) - /// logging level offset - /// parent logging context, can be NULL - /// NULL on error, allocated context otherwise - [Obsolete("use ")] - public static SwrContext* swr_alloc_set_opts(SwrContext* @s, long @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, long @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx) - { - return swr_alloc_set_opts_fptr(@s, @out_ch_layout, @out_sample_fmt, @out_sample_rate, @in_ch_layout, @in_sample_fmt, @in_sample_rate, @log_offset, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_alloc_set_opts2_delegate(SwrContext** @ps, AVChannelLayout* @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, AVChannelLayout* @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx); - private static swr_alloc_set_opts2_delegate swr_alloc_set_opts2_fptr = (SwrContext** @ps, AVChannelLayout* @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, AVChannelLayout* @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx) => - { - swr_alloc_set_opts2_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_alloc_set_opts2"); - if (swr_alloc_set_opts2_fptr == null) - { - swr_alloc_set_opts2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_alloc_set_opts2")); - }; - } - return swr_alloc_set_opts2_fptr(@ps, @out_ch_layout, @out_sample_fmt, @out_sample_rate, @in_ch_layout, @in_sample_fmt, @in_sample_rate, @log_offset, @log_ctx); - }; - /// Allocate SwrContext if needed and set/reset common parameters. - /// Pointer to an existing Swr context if available, or to NULL if not. On success, *ps will be set to the allocated context. - /// output channel layout (e.g. 
AV_CHANNEL_LAYOUT_*) - /// output sample format (AV_SAMPLE_FMT_*). - /// output sample rate (frequency in Hz) - /// input channel layout (e.g. AV_CHANNEL_LAYOUT_*) - /// input sample format (AV_SAMPLE_FMT_*). - /// input sample rate (frequency in Hz) - /// logging level offset - /// parent logging context, can be NULL - /// 0 on success, a negative AVERROR code on error. On error, the Swr context is freed and *ps set to NULL. - public static int swr_alloc_set_opts2(SwrContext** @ps, AVChannelLayout* @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, AVChannelLayout* @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx) - { - return swr_alloc_set_opts2_fptr(@ps, @out_ch_layout, @out_sample_fmt, @out_sample_rate, @in_ch_layout, @in_sample_fmt, @in_sample_rate, @log_offset, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_build_matrix_delegate(ulong @in_layout, ulong @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @rematrix_maxval, double @rematrix_volume, double* @matrix, int @stride, AVMatrixEncoding @matrix_encoding, void* @log_ctx); - private static swr_build_matrix_delegate swr_build_matrix_fptr = (ulong @in_layout, ulong @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @rematrix_maxval, double @rematrix_volume, double* @matrix, int @stride, AVMatrixEncoding @matrix_encoding, void* @log_ctx) => - { - swr_build_matrix_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_build_matrix"); - if (swr_build_matrix_fptr == null) - { - swr_build_matrix_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_build_matrix")); - }; - } - return swr_build_matrix_fptr(@in_layout, @out_layout, @center_mix_level, @surround_mix_level, @lfe_mix_level, @rematrix_maxval, 
@rematrix_volume, @matrix, @stride, @matrix_encoding, @log_ctx); - }; - /// Generate a channel mixing matrix. - /// input channel layout - /// output channel layout - /// mix level for the center channel - /// mix level for the surround channel(s) - /// mix level for the low-frequency effects channel - /// if 1.0, coefficients will be normalized to prevent overflow. if INT_MAX, coefficients will not be normalized. - /// mixing coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o. - /// distance between adjacent input channels in the matrix array - /// matrixed stereo downmix mode (e.g. dplii) - /// parent logging context, can be NULL - /// 0 on success, negative AVERROR code on failure - [Obsolete("use ")] - public static int swr_build_matrix(ulong @in_layout, ulong @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @rematrix_maxval, double @rematrix_volume, double* @matrix, int @stride, AVMatrixEncoding @matrix_encoding, void* @log_ctx) - { - return swr_build_matrix_fptr(@in_layout, @out_layout, @center_mix_level, @surround_mix_level, @lfe_mix_level, @rematrix_maxval, @rematrix_volume, @matrix, @stride, @matrix_encoding, @log_ctx); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_build_matrix2_delegate(AVChannelLayout* @in_layout, AVChannelLayout* @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @maxval, double @rematrix_volume, double* @matrix, long @stride, AVMatrixEncoding @matrix_encoding, void* @log_context); - private static swr_build_matrix2_delegate swr_build_matrix2_fptr = (AVChannelLayout* @in_layout, AVChannelLayout* @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @maxval, double @rematrix_volume, double* @matrix, long @stride, AVMatrixEncoding @matrix_encoding, void* @log_context) => - { - swr_build_matrix2_fptr 
= GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_build_matrix2"); - if (swr_build_matrix2_fptr == null) - { - swr_build_matrix2_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_build_matrix2")); - }; - } - return swr_build_matrix2_fptr(@in_layout, @out_layout, @center_mix_level, @surround_mix_level, @lfe_mix_level, @maxval, @rematrix_volume, @matrix, @stride, @matrix_encoding, @log_context); - }; - /// Generate a channel mixing matrix. - /// input channel layout - /// output channel layout - /// mix level for the center channel - /// mix level for the surround channel(s) - /// mix level for the low-frequency effects channel - /// mixing coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o. - /// distance between adjacent input channels in the matrix array - /// matrixed stereo downmix mode (e.g. dplii) - /// 0 on success, negative AVERROR code on failure - public static int swr_build_matrix2(AVChannelLayout* @in_layout, AVChannelLayout* @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @maxval, double @rematrix_volume, double* @matrix, long @stride, AVMatrixEncoding @matrix_encoding, void* @log_context) - { - return swr_build_matrix2_fptr(@in_layout, @out_layout, @center_mix_level, @surround_mix_level, @lfe_mix_level, @maxval, @rematrix_volume, @matrix, @stride, @matrix_encoding, @log_context); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void swr_close_delegate(SwrContext* @s); - private static swr_close_delegate swr_close_fptr = (SwrContext* @s) => - { - swr_close_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_close"); - if (swr_close_fptr == null) - { - swr_close_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_close")); - }; - } - swr_close_fptr(@s); - }; - /// 
Closes the context so that swr_is_initialized() returns 0. - /// Swr context to be closed - public static void swr_close(SwrContext* @s) - { - swr_close_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_config_frame_delegate(SwrContext* @swr, AVFrame* @out, AVFrame* @in); - private static swr_config_frame_delegate swr_config_frame_fptr = (SwrContext* @swr, AVFrame* @out, AVFrame* @in) => - { - swr_config_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_config_frame"); - if (swr_config_frame_fptr == null) - { - swr_config_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_config_frame")); - }; - } - return swr_config_frame_fptr(@swr, @out, @in); - }; - /// Configure or reconfigure the SwrContext using the information provided by the AVFrames. - /// audio resample context - /// 0 on success, AVERROR on failure. - public static int swr_config_frame(SwrContext* @swr, AVFrame* @out, AVFrame* @in) - { - return swr_config_frame_fptr(@swr, @out, @in); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_convert_delegate(SwrContext* @s, byte** @out, int @out_count, byte** @in, int @in_count); - private static swr_convert_delegate swr_convert_fptr = (SwrContext* @s, byte** @out, int @out_count, byte** @in, int @in_count) => - { - swr_convert_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_convert"); - if (swr_convert_fptr == null) - { - swr_convert_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_convert")); - }; - } - return swr_convert_fptr(@s, @out, @out_count, @in, @in_count); - }; - /// Convert audio. 
- /// allocated Swr context, with parameters set - /// output buffers, only the first one need be set in case of packed audio - /// amount of space available for output in samples per channel - /// input buffers, only the first one need to be set in case of packed audio - /// number of input samples available in one channel - /// number of samples output per channel, negative value on error - public static int swr_convert(SwrContext* @s, byte** @out, int @out_count, byte** @in, int @in_count) - { - return swr_convert_fptr(@s, @out, @out_count, @in, @in_count); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_convert_frame_delegate(SwrContext* @swr, AVFrame* @output, AVFrame* @input); - private static swr_convert_frame_delegate swr_convert_frame_fptr = (SwrContext* @swr, AVFrame* @output, AVFrame* @input) => - { - swr_convert_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_convert_frame"); - if (swr_convert_frame_fptr == null) - { - swr_convert_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_convert_frame")); - }; - } - return swr_convert_frame_fptr(@swr, @output, @input); - }; - /// Convert the samples in the input AVFrame and write them to the output AVFrame. - /// audio resample context - /// output AVFrame - /// input AVFrame - /// 0 on success, AVERROR on failure or nonmatching configuration. 
- public static int swr_convert_frame(SwrContext* @swr, AVFrame* @output, AVFrame* @input) - { - return swr_convert_frame_fptr(@swr, @output, @input); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_drop_output_delegate(SwrContext* @s, int @count); - private static swr_drop_output_delegate swr_drop_output_fptr = (SwrContext* @s, int @count) => - { - swr_drop_output_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_drop_output"); - if (swr_drop_output_fptr == null) - { - swr_drop_output_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_drop_output")); - }; - } - return swr_drop_output_fptr(@s, @count); - }; - /// Drops the specified number of output samples. - /// allocated Swr context - /// number of samples to be dropped - /// >= 0 on success, or a negative AVERROR code on failure - public static int swr_drop_output(SwrContext* @s, int @count) - { - return swr_drop_output_fptr(@s, @count); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void swr_free_delegate(SwrContext** @s); - private static swr_free_delegate swr_free_fptr = (SwrContext** @s) => - { - swr_free_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_free"); - if (swr_free_fptr == null) - { - swr_free_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_free")); - }; - } - swr_free_fptr(@s); - }; - /// Free the given SwrContext and set the pointer to NULL. 
- /// a pointer to a pointer to Swr context - public static void swr_free(SwrContext** @s) - { - swr_free_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVClass* swr_get_class_delegate(); - private static swr_get_class_delegate swr_get_class_fptr = () => - { - swr_get_class_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_get_class"); - if (swr_get_class_fptr == null) - { - swr_get_class_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_get_class")); - }; - } - return swr_get_class_fptr(); - }; - /// Get the AVClass for SwrContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. - /// the AVClass of SwrContext - public static AVClass* swr_get_class() - { - return swr_get_class_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long swr_get_delay_delegate(SwrContext* @s, long @base); - private static swr_get_delay_delegate swr_get_delay_fptr = (SwrContext* @s, long @base) => - { - swr_get_delay_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_get_delay"); - if (swr_get_delay_fptr == null) - { - swr_get_delay_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_get_delay")); - }; - } - return swr_get_delay_fptr(@s, @base); - }; - /// Gets the delay the next input sample will experience relative to the next output sample. 
- /// swr context - /// timebase in which the returned delay will be: - public static long swr_get_delay(SwrContext* @s, long @base) - { - return swr_get_delay_fptr(@s, @base); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_get_out_samples_delegate(SwrContext* @s, int @in_samples); - private static swr_get_out_samples_delegate swr_get_out_samples_fptr = (SwrContext* @s, int @in_samples) => - { - swr_get_out_samples_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_get_out_samples"); - if (swr_get_out_samples_fptr == null) - { - swr_get_out_samples_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_get_out_samples")); - }; - } - return swr_get_out_samples_fptr(@s, @in_samples); - }; - /// Find an upper bound on the number of samples that the next swr_convert call will output, if called with in_samples of input samples. This depends on the internal state, and anything changing the internal state (like further swr_convert() calls) will may change the number of samples swr_get_out_samples() returns for the same number of input samples. - /// number of input samples. - public static int swr_get_out_samples(SwrContext* @s, int @in_samples) - { - return swr_get_out_samples_fptr(@s, @in_samples); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_init_delegate(SwrContext* @s); - private static swr_init_delegate swr_init_fptr = (SwrContext* @s) => - { - swr_init_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_init"); - if (swr_init_fptr == null) - { - swr_init_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_init")); - }; - } - return swr_init_fptr(@s); - }; - /// Initialize context after user parameters have been set. 
- /// Swr context to initialize - /// AVERROR error code in case of failure. - public static int swr_init(SwrContext* @s) - { - return swr_init_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_inject_silence_delegate(SwrContext* @s, int @count); - private static swr_inject_silence_delegate swr_inject_silence_fptr = (SwrContext* @s, int @count) => - { - swr_inject_silence_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_inject_silence"); - if (swr_inject_silence_fptr == null) - { - swr_inject_silence_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_inject_silence")); - }; - } - return swr_inject_silence_fptr(@s, @count); - }; - /// Injects the specified number of silence samples. - /// allocated Swr context - /// number of samples to be dropped - /// >= 0 on success, or a negative AVERROR code on failure - public static int swr_inject_silence(SwrContext* @s, int @count) - { - return swr_inject_silence_fptr(@s, @count); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_is_initialized_delegate(SwrContext* @s); - private static swr_is_initialized_delegate swr_is_initialized_fptr = (SwrContext* @s) => - { - swr_is_initialized_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_is_initialized"); - if (swr_is_initialized_fptr == null) - { - swr_is_initialized_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_is_initialized")); - }; - } - return swr_is_initialized_fptr(@s); - }; - /// Check whether an swr context has been initialized or not. 
- /// Swr context to check - /// positive if it has been initialized, 0 if not initialized - public static int swr_is_initialized(SwrContext* @s) - { - return swr_is_initialized_fptr(@s); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate long swr_next_pts_delegate(SwrContext* @s, long @pts); - private static swr_next_pts_delegate swr_next_pts_fptr = (SwrContext* @s, long @pts) => - { - swr_next_pts_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_next_pts"); - if (swr_next_pts_fptr == null) - { - swr_next_pts_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_next_pts")); - }; - } - return swr_next_pts_fptr(@s, @pts); - }; - /// Convert the next timestamp from input to output timestamps are in 1/(in_sample_rate * out_sample_rate) units. - /// the output timestamp for the next output sample - public static long swr_next_pts(SwrContext* @s, long @pts) - { - return swr_next_pts_fptr(@s, @pts); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_set_channel_mapping_delegate(SwrContext* @s, int* @channel_map); - private static swr_set_channel_mapping_delegate swr_set_channel_mapping_fptr = (SwrContext* @s, int* @channel_map) => - { - swr_set_channel_mapping_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_set_channel_mapping"); - if (swr_set_channel_mapping_fptr == null) - { - swr_set_channel_mapping_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_set_channel_mapping")); - }; - } - return swr_set_channel_mapping_fptr(@s, @channel_map); - }; - /// Set a customized input channel mapping. - /// allocated Swr context, not yet initialized - /// customized input channel mapping (array of channel indexes, -1 for a muted channel) - /// >= 0 on success, or AVERROR error code in case of failure. 
- public static int swr_set_channel_mapping(SwrContext* @s, int* @channel_map) - { - return swr_set_channel_mapping_fptr(@s, @channel_map); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_set_compensation_delegate(SwrContext* @s, int @sample_delta, int @compensation_distance); - private static swr_set_compensation_delegate swr_set_compensation_fptr = (SwrContext* @s, int @sample_delta, int @compensation_distance) => - { - swr_set_compensation_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_set_compensation"); - if (swr_set_compensation_fptr == null) - { - swr_set_compensation_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_set_compensation")); - }; - } - return swr_set_compensation_fptr(@s, @sample_delta, @compensation_distance); - }; - /// Activate resampling compensation ("soft" compensation). This function is internally called when needed in swr_next_pts(). - /// allocated Swr context. If it is not initialized, or SWR_FLAG_RESAMPLE is not set, swr_init() is called with the flag set. 
- /// delta in PTS per sample - /// number of samples to compensate for - /// >= 0 on success, AVERROR error codes if: - public static int swr_set_compensation(SwrContext* @s, int @sample_delta, int @compensation_distance) - { - return swr_set_compensation_fptr(@s, @sample_delta, @compensation_distance); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int swr_set_matrix_delegate(SwrContext* @s, double* @matrix, int @stride); - private static swr_set_matrix_delegate swr_set_matrix_fptr = (SwrContext* @s, double* @matrix, int @stride) => - { - swr_set_matrix_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swr_set_matrix"); - if (swr_set_matrix_fptr == null) - { - swr_set_matrix_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swr_set_matrix")); - }; - } - return swr_set_matrix_fptr(@s, @matrix, @stride); - }; - /// Set a customized remix matrix. - /// allocated Swr context, not yet initialized - /// remix coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o - /// offset between lines of the matrix - /// >= 0 on success, or AVERROR error code in case of failure. 
- public static int swr_set_matrix(SwrContext* @s, double* @matrix, int @stride) - { - return swr_set_matrix_fptr(@s, @matrix, @stride); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string swresample_configuration_delegate(); - private static swresample_configuration_delegate swresample_configuration_fptr = () => - { - swresample_configuration_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swresample_configuration"); - if (swresample_configuration_fptr == null) - { - swresample_configuration_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swresample_configuration")); - }; - } - return swresample_configuration_fptr(); - }; - /// Return the swr build-time configuration. - public static string swresample_configuration() - { - return swresample_configuration_fptr(); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string swresample_license_delegate(); - private static swresample_license_delegate swresample_license_fptr = () => - { - swresample_license_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swresample_license"); - if (swresample_license_fptr == null) - { - swresample_license_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swresample_license")); - }; - } - return swresample_license_fptr(); - }; - /// Return the swr license. 
- public static string swresample_license() - { - return swresample_license_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint swresample_version_delegate(); - private static swresample_version_delegate swresample_version_fptr = () => - { - swresample_version_fptr = GetFunctionDelegate(GetOrLoadLibrary("swresample"), "swresample_version"); - if (swresample_version_fptr == null) - { - swresample_version_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swresample_version")); - }; - } - return swresample_version_fptr(); - }; - /// Return the LIBSWRESAMPLE_VERSION_INT constant. - public static uint swresample_version() - { - return swresample_version_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate SwsContext* sws_alloc_context_delegate(); - private static sws_alloc_context_delegate sws_alloc_context_fptr = () => - { - sws_alloc_context_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_alloc_context"); - if (sws_alloc_context_fptr == null) - { - sws_alloc_context_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_alloc_context")); - }; - } - return sws_alloc_context_fptr(); - }; - /// Allocate an empty SwsContext. This must be filled and passed to sws_init_context(). For filling see AVOptions, options.c and sws_setColorspaceDetails(). 
- public static SwsContext* sws_alloc_context() - { - return sws_alloc_context_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate SwsVector* sws_allocVec_delegate(int @length); - private static sws_allocVec_delegate sws_allocVec_fptr = (int @length) => - { - sws_allocVec_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_allocVec"); - if (sws_allocVec_fptr == null) - { - sws_allocVec_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_allocVec")); - }; - } - return sws_allocVec_fptr(@length); - }; - /// Allocate and return an uninitialized vector with length coefficients. - public static SwsVector* sws_allocVec(int @length) - { - return sws_allocVec_fptr(@length); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void sws_convertPalette8ToPacked24_delegate(byte* @src, byte* @dst, int @num_pixels, byte* @palette); - private static sws_convertPalette8ToPacked24_delegate sws_convertPalette8ToPacked24_fptr = (byte* @src, byte* @dst, int @num_pixels, byte* @palette) => - { - sws_convertPalette8ToPacked24_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_convertPalette8ToPacked24"); - if (sws_convertPalette8ToPacked24_fptr == null) - { - sws_convertPalette8ToPacked24_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_convertPalette8ToPacked24")); - }; - } - sws_convertPalette8ToPacked24_fptr(@src, @dst, @num_pixels, @palette); - }; - /// Convert an 8-bit paletted frame into a frame with a color depth of 24 bits. 
- /// source frame buffer - /// destination frame buffer - /// number of pixels to convert - /// array with [256] entries, which must match color arrangement (RGB or BGR) of src - public static void sws_convertPalette8ToPacked24(byte* @src, byte* @dst, int @num_pixels, byte* @palette) - { - sws_convertPalette8ToPacked24_fptr(@src, @dst, @num_pixels, @palette); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void sws_convertPalette8ToPacked32_delegate(byte* @src, byte* @dst, int @num_pixels, byte* @palette); - private static sws_convertPalette8ToPacked32_delegate sws_convertPalette8ToPacked32_fptr = (byte* @src, byte* @dst, int @num_pixels, byte* @palette) => - { - sws_convertPalette8ToPacked32_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_convertPalette8ToPacked32"); - if (sws_convertPalette8ToPacked32_fptr == null) - { - sws_convertPalette8ToPacked32_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_convertPalette8ToPacked32")); - }; - } - sws_convertPalette8ToPacked32_fptr(@src, @dst, @num_pixels, @palette); - }; - /// Convert an 8-bit paletted frame into a frame with a color depth of 32 bits. 
- /// source frame buffer - /// destination frame buffer - /// number of pixels to convert - /// array with [256] entries, which must match color arrangement (RGB or BGR) of src - public static void sws_convertPalette8ToPacked32(byte* @src, byte* @dst, int @num_pixels, byte* @palette) - { - sws_convertPalette8ToPacked32_fptr(@src, @dst, @num_pixels, @palette); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void sws_frame_end_delegate(SwsContext* @c); - private static sws_frame_end_delegate sws_frame_end_fptr = (SwsContext* @c) => - { - sws_frame_end_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_frame_end"); - if (sws_frame_end_fptr == null) - { - sws_frame_end_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_frame_end")); - }; - } - sws_frame_end_fptr(@c); - }; - /// Finish the scaling process for a pair of source/destination frames previously submitted with sws_frame_start(). Must be called after all sws_send_slice() and sws_receive_slice() calls are done, before any new sws_frame_start() calls. - public static void sws_frame_end(SwsContext* @c) - { - sws_frame_end_fptr(@c); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int sws_frame_start_delegate(SwsContext* @c, AVFrame* @dst, AVFrame* @src); - private static sws_frame_start_delegate sws_frame_start_fptr = (SwsContext* @c, AVFrame* @dst, AVFrame* @src) => - { - sws_frame_start_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_frame_start"); - if (sws_frame_start_fptr == null) - { - sws_frame_start_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_frame_start")); - }; - } - return sws_frame_start_fptr(@c, @dst, @src); - }; - /// Initialize the scaling process for a given pair of source/destination frames. 
Must be called before any calls to sws_send_slice() and sws_receive_slice(). - /// The destination frame. - /// The source frame. The data buffers must be allocated, but the frame data does not have to be ready at this point. Data availability is then signalled by sws_send_slice(). - /// 0 on success, a negative AVERROR code on failure - public static int sws_frame_start(SwsContext* @c, AVFrame* @dst, AVFrame* @src) - { - return sws_frame_start_fptr(@c, @dst, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void sws_freeContext_delegate(SwsContext* @swsContext); - private static sws_freeContext_delegate sws_freeContext_fptr = (SwsContext* @swsContext) => - { - sws_freeContext_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_freeContext"); - if (sws_freeContext_fptr == null) - { - sws_freeContext_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_freeContext")); - }; - } - sws_freeContext_fptr(@swsContext); - }; - /// Free the swscaler context swsContext. If swsContext is NULL, then does nothing. 
- public static void sws_freeContext(SwsContext* @swsContext) - { - sws_freeContext_fptr(@swsContext); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void sws_freeFilter_delegate(SwsFilter* @filter); - private static sws_freeFilter_delegate sws_freeFilter_fptr = (SwsFilter* @filter) => - { - sws_freeFilter_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_freeFilter"); - if (sws_freeFilter_fptr == null) - { - sws_freeFilter_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_freeFilter")); - }; - } - sws_freeFilter_fptr(@filter); - }; - public static void sws_freeFilter(SwsFilter* @filter) - { - sws_freeFilter_fptr(@filter); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void sws_freeVec_delegate(SwsVector* @a); - private static sws_freeVec_delegate sws_freeVec_fptr = (SwsVector* @a) => - { - sws_freeVec_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_freeVec"); - if (sws_freeVec_fptr == null) - { - sws_freeVec_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_freeVec")); - }; - } - sws_freeVec_fptr(@a); - }; - public static void sws_freeVec(SwsVector* @a) - { - sws_freeVec_fptr(@a); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate AVClass* sws_get_class_delegate(); - private static sws_get_class_delegate sws_get_class_fptr = () => - { - sws_get_class_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_get_class"); - if (sws_get_class_fptr == null) - { - sws_get_class_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_get_class")); - }; - } - return sws_get_class_fptr(); - }; - /// Get the AVClass for swsContext. 
It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. - public static AVClass* sws_get_class() - { - return sws_get_class_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate SwsContext* sws_getCachedContext_delegate(SwsContext* @context, int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param); - private static sws_getCachedContext_delegate sws_getCachedContext_fptr = (SwsContext* @context, int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param) => - { - sws_getCachedContext_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_getCachedContext"); - if (sws_getCachedContext_fptr == null) - { - sws_getCachedContext_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_getCachedContext")); - }; - } - return sws_getCachedContext_fptr(@context, @srcW, @srcH, @srcFormat, @dstW, @dstH, @dstFormat, @flags, @srcFilter, @dstFilter, @param); - }; - /// Check if context can be reused, otherwise reallocate a new one. 
- public static SwsContext* sws_getCachedContext(SwsContext* @context, int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param) - { - return sws_getCachedContext_fptr(@context, @srcW, @srcH, @srcFormat, @dstW, @dstH, @dstFormat, @flags, @srcFilter, @dstFilter, @param); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int* sws_getCoefficients_delegate(int @colorspace); - private static sws_getCoefficients_delegate sws_getCoefficients_fptr = (int @colorspace) => - { - sws_getCoefficients_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_getCoefficients"); - if (sws_getCoefficients_fptr == null) - { - sws_getCoefficients_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_getCoefficients")); - }; - } - return sws_getCoefficients_fptr(@colorspace); - }; - /// Return a pointer to yuv<->rgb coefficients for the given colorspace suitable for sws_setColorspaceDetails(). - /// One of the SWS_CS_* macros. If invalid, SWS_CS_DEFAULT is used. 
- public static int* sws_getCoefficients(int @colorspace) - { - return sws_getCoefficients_fptr(@colorspace); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int sws_getColorspaceDetails_delegate(SwsContext* @c, int** @inv_table, int* @srcRange, int** @table, int* @dstRange, int* @brightness, int* @contrast, int* @saturation); - private static sws_getColorspaceDetails_delegate sws_getColorspaceDetails_fptr = (SwsContext* @c, int** @inv_table, int* @srcRange, int** @table, int* @dstRange, int* @brightness, int* @contrast, int* @saturation) => - { - sws_getColorspaceDetails_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_getColorspaceDetails"); - if (sws_getColorspaceDetails_fptr == null) - { - sws_getColorspaceDetails_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_getColorspaceDetails")); - }; - } - return sws_getColorspaceDetails_fptr(@c, @inv_table, @srcRange, @table, @dstRange, @brightness, @contrast, @saturation); - }; - /// #if LIBSWSCALE_VERSION_MAJOR > 6 - /// negative error code on error, non negative otherwise #else - public static int sws_getColorspaceDetails(SwsContext* @c, int** @inv_table, int* @srcRange, int** @table, int* @dstRange, int* @brightness, int* @contrast, int* @saturation) - { - return sws_getColorspaceDetails_fptr(@c, @inv_table, @srcRange, @table, @dstRange, @brightness, @contrast, @saturation); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate SwsContext* sws_getContext_delegate(int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param); - private static sws_getContext_delegate sws_getContext_fptr = (int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, 
SwsFilter* @dstFilter, double* @param) => - { - sws_getContext_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_getContext"); - if (sws_getContext_fptr == null) - { - sws_getContext_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_getContext")); - }; - } - return sws_getContext_fptr(@srcW, @srcH, @srcFormat, @dstW, @dstH, @dstFormat, @flags, @srcFilter, @dstFilter, @param); - }; - /// Allocate and return an SwsContext. You need it to perform scaling/conversion operations using sws_scale(). - /// the width of the source image - /// the height of the source image - /// the source image format - /// the width of the destination image - /// the height of the destination image - /// the destination image format - /// specify which algorithm and options to use for rescaling - /// extra parameters to tune the used scaler For SWS_BICUBIC param[0] and [1] tune the shape of the basis function, param[0] tunes f(1) and param[1] f´(1) For SWS_GAUSS param[0] tunes the exponent and thus cutoff frequency For SWS_LANCZOS param[0] tunes the width of the window function - /// a pointer to an allocated context, or NULL in case of error - public static SwsContext* sws_getContext(int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param) - { - return sws_getContext_fptr(@srcW, @srcH, @srcFormat, @dstW, @dstH, @dstFormat, @flags, @srcFilter, @dstFilter, @param); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate SwsFilter* sws_getDefaultFilter_delegate(float @lumaGBlur, float @chromaGBlur, float @lumaSharpen, float @chromaSharpen, float @chromaHShift, float @chromaVShift, int @verbose); - private static sws_getDefaultFilter_delegate sws_getDefaultFilter_fptr = (float @lumaGBlur, float @chromaGBlur, float @lumaSharpen, float @chromaSharpen, float 
@chromaHShift, float @chromaVShift, int @verbose) => - { - sws_getDefaultFilter_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_getDefaultFilter"); - if (sws_getDefaultFilter_fptr == null) - { - sws_getDefaultFilter_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_getDefaultFilter")); - }; - } - return sws_getDefaultFilter_fptr(@lumaGBlur, @chromaGBlur, @lumaSharpen, @chromaSharpen, @chromaHShift, @chromaVShift, @verbose); - }; - public static SwsFilter* sws_getDefaultFilter(float @lumaGBlur, float @chromaGBlur, float @lumaSharpen, float @chromaSharpen, float @chromaHShift, float @chromaVShift, int @verbose) - { - return sws_getDefaultFilter_fptr(@lumaGBlur, @chromaGBlur, @lumaSharpen, @chromaSharpen, @chromaHShift, @chromaVShift, @verbose); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate SwsVector* sws_getGaussianVec_delegate(double @variance, double @quality); - private static sws_getGaussianVec_delegate sws_getGaussianVec_fptr = (double @variance, double @quality) => - { - sws_getGaussianVec_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_getGaussianVec"); - if (sws_getGaussianVec_fptr == null) - { - sws_getGaussianVec_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_getGaussianVec")); - }; - } - return sws_getGaussianVec_fptr(@variance, @quality); - }; - /// Return a normalized Gaussian curve used to filter stuff quality = 3 is high quality, lower is lower quality. 
- public static SwsVector* sws_getGaussianVec(double @variance, double @quality) - { - return sws_getGaussianVec_fptr(@variance, @quality); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int sws_init_context_delegate(SwsContext* @sws_context, SwsFilter* @srcFilter, SwsFilter* @dstFilter); - private static sws_init_context_delegate sws_init_context_fptr = (SwsContext* @sws_context, SwsFilter* @srcFilter, SwsFilter* @dstFilter) => - { - sws_init_context_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_init_context"); - if (sws_init_context_fptr == null) - { - sws_init_context_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_init_context")); - }; - } - return sws_init_context_fptr(@sws_context, @srcFilter, @dstFilter); - }; - /// Initialize the swscaler context sws_context. - /// zero or positive value on success, a negative value on error - public static int sws_init_context(SwsContext* @sws_context, SwsFilter* @srcFilter, SwsFilter* @dstFilter) - { - return sws_init_context_fptr(@sws_context, @srcFilter, @dstFilter); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int sws_isSupportedEndiannessConversion_delegate(AVPixelFormat @pix_fmt); - private static sws_isSupportedEndiannessConversion_delegate sws_isSupportedEndiannessConversion_fptr = (AVPixelFormat @pix_fmt) => - { - sws_isSupportedEndiannessConversion_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_isSupportedEndiannessConversion"); - if (sws_isSupportedEndiannessConversion_fptr == null) - { - sws_isSupportedEndiannessConversion_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_isSupportedEndiannessConversion")); - }; - } - return sws_isSupportedEndiannessConversion_fptr(@pix_fmt); - }; - /// Returns a positive value if an endianness 
conversion for pix_fmt is supported, 0 otherwise. - /// the pixel format - /// a positive value if an endianness conversion for pix_fmt is supported, 0 otherwise. - public static int sws_isSupportedEndiannessConversion(AVPixelFormat @pix_fmt) - { - return sws_isSupportedEndiannessConversion_fptr(@pix_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int sws_isSupportedInput_delegate(AVPixelFormat @pix_fmt); - private static sws_isSupportedInput_delegate sws_isSupportedInput_fptr = (AVPixelFormat @pix_fmt) => - { - sws_isSupportedInput_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_isSupportedInput"); - if (sws_isSupportedInput_fptr == null) - { - sws_isSupportedInput_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_isSupportedInput")); - }; - } - return sws_isSupportedInput_fptr(@pix_fmt); - }; - /// Return a positive value if pix_fmt is a supported input format, 0 otherwise. - public static int sws_isSupportedInput(AVPixelFormat @pix_fmt) - { - return sws_isSupportedInput_fptr(@pix_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int sws_isSupportedOutput_delegate(AVPixelFormat @pix_fmt); - private static sws_isSupportedOutput_delegate sws_isSupportedOutput_fptr = (AVPixelFormat @pix_fmt) => - { - sws_isSupportedOutput_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_isSupportedOutput"); - if (sws_isSupportedOutput_fptr == null) - { - sws_isSupportedOutput_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_isSupportedOutput")); - }; - } - return sws_isSupportedOutput_fptr(@pix_fmt); - }; - /// Return a positive value if pix_fmt is a supported output format, 0 otherwise. 
- public static int sws_isSupportedOutput(AVPixelFormat @pix_fmt) - { - return sws_isSupportedOutput_fptr(@pix_fmt); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void sws_normalizeVec_delegate(SwsVector* @a, double @height); - private static sws_normalizeVec_delegate sws_normalizeVec_fptr = (SwsVector* @a, double @height) => - { - sws_normalizeVec_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_normalizeVec"); - if (sws_normalizeVec_fptr == null) - { - sws_normalizeVec_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_normalizeVec")); - }; - } - sws_normalizeVec_fptr(@a, @height); - }; - /// Scale all the coefficients of a so that their sum equals height. - public static void sws_normalizeVec(SwsVector* @a, double @height) - { - sws_normalizeVec_fptr(@a, @height); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int sws_receive_slice_delegate(SwsContext* @c, uint @slice_start, uint @slice_height); - private static sws_receive_slice_delegate sws_receive_slice_fptr = (SwsContext* @c, uint @slice_start, uint @slice_height) => - { - sws_receive_slice_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_receive_slice"); - if (sws_receive_slice_fptr == null) - { - sws_receive_slice_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_receive_slice")); - }; - } - return sws_receive_slice_fptr(@c, @slice_start, @slice_height); - }; - /// Request a horizontal slice of the output data to be written into the frame previously provided to sws_frame_start(). - /// first row of the slice; must be a multiple of sws_receive_slice_alignment() - /// number of rows in the slice; must be a multiple of sws_receive_slice_alignment(), except for the last slice (i.e. 
when slice_start+slice_height is equal to output frame height) - /// a non-negative number if the data was successfully written into the output AVERROR(EAGAIN) if more input data needs to be provided before the output can be produced another negative AVERROR code on other kinds of scaling failure - public static int sws_receive_slice(SwsContext* @c, uint @slice_start, uint @slice_height) - { - return sws_receive_slice_fptr(@c, @slice_start, @slice_height); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint sws_receive_slice_alignment_delegate(SwsContext* @c); - private static sws_receive_slice_alignment_delegate sws_receive_slice_alignment_fptr = (SwsContext* @c) => - { - sws_receive_slice_alignment_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_receive_slice_alignment"); - if (sws_receive_slice_alignment_fptr == null) - { - sws_receive_slice_alignment_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_receive_slice_alignment")); - }; - } - return sws_receive_slice_alignment_fptr(@c); - }; - /// Returns alignment required for output slices requested with sws_receive_slice(). Slice offsets and sizes passed to sws_receive_slice() must be multiples of the value returned from this function. - /// alignment required for output slices requested with sws_receive_slice(). Slice offsets and sizes passed to sws_receive_slice() must be multiples of the value returned from this function. 
- public static uint sws_receive_slice_alignment(SwsContext* @c) - { - return sws_receive_slice_alignment_fptr(@c); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int sws_scale_delegate(SwsContext* @c, byte*[] @srcSlice, int[] @srcStride, int @srcSliceY, int @srcSliceH, byte*[] @dst, int[] @dstStride); - private static sws_scale_delegate sws_scale_fptr = (SwsContext* @c, byte*[] @srcSlice, int[] @srcStride, int @srcSliceY, int @srcSliceH, byte*[] @dst, int[] @dstStride) => - { - sws_scale_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_scale"); - if (sws_scale_fptr == null) - { - sws_scale_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_scale")); - }; - } - return sws_scale_fptr(@c, @srcSlice, @srcStride, @srcSliceY, @srcSliceH, @dst, @dstStride); - }; - /// Scale the image slice in srcSlice and put the resulting scaled slice in the image in dst. A slice is a sequence of consecutive rows in an image. 
- /// the scaling context previously created with sws_getContext() - /// the array containing the pointers to the planes of the source slice - /// the array containing the strides for each plane of the source image - /// the position in the source image of the slice to process, that is the number (counted starting from zero) in the image of the first row of the slice - /// the height of the source slice, that is the number of rows in the slice - /// the array containing the pointers to the planes of the destination image - /// the array containing the strides for each plane of the destination image - /// the height of the output slice - public static int sws_scale(SwsContext* @c, byte*[] @srcSlice, int[] @srcStride, int @srcSliceY, int @srcSliceH, byte*[] @dst, int[] @dstStride) - { - return sws_scale_fptr(@c, @srcSlice, @srcStride, @srcSliceY, @srcSliceH, @dst, @dstStride); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int sws_scale_frame_delegate(SwsContext* @c, AVFrame* @dst, AVFrame* @src); - private static sws_scale_frame_delegate sws_scale_frame_fptr = (SwsContext* @c, AVFrame* @dst, AVFrame* @src) => - { - sws_scale_frame_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_scale_frame"); - if (sws_scale_frame_fptr == null) - { - sws_scale_frame_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_scale_frame")); - }; - } - return sws_scale_frame_fptr(@c, @dst, @src); - }; - /// Scale source data from src and write the output to dst. - /// The destination frame. See documentation for sws_frame_start() for more details. - /// The source frame. 
- /// 0 on success, a negative AVERROR code on failure - public static int sws_scale_frame(SwsContext* @c, AVFrame* @dst, AVFrame* @src) - { - return sws_scale_frame_fptr(@c, @dst, @src); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate void sws_scaleVec_delegate(SwsVector* @a, double @scalar); - private static sws_scaleVec_delegate sws_scaleVec_fptr = (SwsVector* @a, double @scalar) => - { - sws_scaleVec_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_scaleVec"); - if (sws_scaleVec_fptr == null) - { - sws_scaleVec_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_scaleVec")); - }; - } - sws_scaleVec_fptr(@a, @scalar); - }; - /// Scale all the coefficients of a by the scalar value. - public static void sws_scaleVec(SwsVector* @a, double @scalar) - { - sws_scaleVec_fptr(@a, @scalar); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int sws_send_slice_delegate(SwsContext* @c, uint @slice_start, uint @slice_height); - private static sws_send_slice_delegate sws_send_slice_fptr = (SwsContext* @c, uint @slice_start, uint @slice_height) => - { - sws_send_slice_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_send_slice"); - if (sws_send_slice_fptr == null) - { - sws_send_slice_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_send_slice")); - }; - } - return sws_send_slice_fptr(@c, @slice_start, @slice_height); - }; - /// Indicate that a horizontal slice of input data is available in the source frame previously provided to sws_frame_start(). The slices may be provided in any order, but may not overlap. For vertically subsampled pixel formats, the slices must be aligned according to subsampling. 
- /// first row of the slice - /// number of rows in the slice - /// a non-negative number on success, a negative AVERROR code on failure. - public static int sws_send_slice(SwsContext* @c, uint @slice_start, uint @slice_height) - { - return sws_send_slice_fptr(@c, @slice_start, @slice_height); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate int sws_setColorspaceDetails_delegate(SwsContext* @c, int_array4 @inv_table, int @srcRange, int_array4 @table, int @dstRange, int @brightness, int @contrast, int @saturation); - private static sws_setColorspaceDetails_delegate sws_setColorspaceDetails_fptr = (SwsContext* @c, int_array4 @inv_table, int @srcRange, int_array4 @table, int @dstRange, int @brightness, int @contrast, int @saturation) => - { - sws_setColorspaceDetails_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "sws_setColorspaceDetails"); - if (sws_setColorspaceDetails_fptr == null) - { - sws_setColorspaceDetails_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "sws_setColorspaceDetails")); - }; - } - return sws_setColorspaceDetails_fptr(@c, @inv_table, @srcRange, @table, @dstRange, @brightness, @contrast, @saturation); - }; - /// Returns negative error code on error, non negative otherwise #else Returns -1 if not supported #endif - /// the yuv2rgb coefficients describing the input yuv space, normally ff_yuv2rgb_coeffs[x] - /// flag indicating the while-black range of the input (1=jpeg / 0=mpeg) - /// the yuv2rgb coefficients describing the output yuv space, normally ff_yuv2rgb_coeffs[x] - /// flag indicating the while-black range of the output (1=jpeg / 0=mpeg) - /// 16.16 fixed point brightness correction - /// 16.16 fixed point contrast correction - /// 16.16 fixed point saturation correction #if LIBSWSCALE_VERSION_MAJOR > 6 - /// negative error code on error, non negative otherwise #else - public static int 
sws_setColorspaceDetails(SwsContext* @c, int_array4 @inv_table, int @srcRange, int_array4 @table, int @dstRange, int @brightness, int @contrast, int @saturation) - { - return sws_setColorspaceDetails_fptr(@c, @inv_table, @srcRange, @table, @dstRange, @brightness, @contrast, @saturation); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string swscale_configuration_delegate(); - private static swscale_configuration_delegate swscale_configuration_fptr = () => - { - swscale_configuration_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "swscale_configuration"); - if (swscale_configuration_fptr == null) - { - swscale_configuration_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swscale_configuration")); - }; - } - return swscale_configuration_fptr(); - }; - /// Return the libswscale build-time configuration. - public static string swscale_configuration() - { - return swscale_configuration_fptr(); - } - - - [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate string swscale_license_delegate(); - private static swscale_license_delegate swscale_license_fptr = () => - { - swscale_license_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "swscale_license"); - if (swscale_license_fptr == null) - { - swscale_license_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swscale_license")); - }; - } - return swscale_license_fptr(); - }; - /// Return the libswscale license. 
- public static string swscale_license() - { - return swscale_license_fptr(); - } - - - [UnmanagedFunctionPointer(CallingConvention.Cdecl, CharSet = CharSet.Ansi)] - private delegate uint swscale_version_delegate(); - private static swscale_version_delegate swscale_version_fptr = () => - { - swscale_version_fptr = GetFunctionDelegate(GetOrLoadLibrary("swscale"), "swscale_version"); - if (swscale_version_fptr == null) - { - swscale_version_fptr = delegate - { - throw new PlatformNotSupportedException(string.Format(PlatformNotSupportedMessageFormat, "swscale_version")); - }; - } - return swscale_version_fptr(); - }; - /// Color conversion and scaling library. - public static uint swscale_version() - { - return swscale_version_fptr(); - } - - - } -} diff --git a/FFmpeg.AutoGen/FFmpeg.functions.inline.g.cs b/FFmpeg.AutoGen/FFmpeg.functions.inline.g.cs deleted file mode 100644 index cdfa0027..00000000 --- a/FFmpeg.AutoGen/FFmpeg.functions.inline.g.cs +++ /dev/null @@ -1,403 +0,0 @@ -using System; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen -{ - public unsafe static partial class ffmpeg - { - /// Compute ceil(log2(x)). - /// value used to compute ceil(log2(x)) - /// computed ceiling of log2(x) - public static int av_ceil_log2_c(int @x) - { - return av_log2((uint)(x - 1U) << 1); - } - // original body hash: Y9QGw919/NB5ltczSPmZu5WZt+BfR1GGQ58ULgOxiNo= - - - /// Clip a signed integer value into the amin-amax range. - /// value to clip - /// minimum value of the clip range - /// maximum value of the clip range - /// clipped value - public static int av_clip_c(int @a, int @amin, int @amax) - { - if (a < amin) - return amin; - else if (a > amax) - return amax; - else - return a; - } - // original body hash: FGSX8EvLhMgYqP9+0z1+Clej4HxjpENDPDX7uAYLx6k= - - - /// Clip a signed integer value into the -32768,32767 range. 
- /// value to clip - /// clipped value - public static short av_clip_int16_c(int @a) - { - if (((a + 32768U) & ~65535) != 0) - return (short)((a >> 31) ^ 32767); - else - return (short)a; - } - // original body hash: l7ot2X+8YIG7Ze9ecaMTap87pGl9Q5kffGq1e9dS9Es= - - - /// Clip a signed integer value into the -128,127 range. - /// value to clip - /// clipped value - public static sbyte av_clip_int8_c(int @a) - { - if (((a + 128U) & ~255) != 0) - return (sbyte)((a >> 31) ^ 127); - else - return (sbyte)a; - } - // original body hash: 959D6ojD8+Bo9o7pGvHcWTnCDg5Ax0o328RGYDIiUvo= - - - /// Clip a signed integer into the -(2^p),(2^p-1) range. - /// value to clip - /// bit position to clip at - /// clipped value - public static int av_clip_intp2_c(int @a, int @p) - { - if ((((uint)a + (1 << p)) & ~((2 << p) - 1)) != 0) - return (a >> 31) ^ ((1 << p) - 1); - else - return a; - } - // original body hash: /qM73AkEE6w4/NOhpvKw1SVRZPxbN61+Yqc3i9L/2bM= - - - /// Clip a signed integer value into the 0-65535 range. - /// value to clip - /// clipped value - public static ushort av_clip_uint16_c(int @a) - { - if ((a & (~65535)) != 0) - return (ushort)((~a) >> 31); - else - return (ushort)a; - } - // original body hash: nI5Vkw30nAjS2NmNSdCSnHeAUcY47XT0lnrnsUK/bJ4= - - - /// Clip a signed integer value into the 0-255 range. - /// value to clip - /// clipped value - public static byte av_clip_uint8_c(int @a) - { - if ((a & (~255)) != 0) - return (byte)((~a) >> 31); - else - return (byte)a; - } - // original body hash: 32OGGgXBFRL7EcU8DizK9KbIFfU356+5hgUEyAOjIUY= - - - /// Clip a signed integer to an unsigned power of two range. 
- /// value to clip - /// bit position to clip at - /// clipped value - public static uint av_clip_uintp2_c(int @a, int @p) - { - if ((a & ~((1 << p) - 1)) != 0) - return (uint)((~a) >> 31 & ((1 << p) - 1)); - else - return (uint)a; - } - // original body hash: 01v+7HjG6Id/YAdTCeWBkPwvakfGiCosPM6u5MXI8pU= - - - /// Clip a signed 64bit integer value into the amin-amax range. - /// value to clip - /// minimum value of the clip range - /// maximum value of the clip range - /// clipped value - public static long av_clip64_c(long @a, long @amin, long @amax) - { - if (a < amin) - return amin; - else if (a > amax) - return amax; - else - return a; - } - // original body hash: FGSX8EvLhMgYqP9+0z1+Clej4HxjpENDPDX7uAYLx6k= - - - /// Clip a double value into the amin-amax range. If a is nan or -inf amin will be returned. If a is +inf amax will be returned. - /// value to clip - /// minimum value of the clip range - /// maximum value of the clip range - /// clipped value - public static double av_clipd_c(double @a, double @amin, double @amax) -{ - return ((((a) > (amin) ? (a) : (amin))) > (amax) ? (amax) : (((a) > (amin) ? (a) : (amin)))); -} - // original body hash: 3g76qefPWCYqXraY2vYdxoH58/EKn5EeR9v7cGEBM6Y= - - - /// Clip a float value into the amin-amax range. If a is nan or -inf amin will be returned. If a is +inf amax will be returned. - /// value to clip - /// minimum value of the clip range - /// maximum value of the clip range - /// clipped value - public static float av_clipf_c(float @a, float @amin, float @amax) -{ - return ((((a) > (amin) ? (a) : (amin))) > (amax) ? (amax) : (((a) > (amin) ? (a) : (amin)))); -} - // original body hash: 3g76qefPWCYqXraY2vYdxoH58/EKn5EeR9v7cGEBM6Y= - - - /// Clip a signed 64-bit integer value into the -2147483648,2147483647 range. 
- /// value to clip - /// clipped value - public static int av_clipl_int32_c(long @a) - { - if ((((ulong)a + 2147483648UL) & ~(4294967295UL)) != 0) - return (int)((a >> 63) ^ 2147483647); - else - return (int)a; - } - // original body hash: 00dWv9FNYsEeRh1lPjYlSw3TQiOlthet3Kyi6z91Hbo= - - - /// Compare two rationals. - /// First rational - /// Second rational - /// One of the following values: - 0 if `a == b` - 1 if `a > b` - -1 if `a < b` - `INT_MIN` if one of the values is of the form `0 / 0` - public static int av_cmp_q(AVRational @a, AVRational @b) - { - long tmp = a.num * (long)b.den - b.num * (long)a.den; - if (tmp != 0) - return (int)((tmp ^ a.den ^ b.den) >> 63) | 1; - else if (b.den != 0 && a.den != 0) - return 0; - else if (a.num != 0 && b.num != 0) - return (a.num >> 31) - (b.num >> 31); - else - return (-2147483647 - 1); - } - // original body hash: M+RGb5gXGdDjfY/gK5ZeCYeYrZAxjTXZA9+XVu0I66Q= - - - /// Reinterpret a double as a 64-bit integer. - public static ulong av_double2int(double @f) - { - return (ulong)@f; - } - // original body hash: 2HuHK8WLchm3u+cK6H4QWhflx2JqfewtaSpj2Cwfi8M= - - - /// Reinterpret a float as a 32-bit integer. - public static uint av_float2int(float @f) - { - return (uint)@f; - } - // original body hash: uBvsHd8EeFnxDvSdDE1+k5Um29kCuf0aEJhAvDy0wZk= - - - /// Reinterpret a 64-bit integer as a double. - public static double av_int2double(ulong @i) - { - return (double)@i; - } - // original body hash: iFt3hVHTpF9jjqIGAAf/c7FrGfenOXGxdsyMjmrbwvw= - - - /// Reinterpret a 32-bit integer as a float. - public static float av_int2float(uint @i) - { - return (float)@i; - } - // original body hash: wLGFPpW+aIvxW79y6BVY1LKz/j7yc3BdiaJ7mD4oQmw= - - - /// Invert a rational. 
- /// value - /// 1 / q - public static AVRational av_inv_q(AVRational @q) - { - var r = new AVRational { @num = q.den, @den = q.num }; - return r; - } - // original body hash: sXbO4D7vmayAx56EFqz9C0kakcSPSryJHdk0hr0MOFY= - - - /// Fill the provided buffer with a string containing an error string corresponding to the AVERROR code errnum. - /// a buffer - /// size in bytes of errbuf - /// error code to describe - /// the buffer in input, filled with the error description - public static byte* av_make_error_string(byte* @errbuf, ulong @errbuf_size, int @errnum) - { - av_strerror(errnum, errbuf, errbuf_size); - return errbuf; - } - // original body hash: DRHQHyLQNo9pTxA+wRw4zVDrC7Md1u3JWawQX0BVkqE= - - - /// Create an AVRational. - public static AVRational av_make_q(int @num, int @den) - { - var r = new AVRational { @num = num, @den = den }; - return r; - } - // original body hash: IAPYNNcg3GX0PGxINeLQhb41dH921lPVKcnqxCk7ERA= - - - /// Clear high bits from an unsigned integer starting with specific bit position - /// value to clip - /// bit position to clip at - /// clipped value - public static uint av_mod_uintp2_c(uint @a, uint @p) - { - return a & (uint)((1 << (int)p) - 1); - } - // original body hash: ncn4Okxr9Nas1g/qCfpRHKtywuNmJuf3UED+o3wjadc= - - - public static int av_parity_c(uint @v) - { - return av_popcount_c(v) & 1; - } - // original body hash: Hsrq5CWkNvuNTnqES92ZJYVYpKXFwosrZNja/oaUd0s= - - - /// Count number of bits set to one in x - /// value to count bits of - /// the number of bits set to one in x - public static int av_popcount_c(uint @x) - { - x -= (x >> 1) & 1431655765; - x = (x & 858993459) + ((x >> 2) & 858993459); - x = (x + (x >> 4)) & 252645135; - x += x >> 8; - return (int)((x + (x >> 16)) & 63); - } - // original body hash: 6EqV8Ll7t/MGINV9Nh3TSEbNyUYeskm7HucpU0SAkgg= - - - /// Count number of bits set to one in x - /// value to count bits of - /// the number of bits set to one in x - public static int av_popcount64_c(ulong @x) - { - return 
av_popcount_c((uint)x) + av_popcount_c((uint)(x >> 32)); - } - // original body hash: 4wjPAKU9R0yS6OI8Y9h3L6de+uXt/lBm+zX7t5Ch18k= - - - /// Convert an AVRational to a `double`. - /// AVRational to convert - /// `a` in floating-point form - public static double av_q2d(AVRational @a) - { - return a.num / (double)a.den; - } - // original body hash: j4R2BS8nF6czcUDVk5kKi9nLEdlTI/NRDYtnc1KFeyE= - - - /// Add two signed 32-bit values with saturation. - /// one value - /// another value - /// sum with signed saturation - public static int av_sat_add32_c(int @a, int @b) - { - return av_clipl_int32_c((long)a + b); - } - // original body hash: GAAy4GsS2n+9kJ/8hzuONPUOGIsiOj7PvXnLHUVrimY= - - - /// Add two signed 64-bit values with saturation. - /// one value - /// another value - /// sum with signed saturation - public static long av_sat_add64_c(long @a, long @b) - { - try - { - return @a + @b; - } - catch (OverflowException) - { - return ((double)@a +(double)@b) > 0d ? long.MaxValue : long.MinValue; - } - } - // original body hash: qeup76rp1rjakhMYQJWWEYIkpgscUcDfzDIrjyqk5iM= - - - /// Add a doubled value to another value with saturation at both stages. - /// first value - /// value doubled and added to a - /// sum sat(a + sat(2*b)) with signed saturation - public static int av_sat_dadd32_c(int @a, int @b) - { - return av_sat_add32_c(a, av_sat_add32_c(b, b)); - } - // original body hash: Kbha6XFULk7dxB6zc5WRwoPczQVN7HBcNs9Hjlj/Caw= - - - /// Subtract a doubled value from another value with saturation at both stages. - /// first value - /// value doubled and subtracted from a - /// difference sat(a - sat(2*b)) with signed saturation - public static int av_sat_dsub32_c(int @a, int @b) - { - return av_sat_sub32_c(a, av_sat_add32_c(b, b)); - } - // original body hash: ypu4i+30n3CeMxdL8pq7XDYAFBi1N5d2mkIT6zQ1bO0= - - - /// Subtract two signed 32-bit values with saturation. 
- /// one value - /// another value - /// difference with signed saturation - public static int av_sat_sub32_c(int @a, int @b) - { - return av_clipl_int32_c((long)a - b); - } - // original body hash: /tgXI2zbIgliqOwZbpnq7jSiVj0N70RjBFsbkIkWhsM= - - - /// Subtract two signed 64-bit values with saturation. - /// one value - /// another value - /// difference with signed saturation - public static long av_sat_sub64_c(long @a, long @b) - { - try - { - return @a - @b; - } - catch (OverflowException) - { - return ((double) @a - (double) @b) > 0d ? long.MaxValue : long.MinValue; - } - } - // original body hash: 6YrSxDrYVG1ac1wlCiXKMhTwj7Kx6eym/YtspKusrGk= - - - /// Return x default pointer in case p is NULL. - public static void* av_x_if_null(void* @p, void* @x) - { - return (void*)(p != null ? p : x); - } - // original body hash: zOY924eIk3VeTSNb9XcE2Yw8aZ4/jlzQSfP06k5n0nU= - - - /// ftell() equivalent for AVIOContext. - /// position or AVERROR. - public static long avio_tell(AVIOContext* @s) - { - return avio_seek(s, 0, 1); - } - // original body hash: o18c3ypeh9EsmYaplTel2ssgM2PZKTTDfMjsqEopycw= - - - } -} diff --git a/FFmpeg.AutoGen/FFmpeg.libraries.g.cs b/FFmpeg.AutoGen/FFmpeg.libraries.g.cs deleted file mode 100644 index da8e9b0c..00000000 --- a/FFmpeg.AutoGen/FFmpeg.libraries.g.cs +++ /dev/null @@ -1,22 +0,0 @@ -using System; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen -{ - using System.Collections.Generic; - - public unsafe static partial class ffmpeg - { - public static Dictionary LibraryVersionMap = new Dictionary - { - {"avcodec", 59}, - {"avdevice", 59}, - {"avfilter", 8}, - {"avformat", 59}, - {"avutil", 57}, - {"postproc", 56}, - {"swresample", 4}, - {"swscale", 6}, - }; - } -} diff --git a/FFmpeg.AutoGen/FFmpeg.macros.g.cs b/FFmpeg.AutoGen/FFmpeg.macros.g.cs deleted file mode 100644 index 1214ac9c..00000000 --- a/FFmpeg.AutoGen/FFmpeg.macros.g.cs +++ /dev/null @@ -1,1691 +0,0 @@ -using System; -using System.Runtime.InteropServices; - 
-namespace FFmpeg.AutoGen -{ - public unsafe static partial class ffmpeg - { - /// _WIN32_WINNT = 0x602 - public const int _WIN32_WINNT = 0x602; - // public static attribute_deprecated = __declspec(deprecated); - // public static av_alias = __attribute__((may_alias)); - // public static av_alloc_size = (...); - // public static av_always_inline = __forceinline; - /// AV_BUFFER_FLAG_READONLY = (1 << 0) - public const int AV_BUFFER_FLAG_READONLY = 0x1 << 0x0; - /// AV_BUFFERSINK_FLAG_NO_REQUEST = 0x2 - public const int AV_BUFFERSINK_FLAG_NO_REQUEST = 0x2; - /// AV_BUFFERSINK_FLAG_PEEK = 0x1 - public const int AV_BUFFERSINK_FLAG_PEEK = 0x1; - // public static av_builtin_constant_p = __builtin_constant_p; - // public static av_ceil_log2 = av_ceil_log2_c; - // public static AV_CEIL_RSHIFT = (a,b) (!av_builtin_constant_p(b) ? -((-(a)) >> (b)) : ((a) + (1<<(b)) - 1) >> (b)); - /// AV_CH_BACK_CENTER = (1ULL << AV_CHAN_BACK_CENTER ) - public static readonly ulong AV_CH_BACK_CENTER = 0x1UL << 8; - /// AV_CH_BACK_LEFT = (1ULL << AV_CHAN_BACK_LEFT ) - public static readonly ulong AV_CH_BACK_LEFT = 0x1UL << 4; - /// AV_CH_BACK_RIGHT = (1ULL << AV_CHAN_BACK_RIGHT ) - public static readonly ulong AV_CH_BACK_RIGHT = 0x1UL << 5; - /// AV_CH_BOTTOM_FRONT_CENTER = (1ULL << AV_CHAN_BOTTOM_FRONT_CENTER ) - public static readonly ulong AV_CH_BOTTOM_FRONT_CENTER = 0x1UL << 38; - /// AV_CH_BOTTOM_FRONT_LEFT = (1ULL << AV_CHAN_BOTTOM_FRONT_LEFT ) - public static readonly ulong AV_CH_BOTTOM_FRONT_LEFT = 0x1UL << 39; - /// AV_CH_BOTTOM_FRONT_RIGHT = (1ULL << AV_CHAN_BOTTOM_FRONT_RIGHT ) - public static readonly ulong AV_CH_BOTTOM_FRONT_RIGHT = 0x1UL << 40; - /// AV_CH_FRONT_CENTER = (1ULL << AV_CHAN_FRONT_CENTER ) - public static readonly ulong AV_CH_FRONT_CENTER = 0x1UL << 2; - /// AV_CH_FRONT_LEFT = (1ULL << AV_CHAN_FRONT_LEFT ) - public static readonly ulong AV_CH_FRONT_LEFT = 0x1UL << 0; - /// AV_CH_FRONT_LEFT_OF_CENTER = (1ULL << AV_CHAN_FRONT_LEFT_OF_CENTER ) - public static readonly 
ulong AV_CH_FRONT_LEFT_OF_CENTER = 0x1UL << 6; - /// AV_CH_FRONT_RIGHT = (1ULL << AV_CHAN_FRONT_RIGHT ) - public static readonly ulong AV_CH_FRONT_RIGHT = 0x1UL << 1; - /// AV_CH_FRONT_RIGHT_OF_CENTER = (1ULL << AV_CHAN_FRONT_RIGHT_OF_CENTER) - public static readonly ulong AV_CH_FRONT_RIGHT_OF_CENTER = 0x1UL << 7; - /// AV_CH_LAYOUT_2_1 = (AV_CH_LAYOUT_STEREO|AV_CH_BACK_CENTER) - public static readonly ulong AV_CH_LAYOUT_2_1 = AV_CH_LAYOUT_STEREO | AV_CH_BACK_CENTER; - /// AV_CH_LAYOUT_2_2 = (AV_CH_LAYOUT_STEREO|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT) - public static readonly ulong AV_CH_LAYOUT_2_2 = AV_CH_LAYOUT_STEREO | AV_CH_SIDE_LEFT | AV_CH_SIDE_RIGHT; - /// AV_CH_LAYOUT_22POINT2 = (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER|AV_CH_BACK_CENTER|AV_CH_LOW_FREQUENCY_2|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT|AV_CH_TOP_FRONT_LEFT|AV_CH_TOP_FRONT_RIGHT|AV_CH_TOP_FRONT_CENTER|AV_CH_TOP_CENTER|AV_CH_TOP_BACK_LEFT|AV_CH_TOP_BACK_RIGHT|AV_CH_TOP_SIDE_LEFT|AV_CH_TOP_SIDE_RIGHT|AV_CH_TOP_BACK_CENTER|AV_CH_BOTTOM_FRONT_CENTER|AV_CH_BOTTOM_FRONT_LEFT|AV_CH_BOTTOM_FRONT_RIGHT) - public static readonly ulong AV_CH_LAYOUT_22POINT2 = AV_CH_LAYOUT_5POINT1_BACK | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER | AV_CH_BACK_CENTER | AV_CH_LOW_FREQUENCY_2 | AV_CH_SIDE_LEFT | AV_CH_SIDE_RIGHT | AV_CH_TOP_FRONT_LEFT | AV_CH_TOP_FRONT_RIGHT | AV_CH_TOP_FRONT_CENTER | AV_CH_TOP_CENTER | AV_CH_TOP_BACK_LEFT | AV_CH_TOP_BACK_RIGHT | AV_CH_TOP_SIDE_LEFT | AV_CH_TOP_SIDE_RIGHT | AV_CH_TOP_BACK_CENTER | AV_CH_BOTTOM_FRONT_CENTER | AV_CH_BOTTOM_FRONT_LEFT | AV_CH_BOTTOM_FRONT_RIGHT; - /// AV_CH_LAYOUT_2POINT1 = (AV_CH_LAYOUT_STEREO|AV_CH_LOW_FREQUENCY) - public static readonly ulong AV_CH_LAYOUT_2POINT1 = AV_CH_LAYOUT_STEREO | AV_CH_LOW_FREQUENCY; - /// AV_CH_LAYOUT_3POINT1 = (AV_CH_LAYOUT_SURROUND|AV_CH_LOW_FREQUENCY) - public static readonly ulong AV_CH_LAYOUT_3POINT1 = AV_CH_LAYOUT_SURROUND | AV_CH_LOW_FREQUENCY; - /// AV_CH_LAYOUT_4POINT0 = 
(AV_CH_LAYOUT_SURROUND|AV_CH_BACK_CENTER) - public static readonly ulong AV_CH_LAYOUT_4POINT0 = AV_CH_LAYOUT_SURROUND | AV_CH_BACK_CENTER; - /// AV_CH_LAYOUT_4POINT1 = (AV_CH_LAYOUT_4POINT0|AV_CH_LOW_FREQUENCY) - public static readonly ulong AV_CH_LAYOUT_4POINT1 = AV_CH_LAYOUT_4POINT0 | AV_CH_LOW_FREQUENCY; - /// AV_CH_LAYOUT_5POINT0 = (AV_CH_LAYOUT_SURROUND|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT) - public static readonly ulong AV_CH_LAYOUT_5POINT0 = AV_CH_LAYOUT_SURROUND | AV_CH_SIDE_LEFT | AV_CH_SIDE_RIGHT; - /// AV_CH_LAYOUT_5POINT0_BACK = (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT) - public static readonly ulong AV_CH_LAYOUT_5POINT0_BACK = AV_CH_LAYOUT_SURROUND | AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT; - /// AV_CH_LAYOUT_5POINT1 = (AV_CH_LAYOUT_5POINT0|AV_CH_LOW_FREQUENCY) - public static readonly ulong AV_CH_LAYOUT_5POINT1 = AV_CH_LAYOUT_5POINT0 | AV_CH_LOW_FREQUENCY; - /// AV_CH_LAYOUT_5POINT1_BACK = (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_LOW_FREQUENCY) - public static readonly ulong AV_CH_LAYOUT_5POINT1_BACK = AV_CH_LAYOUT_5POINT0_BACK | AV_CH_LOW_FREQUENCY; - /// AV_CH_LAYOUT_6POINT0 = (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_CENTER) - public static readonly ulong AV_CH_LAYOUT_6POINT0 = AV_CH_LAYOUT_5POINT0 | AV_CH_BACK_CENTER; - /// AV_CH_LAYOUT_6POINT0_FRONT = (AV_CH_LAYOUT_2_2|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER) - public static readonly ulong AV_CH_LAYOUT_6POINT0_FRONT = AV_CH_LAYOUT_2_2 | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER; - /// AV_CH_LAYOUT_6POINT1 = (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_CENTER) - public static readonly ulong AV_CH_LAYOUT_6POINT1 = AV_CH_LAYOUT_5POINT1 | AV_CH_BACK_CENTER; - /// AV_CH_LAYOUT_6POINT1_BACK = (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_BACK_CENTER) - public static readonly ulong AV_CH_LAYOUT_6POINT1_BACK = AV_CH_LAYOUT_5POINT1_BACK | AV_CH_BACK_CENTER; - /// AV_CH_LAYOUT_6POINT1_FRONT = (AV_CH_LAYOUT_6POINT0_FRONT|AV_CH_LOW_FREQUENCY) - public static readonly ulong AV_CH_LAYOUT_6POINT1_FRONT = 
AV_CH_LAYOUT_6POINT0_FRONT | AV_CH_LOW_FREQUENCY; - /// AV_CH_LAYOUT_7POINT0 = (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT) - public static readonly ulong AV_CH_LAYOUT_7POINT0 = AV_CH_LAYOUT_5POINT0 | AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT; - /// AV_CH_LAYOUT_7POINT0_FRONT = (AV_CH_LAYOUT_5POINT0|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER) - public static readonly ulong AV_CH_LAYOUT_7POINT0_FRONT = AV_CH_LAYOUT_5POINT0 | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER; - /// AV_CH_LAYOUT_7POINT1 = (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT) - public static readonly ulong AV_CH_LAYOUT_7POINT1 = AV_CH_LAYOUT_5POINT1 | AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT; - /// AV_CH_LAYOUT_7POINT1_WIDE = (AV_CH_LAYOUT_5POINT1|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER) - public static readonly ulong AV_CH_LAYOUT_7POINT1_WIDE = AV_CH_LAYOUT_5POINT1 | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER; - /// AV_CH_LAYOUT_7POINT1_WIDE_BACK = (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER) - public static readonly ulong AV_CH_LAYOUT_7POINT1_WIDE_BACK = AV_CH_LAYOUT_5POINT1_BACK | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER; - /// AV_CH_LAYOUT_HEXADECAGONAL = (AV_CH_LAYOUT_OCTAGONAL|AV_CH_WIDE_LEFT|AV_CH_WIDE_RIGHT|AV_CH_TOP_BACK_LEFT|AV_CH_TOP_BACK_RIGHT|AV_CH_TOP_BACK_CENTER|AV_CH_TOP_FRONT_CENTER|AV_CH_TOP_FRONT_LEFT|AV_CH_TOP_FRONT_RIGHT) - public static readonly ulong AV_CH_LAYOUT_HEXADECAGONAL = AV_CH_LAYOUT_OCTAGONAL | AV_CH_WIDE_LEFT | AV_CH_WIDE_RIGHT | AV_CH_TOP_BACK_LEFT | AV_CH_TOP_BACK_RIGHT | AV_CH_TOP_BACK_CENTER | AV_CH_TOP_FRONT_CENTER | AV_CH_TOP_FRONT_LEFT | AV_CH_TOP_FRONT_RIGHT; - /// AV_CH_LAYOUT_HEXAGONAL = (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_BACK_CENTER) - public static readonly ulong AV_CH_LAYOUT_HEXAGONAL = AV_CH_LAYOUT_5POINT0_BACK | AV_CH_BACK_CENTER; - /// AV_CH_LAYOUT_MONO = (AV_CH_FRONT_CENTER) - public static readonly ulong AV_CH_LAYOUT_MONO = 
AV_CH_FRONT_CENTER; - /// AV_CH_LAYOUT_NATIVE = 0x8000000000000000ULL - public const ulong AV_CH_LAYOUT_NATIVE = 0x8000000000000000UL; - /// AV_CH_LAYOUT_OCTAGONAL = (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_CENTER|AV_CH_BACK_RIGHT) - public static readonly ulong AV_CH_LAYOUT_OCTAGONAL = AV_CH_LAYOUT_5POINT0 | AV_CH_BACK_LEFT | AV_CH_BACK_CENTER | AV_CH_BACK_RIGHT; - /// AV_CH_LAYOUT_QUAD = (AV_CH_LAYOUT_STEREO|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT) - public static readonly ulong AV_CH_LAYOUT_QUAD = AV_CH_LAYOUT_STEREO | AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT; - /// AV_CH_LAYOUT_STEREO = (AV_CH_FRONT_LEFT|AV_CH_FRONT_RIGHT) - public static readonly ulong AV_CH_LAYOUT_STEREO = AV_CH_FRONT_LEFT | AV_CH_FRONT_RIGHT; - /// AV_CH_LAYOUT_STEREO_DOWNMIX = (AV_CH_STEREO_LEFT|AV_CH_STEREO_RIGHT) - public static readonly ulong AV_CH_LAYOUT_STEREO_DOWNMIX = AV_CH_STEREO_LEFT | AV_CH_STEREO_RIGHT; - /// AV_CH_LAYOUT_SURROUND = (AV_CH_LAYOUT_STEREO|AV_CH_FRONT_CENTER) - public static readonly ulong AV_CH_LAYOUT_SURROUND = AV_CH_LAYOUT_STEREO | AV_CH_FRONT_CENTER; - /// AV_CH_LOW_FREQUENCY = (1ULL << AV_CHAN_LOW_FREQUENCY ) - public static readonly ulong AV_CH_LOW_FREQUENCY = 0x1UL << 3; - /// AV_CH_LOW_FREQUENCY_2 = (1ULL << AV_CHAN_LOW_FREQUENCY_2 ) - public static readonly ulong AV_CH_LOW_FREQUENCY_2 = 0x1UL << 35; - /// AV_CH_SIDE_LEFT = (1ULL << AV_CHAN_SIDE_LEFT ) - public static readonly ulong AV_CH_SIDE_LEFT = 0x1UL << 9; - /// AV_CH_SIDE_RIGHT = (1ULL << AV_CHAN_SIDE_RIGHT ) - public static readonly ulong AV_CH_SIDE_RIGHT = 0x1UL << 10; - /// AV_CH_STEREO_LEFT = (1ULL << AV_CHAN_STEREO_LEFT ) - public static readonly ulong AV_CH_STEREO_LEFT = 0x1UL << 29; - /// AV_CH_STEREO_RIGHT = (1ULL << AV_CHAN_STEREO_RIGHT ) - public static readonly ulong AV_CH_STEREO_RIGHT = 0x1UL << 30; - /// AV_CH_SURROUND_DIRECT_LEFT = (1ULL << AV_CHAN_SURROUND_DIRECT_LEFT ) - public static readonly ulong AV_CH_SURROUND_DIRECT_LEFT = 0x1UL << 33; - /// AV_CH_SURROUND_DIRECT_RIGHT = (1ULL << 
AV_CHAN_SURROUND_DIRECT_RIGHT) - public static readonly ulong AV_CH_SURROUND_DIRECT_RIGHT = 0x1UL << 34; - /// AV_CH_TOP_BACK_CENTER = (1ULL << AV_CHAN_TOP_BACK_CENTER ) - public static readonly ulong AV_CH_TOP_BACK_CENTER = 0x1UL << 16; - /// AV_CH_TOP_BACK_LEFT = (1ULL << AV_CHAN_TOP_BACK_LEFT ) - public static readonly ulong AV_CH_TOP_BACK_LEFT = 0x1UL << 15; - /// AV_CH_TOP_BACK_RIGHT = (1ULL << AV_CHAN_TOP_BACK_RIGHT ) - public static readonly ulong AV_CH_TOP_BACK_RIGHT = 0x1UL << 17; - /// AV_CH_TOP_CENTER = (1ULL << AV_CHAN_TOP_CENTER ) - public static readonly ulong AV_CH_TOP_CENTER = 0x1UL << 11; - /// AV_CH_TOP_FRONT_CENTER = (1ULL << AV_CHAN_TOP_FRONT_CENTER ) - public static readonly ulong AV_CH_TOP_FRONT_CENTER = 0x1UL << 13; - /// AV_CH_TOP_FRONT_LEFT = (1ULL << AV_CHAN_TOP_FRONT_LEFT ) - public static readonly ulong AV_CH_TOP_FRONT_LEFT = 0x1UL << 12; - /// AV_CH_TOP_FRONT_RIGHT = (1ULL << AV_CHAN_TOP_FRONT_RIGHT ) - public static readonly ulong AV_CH_TOP_FRONT_RIGHT = 0x1UL << 14; - /// AV_CH_TOP_SIDE_LEFT = (1ULL << AV_CHAN_TOP_SIDE_LEFT ) - public static readonly ulong AV_CH_TOP_SIDE_LEFT = 0x1UL << 36; - /// AV_CH_TOP_SIDE_RIGHT = (1ULL << AV_CHAN_TOP_SIDE_RIGHT ) - public static readonly ulong AV_CH_TOP_SIDE_RIGHT = 0x1UL << 37; - /// AV_CH_WIDE_LEFT = (1ULL << AV_CHAN_WIDE_LEFT ) - public static readonly ulong AV_CH_WIDE_LEFT = 0x1UL << 31; - /// AV_CH_WIDE_RIGHT = (1ULL << AV_CHAN_WIDE_RIGHT ) - public static readonly ulong AV_CH_WIDE_RIGHT = 0x1UL << 32; - // public static AV_CHANNEL_LAYOUT_2_1 = AV_CHANNEL_LAYOUT_MASK(0x3, AV_CH_LAYOUT_2_1); - // public static AV_CHANNEL_LAYOUT_2_2 = AV_CHANNEL_LAYOUT_MASK(0x4, AV_CH_LAYOUT_2_2); - // public static AV_CHANNEL_LAYOUT_22POINT2 = AV_CHANNEL_LAYOUT_MASK(0x18, AV_CH_LAYOUT_22POINT2); - // public static AV_CHANNEL_LAYOUT_2POINT1 = AV_CHANNEL_LAYOUT_MASK(0x3, AV_CH_LAYOUT_2POINT1); - // public static AV_CHANNEL_LAYOUT_3POINT1 = AV_CHANNEL_LAYOUT_MASK(0x4, AV_CH_LAYOUT_3POINT1); - // public static 
AV_CHANNEL_LAYOUT_4POINT0 = AV_CHANNEL_LAYOUT_MASK(0x4, AV_CH_LAYOUT_4POINT0); - // public static AV_CHANNEL_LAYOUT_4POINT1 = AV_CHANNEL_LAYOUT_MASK(0x5, AV_CH_LAYOUT_4POINT1); - // public static AV_CHANNEL_LAYOUT_5POINT0 = AV_CHANNEL_LAYOUT_MASK(0x5, AV_CH_LAYOUT_5POINT0); - // public static AV_CHANNEL_LAYOUT_5POINT0_BACK = AV_CHANNEL_LAYOUT_MASK(0x5, AV_CH_LAYOUT_5POINT0_BACK); - // public static AV_CHANNEL_LAYOUT_5POINT1 = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_5POINT1); - // public static AV_CHANNEL_LAYOUT_5POINT1_BACK = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_5POINT1_BACK); - // public static AV_CHANNEL_LAYOUT_6POINT0 = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_6POINT0); - // public static AV_CHANNEL_LAYOUT_6POINT0_FRONT = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_6POINT0_FRONT); - // public static AV_CHANNEL_LAYOUT_6POINT1 = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_6POINT1); - // public static AV_CHANNEL_LAYOUT_6POINT1_BACK = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_6POINT1_BACK); - // public static AV_CHANNEL_LAYOUT_6POINT1_FRONT = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_6POINT1_FRONT); - // public static AV_CHANNEL_LAYOUT_7POINT0 = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_7POINT0); - // public static AV_CHANNEL_LAYOUT_7POINT0_FRONT = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_7POINT0_FRONT); - // public static AV_CHANNEL_LAYOUT_7POINT1 = AV_CHANNEL_LAYOUT_MASK(0x8, AV_CH_LAYOUT_7POINT1); - // public static AV_CHANNEL_LAYOUT_7POINT1_WIDE = AV_CHANNEL_LAYOUT_MASK(0x8, AV_CH_LAYOUT_7POINT1_WIDE); - // public static AV_CHANNEL_LAYOUT_7POINT1_WIDE_BACK = AV_CHANNEL_LAYOUT_MASK(0x8, AV_CH_LAYOUT_7POINT1_WIDE_BACK); - // public static AV_CHANNEL_LAYOUT_AMBISONIC_FIRST_ORDER = { .order = AV_CHANNEL_ORDER_AMBISONIC, .nb_channels = 4, .u = { .mask = 0 }}; - // public static AV_CHANNEL_LAYOUT_HEXADECAGONAL = AV_CHANNEL_LAYOUT_MASK(0x10, AV_CH_LAYOUT_HEXADECAGONAL); - // public static AV_CHANNEL_LAYOUT_HEXAGONAL = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_HEXAGONAL); - 
// public static AV_CHANNEL_LAYOUT_MASK = nb; - // public static AV_CHANNEL_LAYOUT_MONO = AV_CHANNEL_LAYOUT_MASK(0x1, AV_CH_LAYOUT_MONO); - // public static AV_CHANNEL_LAYOUT_OCTAGONAL = AV_CHANNEL_LAYOUT_MASK(0x8, AV_CH_LAYOUT_OCTAGONAL); - // public static AV_CHANNEL_LAYOUT_QUAD = AV_CHANNEL_LAYOUT_MASK(0x4, AV_CH_LAYOUT_QUAD); - // public static AV_CHANNEL_LAYOUT_STEREO = AV_CHANNEL_LAYOUT_MASK(0x2, AV_CH_LAYOUT_STEREO); - // public static AV_CHANNEL_LAYOUT_STEREO_DOWNMIX = AV_CHANNEL_LAYOUT_MASK(0x2, AV_CH_LAYOUT_STEREO_DOWNMIX); - // public static AV_CHANNEL_LAYOUT_SURROUND = AV_CHANNEL_LAYOUT_MASK(0x3, AV_CH_LAYOUT_SURROUND); - // public static av_clip = av_clip_c; - // public static av_clip_int16 = av_clip_int16_c; - // public static av_clip_int8 = av_clip_int8_c; - // public static av_clip_intp2 = av_clip_intp2_c; - // public static av_clip_uint16 = av_clip_uint16_c; - // public static av_clip_uint8 = av_clip_uint8_c; - // public static av_clip_uintp2 = av_clip_uintp2_c; - // public static av_clip64 = av_clip64_c; - // public static av_clipd = av_clipd_c; - // public static av_clipf = av_clipf_c; - // public static av_clipl_int32 = av_clipl_int32_c; - /// AV_CODEC_CAP_AUTO_THREADS = AV_CODEC_CAP_OTHER_THREADS - public const int AV_CODEC_CAP_AUTO_THREADS = AV_CODEC_CAP_OTHER_THREADS; - /// AV_CODEC_CAP_AVOID_PROBING = (1 << 17) - public const int AV_CODEC_CAP_AVOID_PROBING = 0x1 << 0x11; - /// AV_CODEC_CAP_CHANNEL_CONF = (1 << 10) - public const int AV_CODEC_CAP_CHANNEL_CONF = 0x1 << 0xa; - /// AV_CODEC_CAP_DELAY = (1 << 5) - public const int AV_CODEC_CAP_DELAY = 0x1 << 0x5; - /// AV_CODEC_CAP_DR1 = (1 << 1) - public const int AV_CODEC_CAP_DR1 = 0x1 << 0x1; - /// AV_CODEC_CAP_DRAW_HORIZ_BAND = (1 << 0) - public const int AV_CODEC_CAP_DRAW_HORIZ_BAND = 0x1 << 0x0; - /// AV_CODEC_CAP_ENCODER_FLUSH = (1 << 21) - public const int AV_CODEC_CAP_ENCODER_FLUSH = 0x1 << 0x15; - /// AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE = (1 << 20) - public const int 
AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE = 0x1 << 0x14; - /// AV_CODEC_CAP_EXPERIMENTAL = (1 << 9) - public const int AV_CODEC_CAP_EXPERIMENTAL = 0x1 << 0x9; - /// AV_CODEC_CAP_FRAME_THREADS = (1 << 12) - public const int AV_CODEC_CAP_FRAME_THREADS = 0x1 << 0xc; - /// AV_CODEC_CAP_HARDWARE = (1 << 18) - public const int AV_CODEC_CAP_HARDWARE = 0x1 << 0x12; - /// AV_CODEC_CAP_HYBRID = (1 << 19) - public const int AV_CODEC_CAP_HYBRID = 0x1 << 0x13; - /// AV_CODEC_CAP_INTRA_ONLY = 0x40000000 - public const int AV_CODEC_CAP_INTRA_ONLY = 0x40000000; - /// AV_CODEC_CAP_LOSSLESS = 0x80000000 - public const uint AV_CODEC_CAP_LOSSLESS = 0x80000000U; - /// AV_CODEC_CAP_OTHER_THREADS = (1 << 15) - public const int AV_CODEC_CAP_OTHER_THREADS = 0x1 << 0xf; - /// AV_CODEC_CAP_PARAM_CHANGE = (1 << 14) - public const int AV_CODEC_CAP_PARAM_CHANGE = 0x1 << 0xe; - /// AV_CODEC_CAP_SLICE_THREADS = (1 << 13) - public const int AV_CODEC_CAP_SLICE_THREADS = 0x1 << 0xd; - /// AV_CODEC_CAP_SMALL_LAST_FRAME = (1 << 6) - public const int AV_CODEC_CAP_SMALL_LAST_FRAME = 0x1 << 0x6; - /// AV_CODEC_CAP_SUBFRAMES = (1 << 8) - public const int AV_CODEC_CAP_SUBFRAMES = 0x1 << 0x8; - /// AV_CODEC_CAP_TRUNCATED = (1 << 3) - public const int AV_CODEC_CAP_TRUNCATED = 0x1 << 0x3; - /// AV_CODEC_CAP_VARIABLE_FRAME_SIZE = (1 << 16) - public const int AV_CODEC_CAP_VARIABLE_FRAME_SIZE = 0x1 << 0x10; - /// AV_CODEC_EXPORT_DATA_FILM_GRAIN = 0x1 << 0x3 - public const int AV_CODEC_EXPORT_DATA_FILM_GRAIN = 0x1 << 0x3; - /// AV_CODEC_EXPORT_DATA_MVS = 0x1 << 0x0 - public const int AV_CODEC_EXPORT_DATA_MVS = 0x1 << 0x0; - /// AV_CODEC_EXPORT_DATA_PRFT = 0x1 << 0x1 - public const int AV_CODEC_EXPORT_DATA_PRFT = 0x1 << 0x1; - /// AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS = 0x1 << 0x2 - public const int AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS = 0x1 << 0x2; - /// AV_CODEC_FLAG_4MV = 0x1 << 0x2 - public const int AV_CODEC_FLAG_4MV = 0x1 << 0x2; - /// AV_CODEC_FLAG_AC_PRED = 0x1 << 0x18 - public const int AV_CODEC_FLAG_AC_PRED 
= 0x1 << 0x18; - /// AV_CODEC_FLAG_BITEXACT = 0x1 << 0x17 - public const int AV_CODEC_FLAG_BITEXACT = 0x1 << 0x17; - /// AV_CODEC_FLAG_CLOSED_GOP = 0x1U << 0x1f - public const uint AV_CODEC_FLAG_CLOSED_GOP = 0x1U << 0x1f; - /// AV_CODEC_FLAG_DROPCHANGED = 0x1 << 0x5 - public const int AV_CODEC_FLAG_DROPCHANGED = 0x1 << 0x5; - /// AV_CODEC_FLAG_GLOBAL_HEADER = 0x1 << 0x16 - public const int AV_CODEC_FLAG_GLOBAL_HEADER = 0x1 << 0x16; - /// AV_CODEC_FLAG_GRAY = 0x1 << 0xd - public const int AV_CODEC_FLAG_GRAY = 0x1 << 0xd; - /// AV_CODEC_FLAG_INTERLACED_DCT = 0x1 << 0x12 - public const int AV_CODEC_FLAG_INTERLACED_DCT = 0x1 << 0x12; - /// AV_CODEC_FLAG_INTERLACED_ME = 0x1 << 0x1d - public const int AV_CODEC_FLAG_INTERLACED_ME = 0x1 << 0x1d; - /// AV_CODEC_FLAG_LOOP_FILTER = 0x1 << 0xb - public const int AV_CODEC_FLAG_LOOP_FILTER = 0x1 << 0xb; - /// AV_CODEC_FLAG_LOW_DELAY = 0x1 << 0x13 - public const int AV_CODEC_FLAG_LOW_DELAY = 0x1 << 0x13; - /// AV_CODEC_FLAG_OUTPUT_CORRUPT = 0x1 << 0x3 - public const int AV_CODEC_FLAG_OUTPUT_CORRUPT = 0x1 << 0x3; - /// AV_CODEC_FLAG_PASS1 = 0x1 << 0x9 - public const int AV_CODEC_FLAG_PASS1 = 0x1 << 0x9; - /// AV_CODEC_FLAG_PASS2 = 0x1 << 0xa - public const int AV_CODEC_FLAG_PASS2 = 0x1 << 0xa; - /// AV_CODEC_FLAG_PSNR = 0x1 << 0xf - public const int AV_CODEC_FLAG_PSNR = 0x1 << 0xf; - /// AV_CODEC_FLAG_QPEL = 0x1 << 0x4 - public const int AV_CODEC_FLAG_QPEL = 0x1 << 0x4; - /// AV_CODEC_FLAG_QSCALE = 0x1 << 0x1 - public const int AV_CODEC_FLAG_QSCALE = 0x1 << 0x1; - /// AV_CODEC_FLAG_TRUNCATED = 0x1 << 0x10 - public const int AV_CODEC_FLAG_TRUNCATED = 0x1 << 0x10; - /// AV_CODEC_FLAG_UNALIGNED = 0x1 << 0x0 - public const int AV_CODEC_FLAG_UNALIGNED = 0x1 << 0x0; - /// AV_CODEC_FLAG2_CHUNKS = 0x1 << 0xf - public const int AV_CODEC_FLAG2_CHUNKS = 0x1 << 0xf; - /// AV_CODEC_FLAG2_DROP_FRAME_TIMECODE = 0x1 << 0xd - public const int AV_CODEC_FLAG2_DROP_FRAME_TIMECODE = 0x1 << 0xd; - /// AV_CODEC_FLAG2_EXPORT_MVS = 0x1 << 0x1c - public 
const int AV_CODEC_FLAG2_EXPORT_MVS = 0x1 << 0x1c; - /// AV_CODEC_FLAG2_FAST = 0x1 << 0x0 - public const int AV_CODEC_FLAG2_FAST = 0x1 << 0x0; - /// AV_CODEC_FLAG2_IGNORE_CROP = 0x1 << 0x10 - public const int AV_CODEC_FLAG2_IGNORE_CROP = 0x1 << 0x10; - /// AV_CODEC_FLAG2_LOCAL_HEADER = 0x1 << 0x3 - public const int AV_CODEC_FLAG2_LOCAL_HEADER = 0x1 << 0x3; - /// AV_CODEC_FLAG2_NO_OUTPUT = 0x1 << 0x2 - public const int AV_CODEC_FLAG2_NO_OUTPUT = 0x1 << 0x2; - /// AV_CODEC_FLAG2_RO_FLUSH_NOOP = 0x1 << 0x1e - public const int AV_CODEC_FLAG2_RO_FLUSH_NOOP = 0x1 << 0x1e; - /// AV_CODEC_FLAG2_SHOW_ALL = 0x1 << 0x16 - public const int AV_CODEC_FLAG2_SHOW_ALL = 0x1 << 0x16; - /// AV_CODEC_FLAG2_SKIP_MANUAL = 0x1 << 0x1d - public const int AV_CODEC_FLAG2_SKIP_MANUAL = 0x1 << 0x1d; - /// AV_CODEC_ID_H265 = AV_CODEC_ID_HEVC - public static readonly int AV_CODEC_ID_H265 = 173; - /// AV_CODEC_ID_H266 = AV_CODEC_ID_VVC - public static readonly int AV_CODEC_ID_H266 = 196; - /// AV_CODEC_ID_IFF_BYTERUN1 = AV_CODEC_ID_IFF_ILBM - public static readonly int AV_CODEC_ID_IFF_BYTERUN1 = 136; - /// AV_CODEC_PROP_BITMAP_SUB = 0x1 << 0x10 - public const int AV_CODEC_PROP_BITMAP_SUB = 0x1 << 0x10; - /// AV_CODEC_PROP_INTRA_ONLY = 0x1 << 0x0 - public const int AV_CODEC_PROP_INTRA_ONLY = 0x1 << 0x0; - /// AV_CODEC_PROP_LOSSLESS = 0x1 << 0x2 - public const int AV_CODEC_PROP_LOSSLESS = 0x1 << 0x2; - /// AV_CODEC_PROP_LOSSY = 0x1 << 0x1 - public const int AV_CODEC_PROP_LOSSY = 0x1 << 0x1; - /// AV_CODEC_PROP_REORDER = 0x1 << 0x3 - public const int AV_CODEC_PROP_REORDER = 0x1 << 0x3; - /// AV_CODEC_PROP_TEXT_SUB = 0x1 << 0x11 - public const int AV_CODEC_PROP_TEXT_SUB = 0x1 << 0x11; - // public static av_cold = __attribute__((cold)); - // public static av_const = __attribute__((const)); - /// AV_CPU_FLAG_3DNOW = 0x4 - public const int AV_CPU_FLAG_3DNOW = 0x4; - /// AV_CPU_FLAG_3DNOWEXT = 0x20 - public const int AV_CPU_FLAG_3DNOWEXT = 0x20; - /// AV_CPU_FLAG_AESNI = 0x80000 - public const int 
AV_CPU_FLAG_AESNI = 0x80000; - /// AV_CPU_FLAG_ALTIVEC = 0x1 - public const int AV_CPU_FLAG_ALTIVEC = 0x1; - /// AV_CPU_FLAG_ARMV5TE = 0x1 << 0x0 - public const int AV_CPU_FLAG_ARMV5TE = 0x1 << 0x0; - /// AV_CPU_FLAG_ARMV6 = 0x1 << 0x1 - public const int AV_CPU_FLAG_ARMV6 = 0x1 << 0x1; - /// AV_CPU_FLAG_ARMV6T2 = 0x1 << 0x2 - public const int AV_CPU_FLAG_ARMV6T2 = 0x1 << 0x2; - /// AV_CPU_FLAG_ARMV8 = 0x1 << 0x6 - public const int AV_CPU_FLAG_ARMV8 = 0x1 << 0x6; - /// AV_CPU_FLAG_ATOM = 0x10000000 - public const int AV_CPU_FLAG_ATOM = 0x10000000; - /// AV_CPU_FLAG_AVX = 0x4000 - public const int AV_CPU_FLAG_AVX = 0x4000; - /// AV_CPU_FLAG_AVX2 = 0x8000 - public const int AV_CPU_FLAG_AVX2 = 0x8000; - /// AV_CPU_FLAG_AVX512 = 0x100000 - public const int AV_CPU_FLAG_AVX512 = 0x100000; - /// AV_CPU_FLAG_AVX512ICL = 0x200000 - public const int AV_CPU_FLAG_AVX512ICL = 0x200000; - /// AV_CPU_FLAG_AVXSLOW = 0x8000000 - public const int AV_CPU_FLAG_AVXSLOW = 0x8000000; - /// AV_CPU_FLAG_BMI1 = 0x20000 - public const int AV_CPU_FLAG_BMI1 = 0x20000; - /// AV_CPU_FLAG_BMI2 = 0x40000 - public const int AV_CPU_FLAG_BMI2 = 0x40000; - /// AV_CPU_FLAG_CMOV = 0x1000 - public const int AV_CPU_FLAG_CMOV = 0x1000; - /// AV_CPU_FLAG_FMA3 = 0x10000 - public const int AV_CPU_FLAG_FMA3 = 0x10000; - /// AV_CPU_FLAG_FMA4 = 0x800 - public const int AV_CPU_FLAG_FMA4 = 0x800; - /// AV_CPU_FLAG_FORCE = 0x80000000U - public const uint AV_CPU_FLAG_FORCE = 0x80000000U; - /// AV_CPU_FLAG_LASX = 0x1 << 0x1 - public const int AV_CPU_FLAG_LASX = 0x1 << 0x1; - /// AV_CPU_FLAG_LSX = 0x1 << 0x0 - public const int AV_CPU_FLAG_LSX = 0x1 << 0x0; - /// AV_CPU_FLAG_MMI = 0x1 << 0x0 - public const int AV_CPU_FLAG_MMI = 0x1 << 0x0; - /// AV_CPU_FLAG_MMX = 0x1 - public const int AV_CPU_FLAG_MMX = 0x1; - /// AV_CPU_FLAG_MMX2 = 0x2 - public const int AV_CPU_FLAG_MMX2 = 0x2; - /// AV_CPU_FLAG_MMXEXT = 0x2 - public const int AV_CPU_FLAG_MMXEXT = 0x2; - /// AV_CPU_FLAG_MSA = 0x1 << 0x1 - public const int 
AV_CPU_FLAG_MSA = 0x1 << 0x1; - /// AV_CPU_FLAG_NEON = 0x1 << 0x5 - public const int AV_CPU_FLAG_NEON = 0x1 << 0x5; - /// AV_CPU_FLAG_POWER8 = 0x4 - public const int AV_CPU_FLAG_POWER8 = 0x4; - /// AV_CPU_FLAG_SETEND = 0x1 << 0x10 - public const int AV_CPU_FLAG_SETEND = 0x1 << 0x10; - /// AV_CPU_FLAG_SLOW_GATHER = 0x2000000 - public const int AV_CPU_FLAG_SLOW_GATHER = 0x2000000; - /// AV_CPU_FLAG_SSE = 0x8 - public const int AV_CPU_FLAG_SSE = 0x8; - /// AV_CPU_FLAG_SSE2 = 0x10 - public const int AV_CPU_FLAG_SSE2 = 0x10; - /// AV_CPU_FLAG_SSE2SLOW = 0x40000000 - public const int AV_CPU_FLAG_SSE2SLOW = 0x40000000; - /// AV_CPU_FLAG_SSE3 = 0x40 - public const int AV_CPU_FLAG_SSE3 = 0x40; - /// AV_CPU_FLAG_SSE3SLOW = 0x20000000 - public const int AV_CPU_FLAG_SSE3SLOW = 0x20000000; - /// AV_CPU_FLAG_SSE4 = 0x100 - public const int AV_CPU_FLAG_SSE4 = 0x100; - /// AV_CPU_FLAG_SSE42 = 0x200 - public const int AV_CPU_FLAG_SSE42 = 0x200; - /// AV_CPU_FLAG_SSSE3 = 0x80 - public const int AV_CPU_FLAG_SSSE3 = 0x80; - /// AV_CPU_FLAG_SSSE3SLOW = 0x4000000 - public const int AV_CPU_FLAG_SSSE3SLOW = 0x4000000; - /// AV_CPU_FLAG_VFP = 0x1 << 0x3 - public const int AV_CPU_FLAG_VFP = 0x1 << 0x3; - /// AV_CPU_FLAG_VFP_VM = 0x1 << 0x7 - public const int AV_CPU_FLAG_VFP_VM = 0x1 << 0x7; - /// AV_CPU_FLAG_VFPV3 = 0x1 << 0x4 - public const int AV_CPU_FLAG_VFPV3 = 0x1 << 0x4; - /// AV_CPU_FLAG_VSX = 0x2 - public const int AV_CPU_FLAG_VSX = 0x2; - /// AV_CPU_FLAG_XOP = 0x400 - public const int AV_CPU_FLAG_XOP = 0x400; - /// AV_DICT_APPEND = 32 - public const int AV_DICT_APPEND = 0x20; - /// AV_DICT_DONT_OVERWRITE = 16 - public const int AV_DICT_DONT_OVERWRITE = 0x10; - /// AV_DICT_DONT_STRDUP_KEY = 4 - public const int AV_DICT_DONT_STRDUP_KEY = 0x4; - /// AV_DICT_DONT_STRDUP_VAL = 8 - public const int AV_DICT_DONT_STRDUP_VAL = 0x8; - /// AV_DICT_IGNORE_SUFFIX = 2 - public const int AV_DICT_IGNORE_SUFFIX = 0x2; - /// AV_DICT_MATCH_CASE = 1 - public const int AV_DICT_MATCH_CASE = 0x1; - /// 
AV_DICT_MULTIKEY = 64 - public const int AV_DICT_MULTIKEY = 0x40; - /// AV_DISPOSITION_ATTACHED_PIC = (1 << 10) - public const int AV_DISPOSITION_ATTACHED_PIC = 0x1 << 0xa; - /// AV_DISPOSITION_CAPTIONS = (1 << 16) - public const int AV_DISPOSITION_CAPTIONS = 0x1 << 0x10; - /// AV_DISPOSITION_CLEAN_EFFECTS = (1 << 9) - public const int AV_DISPOSITION_CLEAN_EFFECTS = 0x1 << 0x9; - /// AV_DISPOSITION_COMMENT = (1 << 3) - public const int AV_DISPOSITION_COMMENT = 0x1 << 0x3; - /// AV_DISPOSITION_DEFAULT = (1 << 0) - public const int AV_DISPOSITION_DEFAULT = 0x1 << 0x0; - /// AV_DISPOSITION_DEPENDENT = (1 << 19) - public const int AV_DISPOSITION_DEPENDENT = 0x1 << 0x13; - /// AV_DISPOSITION_DESCRIPTIONS = (1 << 17) - public const int AV_DISPOSITION_DESCRIPTIONS = 0x1 << 0x11; - /// AV_DISPOSITION_DUB = (1 << 1) - public const int AV_DISPOSITION_DUB = 0x1 << 0x1; - /// AV_DISPOSITION_FORCED = (1 << 6) - public const int AV_DISPOSITION_FORCED = 0x1 << 0x6; - /// AV_DISPOSITION_HEARING_IMPAIRED = (1 << 7) - public const int AV_DISPOSITION_HEARING_IMPAIRED = 0x1 << 0x7; - /// AV_DISPOSITION_KARAOKE = (1 << 5) - public const int AV_DISPOSITION_KARAOKE = 0x1 << 0x5; - /// AV_DISPOSITION_LYRICS = (1 << 4) - public const int AV_DISPOSITION_LYRICS = 0x1 << 0x4; - /// AV_DISPOSITION_METADATA = (1 << 18) - public const int AV_DISPOSITION_METADATA = 0x1 << 0x12; - /// AV_DISPOSITION_NON_DIEGETIC = (1 << 12) - public const int AV_DISPOSITION_NON_DIEGETIC = 0x1 << 0xc; - /// AV_DISPOSITION_ORIGINAL = (1 << 2) - public const int AV_DISPOSITION_ORIGINAL = 0x1 << 0x2; - /// AV_DISPOSITION_STILL_IMAGE = (1 << 20) - public const int AV_DISPOSITION_STILL_IMAGE = 0x1 << 0x14; - /// AV_DISPOSITION_TIMED_THUMBNAILS = (1 << 11) - public const int AV_DISPOSITION_TIMED_THUMBNAILS = 0x1 << 0xb; - /// AV_DISPOSITION_VISUAL_IMPAIRED = (1 << 8) - public const int AV_DISPOSITION_VISUAL_IMPAIRED = 0x1 << 0x8; - /// AV_EF_AGGRESSIVE = 0x1 << 0x12 - public const int AV_EF_AGGRESSIVE = 0x1 << 0x12; - 
/// AV_EF_BITSTREAM = 0x1 << 0x1 - public const int AV_EF_BITSTREAM = 0x1 << 0x1; - /// AV_EF_BUFFER = 0x1 << 0x2 - public const int AV_EF_BUFFER = 0x1 << 0x2; - /// AV_EF_CAREFUL = 0x1 << 0x10 - public const int AV_EF_CAREFUL = 0x1 << 0x10; - /// AV_EF_COMPLIANT = 0x1 << 0x11 - public const int AV_EF_COMPLIANT = 0x1 << 0x11; - /// AV_EF_CRCCHECK = 0x1 << 0x0 - public const int AV_EF_CRCCHECK = 0x1 << 0x0; - /// AV_EF_EXPLODE = 0x1 << 0x3 - public const int AV_EF_EXPLODE = 0x1 << 0x3; - /// AV_EF_IGNORE_ERR = 0x1 << 0xf - public const int AV_EF_IGNORE_ERR = 0x1 << 0xf; - // public static av_err2str = (errnum) av_make_error_string((char[AV_ERROR_MAX_STRING_SIZE]){0}, AV_ERROR_MAX_STRING_SIZE, errnum); - /// AV_ERROR_MAX_STRING_SIZE = 64 - public const int AV_ERROR_MAX_STRING_SIZE = 0x40; - // public static av_extern_inline = inline; - /// AV_FOURCC_MAX_STRING_SIZE = 32 - public const int AV_FOURCC_MAX_STRING_SIZE = 0x20; - // public static av_fourcc2str = (fourcc) av_fourcc_make_string((char[AV_FOURCC_MAX_STRING_SIZE]){0}, fourcc); - /// AV_FRAME_FILENAME_FLAGS_MULTIPLE = 1 - public const int AV_FRAME_FILENAME_FLAGS_MULTIPLE = 0x1; - /// AV_FRAME_FLAG_CORRUPT = (1 << 0) - public const int AV_FRAME_FLAG_CORRUPT = 0x1 << 0x0; - /// AV_FRAME_FLAG_DISCARD = (1 << 2) - public const int AV_FRAME_FLAG_DISCARD = 0x1 << 0x2; - // public static AV_GCC_VERSION_AT_LEAST = x; - // public static AV_GCC_VERSION_AT_MOST = x; - /// AV_GET_BUFFER_FLAG_REF = 0x1 << 0x0 - public const int AV_GET_BUFFER_FLAG_REF = 0x1 << 0x0; - /// AV_GET_ENCODE_BUFFER_FLAG_REF = 0x1 << 0x0 - public const int AV_GET_ENCODE_BUFFER_FLAG_REF = 0x1 << 0x0; - // public static AV_GLUE = (a, b) a ## b; - // public static AV_HAS_BUILTIN = (x)(__has_builtin(x)); - /// AV_HAVE_BIGENDIAN = 0 - public const int AV_HAVE_BIGENDIAN = 0x0; - /// AV_HAVE_FAST_UNALIGNED = 1 - public const int AV_HAVE_FAST_UNALIGNED = 0x1; - /// AV_HWACCEL_CODEC_CAP_EXPERIMENTAL = 0x200 - public const int AV_HWACCEL_CODEC_CAP_EXPERIMENTAL 
= 0x200; - /// AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH = 0x1 << 0x1 - public const int AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH = 0x1 << 0x1; - /// AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH = 0x1 << 0x2 - public const int AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH = 0x1 << 0x2; - /// AV_HWACCEL_FLAG_IGNORE_LEVEL = 0x1 << 0x0 - public const int AV_HWACCEL_FLAG_IGNORE_LEVEL = 0x1 << 0x0; - /// AV_INPUT_BUFFER_MIN_SIZE = 0x4000 - public const int AV_INPUT_BUFFER_MIN_SIZE = 0x4000; - /// AV_INPUT_BUFFER_PADDING_SIZE = 64 - public const int AV_INPUT_BUFFER_PADDING_SIZE = 0x40; - // public static av_int_list_length = list; - // public static AV_IS_INPUT_DEVICE = (category)((category)(==41) || (category)(==43) || (category)(==45)); - // public static AV_IS_OUTPUT_DEVICE = (category)((category)(==40) || (category)(==42) || (category)(==44)); - // public static AV_JOIN = a; - // public static AV_LOG_C = (x)((x)(<<0x8)); - /// AV_LOG_DEBUG = 48 - public const int AV_LOG_DEBUG = 0x30; - /// AV_LOG_ERROR = 16 - public const int AV_LOG_ERROR = 0x10; - /// AV_LOG_FATAL = 8 - public const int AV_LOG_FATAL = 0x8; - /// AV_LOG_INFO = 32 - public const int AV_LOG_INFO = 0x20; - /// AV_LOG_MAX_OFFSET = (AV_LOG_TRACE - AV_LOG_QUIET) - public const int AV_LOG_MAX_OFFSET = AV_LOG_TRACE - AV_LOG_QUIET; - /// AV_LOG_PANIC = 0 - public const int AV_LOG_PANIC = 0x0; - /// AV_LOG_PRINT_LEVEL = 2 - public const int AV_LOG_PRINT_LEVEL = 0x2; - /// AV_LOG_QUIET = -8 - public const int AV_LOG_QUIET = -0x8; - /// AV_LOG_SKIP_REPEATED = 1 - public const int AV_LOG_SKIP_REPEATED = 0x1; - /// AV_LOG_TRACE = 56 - public const int AV_LOG_TRACE = 0x38; - /// AV_LOG_VERBOSE = 40 - public const int AV_LOG_VERBOSE = 0x28; - /// AV_LOG_WARNING = 24 - public const int AV_LOG_WARNING = 0x18; - // public static av_mod_uintp2 = av_mod_uintp2_c; - // public static AV_NE = be; - // public static av_noinline = __declspec(noinline); - /// AV_NOPTS_VALUE = ((int64_t)UINT64_C(0x8000000000000000)) - public static readonly long 
AV_NOPTS_VALUE = (long)(UINT64_C(0x8000000000000000L)); - // public static av_noreturn = __attribute__((noreturn)); - // public static AV_NOWARN_DEPRECATED = (code)(_Pragma("GCC diagnostic push")); - /// AV_NUM_DATA_POINTERS = 8 - public const int AV_NUM_DATA_POINTERS = 0x8; - /// AV_OPT_ALLOW_NULL = (1 << 2) - public const int AV_OPT_ALLOW_NULL = 0x1 << 0x2; - /// AV_OPT_FLAG_AUDIO_PARAM = 8 - public const int AV_OPT_FLAG_AUDIO_PARAM = 0x8; - /// AV_OPT_FLAG_BSF_PARAM = (1<<8) - public const int AV_OPT_FLAG_BSF_PARAM = 0x1 << 0x8; - /// AV_OPT_FLAG_CHILD_CONSTS = (1<<18) - public const int AV_OPT_FLAG_CHILD_CONSTS = 0x1 << 0x12; - /// AV_OPT_FLAG_DECODING_PARAM = 2 - public const int AV_OPT_FLAG_DECODING_PARAM = 0x2; - /// AV_OPT_FLAG_DEPRECATED = (1<<17) - public const int AV_OPT_FLAG_DEPRECATED = 0x1 << 0x11; - /// AV_OPT_FLAG_ENCODING_PARAM = 1 - public const int AV_OPT_FLAG_ENCODING_PARAM = 0x1; - /// AV_OPT_FLAG_EXPORT = 64 - public const int AV_OPT_FLAG_EXPORT = 0x40; - /// AV_OPT_FLAG_FILTERING_PARAM = (1<<16) - public const int AV_OPT_FLAG_FILTERING_PARAM = 0x1 << 0x10; - /// AV_OPT_FLAG_READONLY = 128 - public const int AV_OPT_FLAG_READONLY = 0x80; - /// AV_OPT_FLAG_RUNTIME_PARAM = (1<<15) - public const int AV_OPT_FLAG_RUNTIME_PARAM = 0x1 << 0xf; - /// AV_OPT_FLAG_SUBTITLE_PARAM = 32 - public const int AV_OPT_FLAG_SUBTITLE_PARAM = 0x20; - /// AV_OPT_FLAG_VIDEO_PARAM = 16 - public const int AV_OPT_FLAG_VIDEO_PARAM = 0x10; - /// AV_OPT_MULTI_COMPONENT_RANGE = (1 << 12) - public const int AV_OPT_MULTI_COMPONENT_RANGE = 0x1 << 0xc; - /// AV_OPT_SEARCH_CHILDREN = (1 << 0) - public const int AV_OPT_SEARCH_CHILDREN = 0x1 << 0x0; - /// AV_OPT_SEARCH_FAKE_OBJ = (1 << 1) - public const int AV_OPT_SEARCH_FAKE_OBJ = 0x1 << 0x1; - /// AV_OPT_SERIALIZE_OPT_FLAGS_EXACT = 0x00000002 - public const int AV_OPT_SERIALIZE_OPT_FLAGS_EXACT = 0x2; - /// AV_OPT_SERIALIZE_SKIP_DEFAULTS = 0x00000001 - public const int AV_OPT_SERIALIZE_SKIP_DEFAULTS = 0x1; - // public static 
av_opt_set_int_list = (obj, name, val, term, flags) (av_int_list_length(val, term) > INT_MAX / sizeof(*(val)) ? AVERROR(EINVAL) : av_opt_set_bin(obj, name, (const uint8_t *)(val), av_int_list_length(val, term) * sizeof(*(val)), flags)); - // public static av_parity = av_parity_c; - /// AV_PARSER_PTS_NB = 0x4 - public const int AV_PARSER_PTS_NB = 0x4; - // public static AV_PIX_FMT_0BGR32 = AV_PIX_FMT_NE(0BGR, RGB0); - // public static AV_PIX_FMT_0RGB32 = AV_PIX_FMT_NE(0x0, RGB, BGR0); - // public static AV_PIX_FMT_AYUV64 = AV_PIX_FMT_NE(AYUV64BE, AYUV64LE); - // public static AV_PIX_FMT_BAYER_BGGR16 = AV_PIX_FMT_NE(BAYER_BGGR16BE, BAYER_BGGR16LE); - // public static AV_PIX_FMT_BAYER_GBRG16 = AV_PIX_FMT_NE(BAYER_GBRG16BE, BAYER_GBRG16LE); - // public static AV_PIX_FMT_BAYER_GRBG16 = AV_PIX_FMT_NE(BAYER_GRBG16BE, BAYER_GRBG16LE); - // public static AV_PIX_FMT_BAYER_RGGB16 = AV_PIX_FMT_NE(BAYER_RGGB16BE, BAYER_RGGB16LE); - // public static AV_PIX_FMT_BGR32 = AV_PIX_FMT_NE(ABGR, RGBA); - // public static AV_PIX_FMT_BGR32_1 = AV_PIX_FMT_NE(BGRA, ARGB); - // public static AV_PIX_FMT_BGR444 = AV_PIX_FMT_NE(BGR444BE, BGR444LE); - // public static AV_PIX_FMT_BGR48 = AV_PIX_FMT_NE(BGR48BE, BGR48LE); - // public static AV_PIX_FMT_BGR555 = AV_PIX_FMT_NE(BGR555BE, BGR555LE); - // public static AV_PIX_FMT_BGR565 = AV_PIX_FMT_NE(BGR565BE, BGR565LE); - // public static AV_PIX_FMT_BGRA64 = AV_PIX_FMT_NE(BGRA64BE, BGRA64LE); - /// AV_PIX_FMT_FLAG_ALPHA = 0x1 << 0x7 - public const int AV_PIX_FMT_FLAG_ALPHA = 0x1 << 0x7; - /// AV_PIX_FMT_FLAG_BAYER = 0x1 << 0x8 - public const int AV_PIX_FMT_FLAG_BAYER = 0x1 << 0x8; - /// AV_PIX_FMT_FLAG_BE = 0x1 << 0x0 - public const int AV_PIX_FMT_FLAG_BE = 0x1 << 0x0; - /// AV_PIX_FMT_FLAG_BITSTREAM = 0x1 << 0x2 - public const int AV_PIX_FMT_FLAG_BITSTREAM = 0x1 << 0x2; - /// AV_PIX_FMT_FLAG_FLOAT = 0x1 << 0x9 - public const int AV_PIX_FMT_FLAG_FLOAT = 0x1 << 0x9; - /// AV_PIX_FMT_FLAG_HWACCEL = 0x1 << 0x3 - public const int AV_PIX_FMT_FLAG_HWACCEL = 
0x1 << 0x3; - /// AV_PIX_FMT_FLAG_PAL = 0x1 << 0x1 - public const int AV_PIX_FMT_FLAG_PAL = 0x1 << 0x1; - /// AV_PIX_FMT_FLAG_PLANAR = 0x1 << 0x4 - public const int AV_PIX_FMT_FLAG_PLANAR = 0x1 << 0x4; - /// AV_PIX_FMT_FLAG_RGB = 0x1 << 0x5 - public const int AV_PIX_FMT_FLAG_RGB = 0x1 << 0x5; - // public static AV_PIX_FMT_GBRAP10 = AV_PIX_FMT_NE(GBRAP10BE, GBRAP10LE); - // public static AV_PIX_FMT_GBRAP12 = AV_PIX_FMT_NE(GBRAP12BE, GBRAP12LE); - // public static AV_PIX_FMT_GBRAP16 = AV_PIX_FMT_NE(GBRAP16BE, GBRAP16LE); - // public static AV_PIX_FMT_GBRAPF32 = AV_PIX_FMT_NE(GBRAPF32BE, GBRAPF32LE); - // public static AV_PIX_FMT_GBRP10 = AV_PIX_FMT_NE(GBRP10BE, GBRP10LE); - // public static AV_PIX_FMT_GBRP12 = AV_PIX_FMT_NE(GBRP12BE, GBRP12LE); - // public static AV_PIX_FMT_GBRP14 = AV_PIX_FMT_NE(GBRP14BE, GBRP14LE); - // public static AV_PIX_FMT_GBRP16 = AV_PIX_FMT_NE(GBRP16BE, GBRP16LE); - // public static AV_PIX_FMT_GBRP9 = AV_PIX_FMT_NE(GBRP9BE, GBRP9LE); - // public static AV_PIX_FMT_GBRPF32 = AV_PIX_FMT_NE(GBRPF32BE, GBRPF32LE); - // public static AV_PIX_FMT_GRAY10 = AV_PIX_FMT_NE(GRAY10BE, GRAY10LE); - // public static AV_PIX_FMT_GRAY12 = AV_PIX_FMT_NE(GRAY12BE, GRAY12LE); - // public static AV_PIX_FMT_GRAY14 = AV_PIX_FMT_NE(GRAY14BE, GRAY14LE); - // public static AV_PIX_FMT_GRAY16 = AV_PIX_FMT_NE(GRAY16BE, GRAY16LE); - // public static AV_PIX_FMT_GRAY9 = AV_PIX_FMT_NE(GRAY9BE, GRAY9LE); - // public static AV_PIX_FMT_GRAYF32 = AV_PIX_FMT_NE(GRAYF32BE, GRAYF32LE); - // public static AV_PIX_FMT_NE = (be, le) AV_PIX_FMT_##le; - // public static AV_PIX_FMT_NV20 = AV_PIX_FMT_NE(NV20BE, NV20LE); - // public static AV_PIX_FMT_P010 = AV_PIX_FMT_NE(P010BE, P010LE); - // public static AV_PIX_FMT_P016 = AV_PIX_FMT_NE(P016BE, P016LE); - // public static AV_PIX_FMT_P210 = AV_PIX_FMT_NE(P210BE, P210LE); - // public static AV_PIX_FMT_P216 = AV_PIX_FMT_NE(P216BE, P216LE); - // public static AV_PIX_FMT_P410 = AV_PIX_FMT_NE(P410BE, P410LE); - // public static AV_PIX_FMT_P416 = 
AV_PIX_FMT_NE(P416BE, P416LE); - // public static AV_PIX_FMT_RGB32 = AV_PIX_FMT_NE(ARGB, BGRA); - // public static AV_PIX_FMT_RGB32_1 = AV_PIX_FMT_NE(RGBA, ABGR); - // public static AV_PIX_FMT_RGB444 = AV_PIX_FMT_NE(RGB444BE, RGB444LE); - // public static AV_PIX_FMT_RGB48 = AV_PIX_FMT_NE(RGB48BE, RGB48LE); - // public static AV_PIX_FMT_RGB555 = AV_PIX_FMT_NE(RGB555BE, RGB555LE); - // public static AV_PIX_FMT_RGB565 = AV_PIX_FMT_NE(RGB565BE, RGB565LE); - // public static AV_PIX_FMT_RGBA64 = AV_PIX_FMT_NE(RGBA64BE, RGBA64LE); - // public static AV_PIX_FMT_X2BGR10 = AV_PIX_FMT_NE(X2BGR10BE, X2BGR10LE); - // public static AV_PIX_FMT_X2RGB10 = AV_PIX_FMT_NE(X2RGB10BE, X2RGB10LE); - // public static AV_PIX_FMT_XYZ12 = AV_PIX_FMT_NE(XYZ12BE, XYZ12LE); - // public static AV_PIX_FMT_Y210 = AV_PIX_FMT_NE(Y210BE, Y210LE); - // public static AV_PIX_FMT_YA16 = AV_PIX_FMT_NE(YA16BE, YA16LE); - // public static AV_PIX_FMT_YUV420P10 = AV_PIX_FMT_NE(YUV420P10BE, YUV420P10LE); - // public static AV_PIX_FMT_YUV420P12 = AV_PIX_FMT_NE(YUV420P12BE, YUV420P12LE); - // public static AV_PIX_FMT_YUV420P14 = AV_PIX_FMT_NE(YUV420P14BE, YUV420P14LE); - // public static AV_PIX_FMT_YUV420P16 = AV_PIX_FMT_NE(YUV420P16BE, YUV420P16LE); - // public static AV_PIX_FMT_YUV420P9 = AV_PIX_FMT_NE(YUV420P9BE, YUV420P9LE); - // public static AV_PIX_FMT_YUV422P10 = AV_PIX_FMT_NE(YUV422P10BE, YUV422P10LE); - // public static AV_PIX_FMT_YUV422P12 = AV_PIX_FMT_NE(YUV422P12BE, YUV422P12LE); - // public static AV_PIX_FMT_YUV422P14 = AV_PIX_FMT_NE(YUV422P14BE, YUV422P14LE); - // public static AV_PIX_FMT_YUV422P16 = AV_PIX_FMT_NE(YUV422P16BE, YUV422P16LE); - // public static AV_PIX_FMT_YUV422P9 = AV_PIX_FMT_NE(YUV422P9BE, YUV422P9LE); - // public static AV_PIX_FMT_YUV440P10 = AV_PIX_FMT_NE(YUV440P10BE, YUV440P10LE); - // public static AV_PIX_FMT_YUV440P12 = AV_PIX_FMT_NE(YUV440P12BE, YUV440P12LE); - // public static AV_PIX_FMT_YUV444P10 = AV_PIX_FMT_NE(YUV444P10BE, YUV444P10LE); - // public static 
AV_PIX_FMT_YUV444P12 = AV_PIX_FMT_NE(YUV444P12BE, YUV444P12LE); - // public static AV_PIX_FMT_YUV444P14 = AV_PIX_FMT_NE(YUV444P14BE, YUV444P14LE); - // public static AV_PIX_FMT_YUV444P16 = AV_PIX_FMT_NE(YUV444P16BE, YUV444P16LE); - // public static AV_PIX_FMT_YUV444P9 = AV_PIX_FMT_NE(YUV444P9BE, YUV444P9LE); - // public static AV_PIX_FMT_YUVA420P10 = AV_PIX_FMT_NE(YUVA420P10BE, YUVA420P10LE); - // public static AV_PIX_FMT_YUVA420P16 = AV_PIX_FMT_NE(YUVA420P16BE, YUVA420P16LE); - // public static AV_PIX_FMT_YUVA420P9 = AV_PIX_FMT_NE(YUVA420P9BE, YUVA420P9LE); - // public static AV_PIX_FMT_YUVA422P10 = AV_PIX_FMT_NE(YUVA422P10BE, YUVA422P10LE); - // public static AV_PIX_FMT_YUVA422P12 = AV_PIX_FMT_NE(YUVA422P12BE, YUVA422P12LE); - // public static AV_PIX_FMT_YUVA422P16 = AV_PIX_FMT_NE(YUVA422P16BE, YUVA422P16LE); - // public static AV_PIX_FMT_YUVA422P9 = AV_PIX_FMT_NE(YUVA422P9BE, YUVA422P9LE); - // public static AV_PIX_FMT_YUVA444P10 = AV_PIX_FMT_NE(YUVA444P10BE, YUVA444P10LE); - // public static AV_PIX_FMT_YUVA444P12 = AV_PIX_FMT_NE(YUVA444P12BE, YUVA444P12LE); - // public static AV_PIX_FMT_YUVA444P16 = AV_PIX_FMT_NE(YUVA444P16BE, YUVA444P16LE); - // public static AV_PIX_FMT_YUVA444P9 = AV_PIX_FMT_NE(YUVA444P9BE, YUVA444P9LE); - /// AV_PKT_DATA_QUALITY_FACTOR = AV_PKT_DATA_QUALITY_STATS - public static readonly int AV_PKT_DATA_QUALITY_FACTOR = 8; - /// AV_PKT_FLAG_CORRUPT = 0x0002 - public const int AV_PKT_FLAG_CORRUPT = 0x2; - /// AV_PKT_FLAG_DISCARD = 0x0004 - public const int AV_PKT_FLAG_DISCARD = 0x4; - /// AV_PKT_FLAG_DISPOSABLE = 0x0010 - public const int AV_PKT_FLAG_DISPOSABLE = 0x10; - /// AV_PKT_FLAG_KEY = 0x0001 - public const int AV_PKT_FLAG_KEY = 0x1; - /// AV_PKT_FLAG_TRUSTED = 0x0008 - public const int AV_PKT_FLAG_TRUSTED = 0x8; - // public static av_popcount = av_popcount_c; - // public static av_popcount64 = av_popcount64_c; - // public static AV_PRAGMA = (s) _Pragma(#s); - // public static av_printf_format = fmtpos; - /// AV_PROGRAM_RUNNING = 1 - 
public const int AV_PROGRAM_RUNNING = 0x1; - /// AV_PTS_WRAP_ADD_OFFSET = 1 - public const int AV_PTS_WRAP_ADD_OFFSET = 0x1; - /// AV_PTS_WRAP_IGNORE = 0 - public const int AV_PTS_WRAP_IGNORE = 0x0; - /// AV_PTS_WRAP_SUB_OFFSET = -1 - public const int AV_PTS_WRAP_SUB_OFFSET = -0x1; - // public static av_pure = __attribute__((pure)); - // public static av_sat_add32 = av_sat_add32_c; - // public static av_sat_add64 = av_sat_add64_c; - // public static av_sat_dadd32 = av_sat_dadd32_c; - // public static av_sat_dsub32 = av_sat_dsub32_c; - // public static av_sat_sub32 = av_sat_sub32_c; - // public static av_sat_sub64 = av_sat_sub64_c; - // public static AV_STRINGIFY = (s)(AV_TOSTRING(s)); - /// AV_SUBTITLE_FLAG_FORCED = 0x1 - public const int AV_SUBTITLE_FLAG_FORCED = 0x1; - /// AV_TIME_BASE = 1000000 - public const int AV_TIME_BASE = 0xf4240; - // public static AV_TIME_BASE_Q = (AVRational){1, AV_TIME_BASE}; - /// AV_TIMECODE_STR_SIZE = 0x17 - public const int AV_TIMECODE_STR_SIZE = 0x17; - // public static AV_TOSTRING = (s) #s; - // public static av_uninit = (x) x=x; - // public static av_unused = __attribute__((unused)); - // public static av_used = __attribute__((used)); - // public static AV_VERSION = a; - // public static AV_VERSION_DOT = (a, b, c) a ##.## b ##.## c; - // public static AV_VERSION_INT = a; - // public static AV_VERSION_MAJOR = (a)((a)(>>0x10)); - // public static AV_VERSION_MICRO = (a)((a)(&0xff)); - // public static AV_VERSION_MINOR = (a)((a)(&0xff00) >> 0x8); - // public static AVERROR = (e) (-(e)); - /// AVERROR_BSF_NOT_FOUND = FFERRTAG(0xF8,'B','S','F') - public static readonly int AVERROR_BSF_NOT_FOUND = FFERRTAG(0xf8, 'B', 'S', 'F'); - /// AVERROR_BUFFER_TOO_SMALL = FFERRTAG( 'B','U','F','S') - public static readonly int AVERROR_BUFFER_TOO_SMALL = FFERRTAG('B', 'U', 'F', 'S'); - /// AVERROR_BUG = FFERRTAG( 'B','U','G','!') - public static readonly int AVERROR_BUG = FFERRTAG('B', 'U', 'G', '!'); - /// AVERROR_BUG2 = FFERRTAG( 'B','U','G',' ') 
- public static readonly int AVERROR_BUG2 = FFERRTAG('B', 'U', 'G', ' '); - /// AVERROR_DECODER_NOT_FOUND = FFERRTAG(0xF8,'D','E','C') - public static readonly int AVERROR_DECODER_NOT_FOUND = FFERRTAG(0xf8, 'D', 'E', 'C'); - /// AVERROR_DEMUXER_NOT_FOUND = FFERRTAG(0xF8,'D','E','M') - public static readonly int AVERROR_DEMUXER_NOT_FOUND = FFERRTAG(0xf8, 'D', 'E', 'M'); - /// AVERROR_ENCODER_NOT_FOUND = FFERRTAG(0xF8,'E','N','C') - public static readonly int AVERROR_ENCODER_NOT_FOUND = FFERRTAG(0xf8, 'E', 'N', 'C'); - /// AVERROR_EOF = FFERRTAG( 'E','O','F',' ') - public static readonly int AVERROR_EOF = FFERRTAG('E', 'O', 'F', ' '); - /// AVERROR_EXIT = FFERRTAG( 'E','X','I','T') - public static readonly int AVERROR_EXIT = FFERRTAG('E', 'X', 'I', 'T'); - /// AVERROR_EXPERIMENTAL = (-0x2bb2afa8) - public const int AVERROR_EXPERIMENTAL = -0x2bb2afa8; - /// AVERROR_EXTERNAL = FFERRTAG( 'E','X','T',' ') - public static readonly int AVERROR_EXTERNAL = FFERRTAG('E', 'X', 'T', ' '); - /// AVERROR_FILTER_NOT_FOUND = FFERRTAG(0xF8,'F','I','L') - public static readonly int AVERROR_FILTER_NOT_FOUND = FFERRTAG(0xf8, 'F', 'I', 'L'); - /// AVERROR_HTTP_BAD_REQUEST = FFERRTAG(0xF8,'4','0','0') - public static readonly int AVERROR_HTTP_BAD_REQUEST = FFERRTAG(0xf8, '4', '0', '0'); - /// AVERROR_HTTP_FORBIDDEN = FFERRTAG(0xF8,'4','0','3') - public static readonly int AVERROR_HTTP_FORBIDDEN = FFERRTAG(0xf8, '4', '0', '3'); - /// AVERROR_HTTP_NOT_FOUND = FFERRTAG(0xF8,'4','0','4') - public static readonly int AVERROR_HTTP_NOT_FOUND = FFERRTAG(0xf8, '4', '0', '4'); - /// AVERROR_HTTP_OTHER_4XX = FFERRTAG(0xF8,'4','X','X') - public static readonly int AVERROR_HTTP_OTHER_4XX = FFERRTAG(0xf8, '4', 'X', 'X'); - /// AVERROR_HTTP_SERVER_ERROR = FFERRTAG(0xF8,'5','X','X') - public static readonly int AVERROR_HTTP_SERVER_ERROR = FFERRTAG(0xf8, '5', 'X', 'X'); - /// AVERROR_HTTP_UNAUTHORIZED = FFERRTAG(0xF8,'4','0','1') - public static readonly int AVERROR_HTTP_UNAUTHORIZED = FFERRTAG(0xf8, 
'4', '0', '1'); - /// AVERROR_INPUT_CHANGED = (-0x636e6701) - public const int AVERROR_INPUT_CHANGED = -0x636e6701; - /// AVERROR_INVALIDDATA = FFERRTAG( 'I','N','D','A') - public static readonly int AVERROR_INVALIDDATA = FFERRTAG('I', 'N', 'D', 'A'); - /// AVERROR_MUXER_NOT_FOUND = FFERRTAG(0xF8,'M','U','X') - public static readonly int AVERROR_MUXER_NOT_FOUND = FFERRTAG(0xf8, 'M', 'U', 'X'); - /// AVERROR_OPTION_NOT_FOUND = FFERRTAG(0xF8,'O','P','T') - public static readonly int AVERROR_OPTION_NOT_FOUND = FFERRTAG(0xf8, 'O', 'P', 'T'); - /// AVERROR_OUTPUT_CHANGED = (-0x636e6702) - public const int AVERROR_OUTPUT_CHANGED = -0x636e6702; - /// AVERROR_PATCHWELCOME = FFERRTAG( 'P','A','W','E') - public static readonly int AVERROR_PATCHWELCOME = FFERRTAG('P', 'A', 'W', 'E'); - /// AVERROR_PROTOCOL_NOT_FOUND = FFERRTAG(0xF8,'P','R','O') - public static readonly int AVERROR_PROTOCOL_NOT_FOUND = FFERRTAG(0xf8, 'P', 'R', 'O'); - /// AVERROR_STREAM_NOT_FOUND = FFERRTAG(0xF8,'S','T','R') - public static readonly int AVERROR_STREAM_NOT_FOUND = FFERRTAG(0xf8, 'S', 'T', 'R'); - /// AVERROR_UNKNOWN = FFERRTAG( 'U','N','K','N') - public static readonly int AVERROR_UNKNOWN = FFERRTAG('U', 'N', 'K', 'N'); - /// AVFILTER_CMD_FLAG_FAST = 0x2 - public const int AVFILTER_CMD_FLAG_FAST = 0x2; - /// AVFILTER_CMD_FLAG_ONE = 0x1 - public const int AVFILTER_CMD_FLAG_ONE = 0x1; - /// AVFILTER_FLAG_DYNAMIC_INPUTS = 0x1 << 0x0 - public const int AVFILTER_FLAG_DYNAMIC_INPUTS = 0x1 << 0x0; - /// AVFILTER_FLAG_DYNAMIC_OUTPUTS = 0x1 << 0x1 - public const int AVFILTER_FLAG_DYNAMIC_OUTPUTS = 0x1 << 0x1; - /// AVFILTER_FLAG_METADATA_ONLY = 0x1 << 0x3 - public const int AVFILTER_FLAG_METADATA_ONLY = 0x1 << 0x3; - /// AVFILTER_FLAG_SLICE_THREADS = 0x1 << 0x2 - public const int AVFILTER_FLAG_SLICE_THREADS = 0x1 << 0x2; - /// AVFILTER_FLAG_SUPPORT_TIMELINE = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL - public const int AVFILTER_FLAG_SUPPORT_TIMELINE = 
AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL; - /// AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC = 0x1 << 0x10 - public const int AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC = 0x1 << 0x10; - /// AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL = 0x1 << 0x11 - public const int AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL = 0x1 << 0x11; - /// AVFILTER_THREAD_SLICE = 0x1 << 0x0 - public const int AVFILTER_THREAD_SLICE = 0x1 << 0x0; - /// AVFMT_ALLOW_FLUSH = 0x10000 - public const int AVFMT_ALLOW_FLUSH = 0x10000; - /// AVFMT_AVOID_NEG_TS_AUTO = -1 - public const int AVFMT_AVOID_NEG_TS_AUTO = -0x1; - /// AVFMT_AVOID_NEG_TS_DISABLED = 0 - public const int AVFMT_AVOID_NEG_TS_DISABLED = 0x0; - /// AVFMT_AVOID_NEG_TS_MAKE_NON_NEGATIVE = 1 - public const int AVFMT_AVOID_NEG_TS_MAKE_NON_NEGATIVE = 0x1; - /// AVFMT_AVOID_NEG_TS_MAKE_ZERO = 2 - public const int AVFMT_AVOID_NEG_TS_MAKE_ZERO = 0x2; - /// AVFMT_EVENT_FLAG_METADATA_UPDATED = 0x0001 - public const int AVFMT_EVENT_FLAG_METADATA_UPDATED = 0x1; - /// AVFMT_EXPERIMENTAL = 0x0004 - public const int AVFMT_EXPERIMENTAL = 0x4; - /// AVFMT_FLAG_AUTO_BSF = 0x200000 - public const int AVFMT_FLAG_AUTO_BSF = 0x200000; - /// AVFMT_FLAG_BITEXACT = 0x0400 - public const int AVFMT_FLAG_BITEXACT = 0x400; - /// AVFMT_FLAG_CUSTOM_IO = 0x0080 - public const int AVFMT_FLAG_CUSTOM_IO = 0x80; - /// AVFMT_FLAG_DISCARD_CORRUPT = 0x0100 - public const int AVFMT_FLAG_DISCARD_CORRUPT = 0x100; - /// AVFMT_FLAG_FAST_SEEK = 0x80000 - public const int AVFMT_FLAG_FAST_SEEK = 0x80000; - /// AVFMT_FLAG_FLUSH_PACKETS = 0x0200 - public const int AVFMT_FLAG_FLUSH_PACKETS = 0x200; - /// AVFMT_FLAG_GENPTS = 0x0001 - public const int AVFMT_FLAG_GENPTS = 0x1; - /// AVFMT_FLAG_IGNDTS = 0x0008 - public const int AVFMT_FLAG_IGNDTS = 0x8; - /// AVFMT_FLAG_IGNIDX = 0x0002 - public const int AVFMT_FLAG_IGNIDX = 0x2; - /// AVFMT_FLAG_NOBUFFER = 0x0040 - public const int AVFMT_FLAG_NOBUFFER = 0x40; - /// AVFMT_FLAG_NOFILLIN = 0x0010 - public const int 
AVFMT_FLAG_NOFILLIN = 0x10; - /// AVFMT_FLAG_NONBLOCK = 0x0004 - public const int AVFMT_FLAG_NONBLOCK = 0x4; - /// AVFMT_FLAG_NOPARSE = 0x0020 - public const int AVFMT_FLAG_NOPARSE = 0x20; - /// AVFMT_FLAG_PRIV_OPT = 0x20000 - public const int AVFMT_FLAG_PRIV_OPT = 0x20000; - /// AVFMT_FLAG_SHORTEST = 0x100000 - public const int AVFMT_FLAG_SHORTEST = 0x100000; - /// AVFMT_FLAG_SORT_DTS = 0x10000 - public const int AVFMT_FLAG_SORT_DTS = 0x10000; - /// AVFMT_GENERIC_INDEX = 0x0100 - public const int AVFMT_GENERIC_INDEX = 0x100; - /// AVFMT_GLOBALHEADER = 0x0040 - public const int AVFMT_GLOBALHEADER = 0x40; - /// AVFMT_NEEDNUMBER = 0x0002 - public const int AVFMT_NEEDNUMBER = 0x2; - /// AVFMT_NO_BYTE_SEEK = 0x8000 - public const int AVFMT_NO_BYTE_SEEK = 0x8000; - /// AVFMT_NOBINSEARCH = 0x2000 - public const int AVFMT_NOBINSEARCH = 0x2000; - /// AVFMT_NODIMENSIONS = 0x0800 - public const int AVFMT_NODIMENSIONS = 0x800; - /// AVFMT_NOFILE = 0x0001 - public const int AVFMT_NOFILE = 0x1; - /// AVFMT_NOGENSEARCH = 0x4000 - public const int AVFMT_NOGENSEARCH = 0x4000; - /// AVFMT_NOSTREAMS = 0x1000 - public const int AVFMT_NOSTREAMS = 0x1000; - /// AVFMT_NOTIMESTAMPS = 0x0080 - public const int AVFMT_NOTIMESTAMPS = 0x80; - /// AVFMT_SEEK_TO_PTS = 0x4000000 - public const int AVFMT_SEEK_TO_PTS = 0x4000000; - /// AVFMT_SHOW_IDS = 0x0008 - public const int AVFMT_SHOW_IDS = 0x8; - /// AVFMT_TS_DISCONT = 0x0200 - public const int AVFMT_TS_DISCONT = 0x200; - /// AVFMT_TS_NEGATIVE = 0x40000 - public const int AVFMT_TS_NEGATIVE = 0x40000; - /// AVFMT_TS_NONSTRICT = 0x20000 - public const int AVFMT_TS_NONSTRICT = 0x20000; - /// AVFMT_VARIABLE_FPS = 0x0400 - public const int AVFMT_VARIABLE_FPS = 0x400; - /// AVFMTCTX_NOHEADER = 0x0001 - public const int AVFMTCTX_NOHEADER = 0x1; - /// AVFMTCTX_UNSEEKABLE = 0x0002 - public const int AVFMTCTX_UNSEEKABLE = 0x2; - /// AVINDEX_DISCARD_FRAME = 0x0002 - public const int AVINDEX_DISCARD_FRAME = 0x2; - /// AVINDEX_KEYFRAME = 0x0001 - public 
const int AVINDEX_KEYFRAME = 0x1; - /// AVIO_FLAG_DIRECT = 0x8000 - public const int AVIO_FLAG_DIRECT = 0x8000; - /// AVIO_FLAG_NONBLOCK = 8 - public const int AVIO_FLAG_NONBLOCK = 0x8; - /// AVIO_FLAG_READ = 1 - public const int AVIO_FLAG_READ = 0x1; - /// AVIO_FLAG_READ_WRITE = (AVIO_FLAG_READ|AVIO_FLAG_WRITE) - public const int AVIO_FLAG_READ_WRITE = AVIO_FLAG_READ | AVIO_FLAG_WRITE; - /// AVIO_FLAG_WRITE = 2 - public const int AVIO_FLAG_WRITE = 0x2; - // public static avio_print = s; - /// AVIO_SEEKABLE_NORMAL = (1 << 0) - public const int AVIO_SEEKABLE_NORMAL = 0x1 << 0x0; - /// AVIO_SEEKABLE_TIME = (1 << 1) - public const int AVIO_SEEKABLE_TIME = 0x1 << 0x1; - /// AVPALETTE_COUNT = 256 - public const int AVPALETTE_COUNT = 0x100; - /// AVPALETTE_SIZE = 1024 - public const int AVPALETTE_SIZE = 0x400; - /// AVPROBE_PADDING_SIZE = 32 - public const int AVPROBE_PADDING_SIZE = 0x20; - /// AVPROBE_SCORE_EXTENSION = 50 - public const int AVPROBE_SCORE_EXTENSION = 0x32; - /// AVPROBE_SCORE_MAX = 100 - public const int AVPROBE_SCORE_MAX = 0x64; - /// AVPROBE_SCORE_MIME = 75 - public const int AVPROBE_SCORE_MIME = 0x4b; - /// AVPROBE_SCORE_RETRY = (AVPROBE_SCORE_MAX/4) - public const int AVPROBE_SCORE_RETRY = AVPROBE_SCORE_MAX / 0x4; - /// AVPROBE_SCORE_STREAM_RETRY = (AVPROBE_SCORE_MAX/4-1) - public const int AVPROBE_SCORE_STREAM_RETRY = AVPROBE_SCORE_MAX / 0x4 - 0x1; - /// AVSEEK_FLAG_ANY = 4 - public const int AVSEEK_FLAG_ANY = 0x4; - /// AVSEEK_FLAG_BACKWARD = 1 - public const int AVSEEK_FLAG_BACKWARD = 0x1; - /// AVSEEK_FLAG_BYTE = 2 - public const int AVSEEK_FLAG_BYTE = 0x2; - /// AVSEEK_FLAG_FRAME = 8 - public const int AVSEEK_FLAG_FRAME = 0x8; - /// AVSEEK_FORCE = 0x20000 - public const int AVSEEK_FORCE = 0x20000; - /// AVSEEK_SIZE = 0x10000 - public const int AVSEEK_SIZE = 0x10000; - /// AVSTREAM_EVENT_FLAG_METADATA_UPDATED = 0x0001 - public const int AVSTREAM_EVENT_FLAG_METADATA_UPDATED = 0x1; - /// AVSTREAM_EVENT_FLAG_NEW_PACKETS = (1 << 1) - public const int 
AVSTREAM_EVENT_FLAG_NEW_PACKETS = 0x1 << 0x1; - /// AVSTREAM_INIT_IN_INIT_OUTPUT = 1 - public const int AVSTREAM_INIT_IN_INIT_OUTPUT = 0x1; - /// AVSTREAM_INIT_IN_WRITE_HEADER = 0 - public const int AVSTREAM_INIT_IN_WRITE_HEADER = 0x0; - // public static AVUNERROR = (e) (-(e)); - // public static DECLARE_ALIGNED = n; - // public static DECLARE_ASM_ALIGNED = n; - // public static DECLARE_ASM_CONST = n; - /// FF_API_AUTO_THREADS = (LIBAVCODEC_VERSION_MAJOR < 60) - public const bool FF_API_AUTO_THREADS = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_AV_FOPEN_UTF8 = (LIBAVUTIL_VERSION_MAJOR < 58) - public const bool FF_API_AV_FOPEN_UTF8 = LIBAVUTIL_VERSION_MAJOR < 0x3a; - /// FF_API_AV_MALLOCZ_ARRAY = (LIBAVUTIL_VERSION_MAJOR < 58) - public const bool FF_API_AV_MALLOCZ_ARRAY = LIBAVUTIL_VERSION_MAJOR < 0x3a; - /// FF_API_AVCTX_TIMEBASE = (LIBAVCODEC_VERSION_MAJOR < 60) - public const bool FF_API_AVCTX_TIMEBASE = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_AVIOCONTEXT_WRITTEN = (LIBAVFORMAT_VERSION_MAJOR < 60) - public const bool FF_API_AVIOCONTEXT_WRITTEN = LIBAVFORMAT_VERSION_MAJOR < 0x3c; - /// FF_API_AVSTREAM_CLASS = (LIBAVFORMAT_VERSION_MAJOR > 59) - public const bool FF_API_AVSTREAM_CLASS = LIBAVFORMAT_VERSION_MAJOR > 0x3b; - /// FF_API_BUFFERSINK_ALLOC = LIBAVFILTER_VERSION_MAJOR < 0x9 - public const bool FF_API_BUFFERSINK_ALLOC = LIBAVFILTER_VERSION_MAJOR < 0x9; - /// FF_API_COLORSPACE_NAME = (LIBAVUTIL_VERSION_MAJOR < 58) - public const bool FF_API_COLORSPACE_NAME = LIBAVUTIL_VERSION_MAJOR < 0x3a; - /// FF_API_COMPUTE_PKT_FIELDS2 = (LIBAVFORMAT_VERSION_MAJOR < 60) - public const bool FF_API_COMPUTE_PKT_FIELDS2 = LIBAVFORMAT_VERSION_MAJOR < 0x3c; - /// FF_API_D2STR = (LIBAVUTIL_VERSION_MAJOR < 58) - public const bool FF_API_D2STR = LIBAVUTIL_VERSION_MAJOR < 0x3a; - /// FF_API_DEBUG_MV = (LIBAVCODEC_VERSION_MAJOR < 60) - public const bool FF_API_DEBUG_MV = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_DECLARE_ALIGNED = (LIBAVUTIL_VERSION_MAJOR < 58) - public 
const bool FF_API_DECLARE_ALIGNED = LIBAVUTIL_VERSION_MAJOR < 0x3a; - /// FF_API_DEVICE_CAPABILITIES = (LIBAVDEVICE_VERSION_MAJOR < 60) - public const bool FF_API_DEVICE_CAPABILITIES = LIBAVDEVICE_VERSION_MAJOR < 0x3c; - /// FF_API_FIFO_OLD_API = (LIBAVUTIL_VERSION_MAJOR < 58) - public const bool FF_API_FIFO_OLD_API = LIBAVUTIL_VERSION_MAJOR < 0x3a; - /// FF_API_FIFO_PEEK2 = (LIBAVUTIL_VERSION_MAJOR < 58) - public const bool FF_API_FIFO_PEEK2 = LIBAVUTIL_VERSION_MAJOR < 0x3a; - /// FF_API_FLAG_TRUNCATED = (LIBAVCODEC_VERSION_MAJOR < 60) - public const bool FF_API_FLAG_TRUNCATED = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_GET_FRAME_CLASS = (LIBAVCODEC_VERSION_MAJOR < 60) - public const bool FF_API_GET_FRAME_CLASS = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_IDCT_NONE = (LIBAVCODEC_VERSION_MAJOR < 60) - public const bool FF_API_IDCT_NONE = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_INIT_PACKET = (LIBAVCODEC_VERSION_MAJOR < 60) - public const bool FF_API_INIT_PACKET = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_LAVF_PRIV_OPT = (LIBAVFORMAT_VERSION_MAJOR < 60) - public const bool FF_API_LAVF_PRIV_OPT = LIBAVFORMAT_VERSION_MAJOR < 0x3c; - /// FF_API_OLD_CHANNEL_LAYOUT = (LIBAVUTIL_VERSION_MAJOR < 58) - public const bool FF_API_OLD_CHANNEL_LAYOUT = LIBAVUTIL_VERSION_MAJOR < 0x3a; - /// FF_API_OPENH264_CABAC = (LIBAVCODEC_VERSION_MAJOR < 60) - public const bool FF_API_OPENH264_CABAC = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_OPENH264_SLICE_MODE = (LIBAVCODEC_VERSION_MAJOR < 60) - public const bool FF_API_OPENH264_SLICE_MODE = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_PAD_COUNT = LIBAVFILTER_VERSION_MAJOR < 0x9 - public const bool FF_API_PAD_COUNT = LIBAVFILTER_VERSION_MAJOR < 0x9; - /// FF_API_R_FRAME_RATE = 1 - public const int FF_API_R_FRAME_RATE = 0x1; - /// FF_API_SUB_TEXT_FORMAT = (LIBAVCODEC_VERSION_MAJOR < 60) - public const bool FF_API_SUB_TEXT_FORMAT = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_SVTAV1_OPTS = (LIBAVCODEC_VERSION_MAJOR < 60) 
- public const bool FF_API_SVTAV1_OPTS = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_SWS_PARAM_OPTION = LIBAVFILTER_VERSION_MAJOR < 0x9 - public const bool FF_API_SWS_PARAM_OPTION = LIBAVFILTER_VERSION_MAJOR < 0x9; - /// FF_API_THREAD_SAFE_CALLBACKS = (LIBAVCODEC_VERSION_MAJOR < 60) - public const bool FF_API_THREAD_SAFE_CALLBACKS = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_UNUSED_CODEC_CAPS = (LIBAVCODEC_VERSION_MAJOR < 60) - public const bool FF_API_UNUSED_CODEC_CAPS = LIBAVCODEC_VERSION_MAJOR < 0x3c; - /// FF_API_XVMC = (LIBAVUTIL_VERSION_MAJOR < 58) - public const bool FF_API_XVMC = LIBAVUTIL_VERSION_MAJOR < 0x3a; - // public static FF_ARRAY_ELEMS = (a) (sizeof(a) / sizeof((a)[0])); - /// FF_BUG_AMV = 0x20 - public const int FF_BUG_AMV = 0x20; - /// FF_BUG_AUTODETECT = 0x1 - public const int FF_BUG_AUTODETECT = 0x1; - /// FF_BUG_DC_CLIP = 0x1000 - public const int FF_BUG_DC_CLIP = 0x1000; - /// FF_BUG_DIRECT_BLOCKSIZE = 0x200 - public const int FF_BUG_DIRECT_BLOCKSIZE = 0x200; - /// FF_BUG_EDGE = 0x400 - public const int FF_BUG_EDGE = 0x400; - /// FF_BUG_HPEL_CHROMA = 0x800 - public const int FF_BUG_HPEL_CHROMA = 0x800; - /// FF_BUG_IEDGE = 0x8000 - public const int FF_BUG_IEDGE = 0x8000; - /// FF_BUG_MS = 0x2000 - public const int FF_BUG_MS = 0x2000; - /// FF_BUG_NO_PADDING = 0x10 - public const int FF_BUG_NO_PADDING = 0x10; - /// FF_BUG_QPEL_CHROMA = 0x40 - public const int FF_BUG_QPEL_CHROMA = 0x40; - /// FF_BUG_QPEL_CHROMA2 = 0x100 - public const int FF_BUG_QPEL_CHROMA2 = 0x100; - /// FF_BUG_STD_QPEL = 0x80 - public const int FF_BUG_STD_QPEL = 0x80; - /// FF_BUG_TRUNCATED = 0x4000 - public const int FF_BUG_TRUNCATED = 0x4000; - /// FF_BUG_UMP4 = 0x8 - public const int FF_BUG_UMP4 = 0x8; - /// FF_BUG_XVID_ILACE = 0x4 - public const int FF_BUG_XVID_ILACE = 0x4; - // public static FF_CEIL_RSHIFT = AV_CEIL_RSHIFT; - /// FF_CMP_BIT = 0x5 - public const int FF_CMP_BIT = 0x5; - /// FF_CMP_CHROMA = 0x100 - public const int FF_CMP_CHROMA = 0x100; - /// 
FF_CMP_DCT = 0x3 - public const int FF_CMP_DCT = 0x3; - /// FF_CMP_DCT264 = 0xe - public const int FF_CMP_DCT264 = 0xe; - /// FF_CMP_DCTMAX = 0xd - public const int FF_CMP_DCTMAX = 0xd; - /// FF_CMP_MEDIAN_SAD = 0xf - public const int FF_CMP_MEDIAN_SAD = 0xf; - /// FF_CMP_NSSE = 0xa - public const int FF_CMP_NSSE = 0xa; - /// FF_CMP_PSNR = 0x4 - public const int FF_CMP_PSNR = 0x4; - /// FF_CMP_RD = 0x6 - public const int FF_CMP_RD = 0x6; - /// FF_CMP_SAD = 0x0 - public const int FF_CMP_SAD = 0x0; - /// FF_CMP_SATD = 0x2 - public const int FF_CMP_SATD = 0x2; - /// FF_CMP_SSE = 0x1 - public const int FF_CMP_SSE = 0x1; - /// FF_CMP_VSAD = 0x8 - public const int FF_CMP_VSAD = 0x8; - /// FF_CMP_VSSE = 0x9 - public const int FF_CMP_VSSE = 0x9; - /// FF_CMP_W53 = 0xb - public const int FF_CMP_W53 = 0xb; - /// FF_CMP_W97 = 0xc - public const int FF_CMP_W97 = 0xc; - /// FF_CMP_ZERO = 0x7 - public const int FF_CMP_ZERO = 0x7; - /// FF_CODEC_PROPERTY_CLOSED_CAPTIONS = 0x2 - public const int FF_CODEC_PROPERTY_CLOSED_CAPTIONS = 0x2; - /// FF_CODEC_PROPERTY_FILM_GRAIN = 0x4 - public const int FF_CODEC_PROPERTY_FILM_GRAIN = 0x4; - /// FF_CODEC_PROPERTY_LOSSLESS = 0x1 - public const int FF_CODEC_PROPERTY_LOSSLESS = 0x1; - /// FF_COMPLIANCE_EXPERIMENTAL = -0x2 - public const int FF_COMPLIANCE_EXPERIMENTAL = -0x2; - /// FF_COMPLIANCE_NORMAL = 0x0 - public const int FF_COMPLIANCE_NORMAL = 0x0; - /// FF_COMPLIANCE_STRICT = 0x1 - public const int FF_COMPLIANCE_STRICT = 0x1; - /// FF_COMPLIANCE_UNOFFICIAL = -0x1 - public const int FF_COMPLIANCE_UNOFFICIAL = -0x1; - /// FF_COMPLIANCE_VERY_STRICT = 0x2 - public const int FF_COMPLIANCE_VERY_STRICT = 0x2; - /// FF_COMPRESSION_DEFAULT = -0x1 - public const int FF_COMPRESSION_DEFAULT = -0x1; - /// FF_DCT_ALTIVEC = 0x5 - public const int FF_DCT_ALTIVEC = 0x5; - /// FF_DCT_AUTO = 0x0 - public const int FF_DCT_AUTO = 0x0; - /// FF_DCT_FAAN = 0x6 - public const int FF_DCT_FAAN = 0x6; - /// FF_DCT_FASTINT = 0x1 - public const int FF_DCT_FASTINT = 
0x1; - /// FF_DCT_INT = 0x2 - public const int FF_DCT_INT = 0x2; - /// FF_DCT_MMX = 0x3 - public const int FF_DCT_MMX = 0x3; - /// FF_DEBUG_BITSTREAM = 0x4 - public const int FF_DEBUG_BITSTREAM = 0x4; - /// FF_DEBUG_BUFFERS = 0x8000 - public const int FF_DEBUG_BUFFERS = 0x8000; - /// FF_DEBUG_BUGS = 0x1000 - public const int FF_DEBUG_BUGS = 0x1000; - /// FF_DEBUG_DCT_COEFF = 0x40 - public const int FF_DEBUG_DCT_COEFF = 0x40; - /// FF_DEBUG_ER = 0x400 - public const int FF_DEBUG_ER = 0x400; - /// FF_DEBUG_GREEN_MD = 0x800000 - public const int FF_DEBUG_GREEN_MD = 0x800000; - /// FF_DEBUG_MB_TYPE = 0x8 - public const int FF_DEBUG_MB_TYPE = 0x8; - /// FF_DEBUG_MMCO = 0x800 - public const int FF_DEBUG_MMCO = 0x800; - /// FF_DEBUG_NOMC = 0x1000000 - public const int FF_DEBUG_NOMC = 0x1000000; - /// FF_DEBUG_PICT_INFO = 0x1 - public const int FF_DEBUG_PICT_INFO = 0x1; - /// FF_DEBUG_QP = 0x10 - public const int FF_DEBUG_QP = 0x10; - /// FF_DEBUG_RC = 0x2 - public const int FF_DEBUG_RC = 0x2; - /// FF_DEBUG_SKIP = 0x80 - public const int FF_DEBUG_SKIP = 0x80; - /// FF_DEBUG_STARTCODE = 0x100 - public const int FF_DEBUG_STARTCODE = 0x100; - /// FF_DEBUG_THREADS = 0x10000 - public const int FF_DEBUG_THREADS = 0x10000; - /// FF_DEBUG_VIS_MV_B_BACK = 0x4 - public const int FF_DEBUG_VIS_MV_B_BACK = 0x4; - /// FF_DEBUG_VIS_MV_B_FOR = 0x2 - public const int FF_DEBUG_VIS_MV_B_FOR = 0x2; - /// FF_DEBUG_VIS_MV_P_FOR = 0x1 - public const int FF_DEBUG_VIS_MV_P_FOR = 0x1; - /// FF_DECODE_ERROR_CONCEALMENT_ACTIVE = 4 - public const int FF_DECODE_ERROR_CONCEALMENT_ACTIVE = 0x4; - /// FF_DECODE_ERROR_DECODE_SLICES = 8 - public const int FF_DECODE_ERROR_DECODE_SLICES = 0x8; - /// FF_DECODE_ERROR_INVALID_BITSTREAM = 1 - public const int FF_DECODE_ERROR_INVALID_BITSTREAM = 0x1; - /// FF_DECODE_ERROR_MISSING_REFERENCE = 2 - public const int FF_DECODE_ERROR_MISSING_REFERENCE = 0x2; - /// FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO = 0x2 - public const int FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO = 0x2; 
- /// FF_DXVA2_WORKAROUND_SCALING_LIST_ZIGZAG = 0x1 - public const int FF_DXVA2_WORKAROUND_SCALING_LIST_ZIGZAG = 0x1; - /// FF_EC_DEBLOCK = 0x2 - public const int FF_EC_DEBLOCK = 0x2; - /// FF_EC_FAVOR_INTER = 0x100 - public const int FF_EC_FAVOR_INTER = 0x100; - /// FF_EC_GUESS_MVS = 0x1 - public const int FF_EC_GUESS_MVS = 0x1; - /// FF_FDEBUG_TS = 0x0001 - public const int FF_FDEBUG_TS = 0x1; - /// FF_HLS_TS_OPTIONS = (LIBAVFORMAT_VERSION_MAJOR < 60) - public const bool FF_HLS_TS_OPTIONS = LIBAVFORMAT_VERSION_MAJOR < 0x3c; - /// FF_IDCT_ALTIVEC = 0x8 - public const int FF_IDCT_ALTIVEC = 0x8; - /// FF_IDCT_ARM = 0x7 - public const int FF_IDCT_ARM = 0x7; - /// FF_IDCT_AUTO = 0x0 - public const int FF_IDCT_AUTO = 0x0; - /// FF_IDCT_FAAN = 0x14 - public const int FF_IDCT_FAAN = 0x14; - /// FF_IDCT_INT = 0x1 - public const int FF_IDCT_INT = 0x1; - /// FF_IDCT_NONE = 0x18 - public const int FF_IDCT_NONE = 0x18; - /// FF_IDCT_SIMPLE = 0x2 - public const int FF_IDCT_SIMPLE = 0x2; - /// FF_IDCT_SIMPLEARM = 0xa - public const int FF_IDCT_SIMPLEARM = 0xa; - /// FF_IDCT_SIMPLEARMV5TE = 0x10 - public const int FF_IDCT_SIMPLEARMV5TE = 0x10; - /// FF_IDCT_SIMPLEARMV6 = 0x11 - public const int FF_IDCT_SIMPLEARMV6 = 0x11; - /// FF_IDCT_SIMPLEAUTO = 0x80 - public const int FF_IDCT_SIMPLEAUTO = 0x80; - /// FF_IDCT_SIMPLEMMX = 0x3 - public const int FF_IDCT_SIMPLEMMX = 0x3; - /// FF_IDCT_SIMPLENEON = 0x16 - public const int FF_IDCT_SIMPLENEON = 0x16; - /// FF_IDCT_XVID = 0xe - public const int FF_IDCT_XVID = 0xe; - /// FF_LAMBDA_MAX = (256*128-1) - public const int FF_LAMBDA_MAX = 0x100 * 0x80 - 0x1; - /// FF_LAMBDA_SCALE = (1<<FF_LAMBDA_SHIFT) - public const int FF_LAMBDA_SCALE = 0x1 << FF_LAMBDA_SHIFT; - /// FF_LAMBDA_SHIFT = 7 - public const int FF_LAMBDA_SHIFT = 0x7; - /// FF_LEVEL_UNKNOWN = -0x63 - public const int FF_LEVEL_UNKNOWN = -0x63; - /// FF_LOSS_ALPHA = 0x8 - public const int FF_LOSS_ALPHA = 0x8; - /// FF_LOSS_CHROMA = 0x20 - public const int FF_LOSS_CHROMA = 0x20; - 
/// FF_LOSS_COLORQUANT = 0x10 - public const int FF_LOSS_COLORQUANT = 0x10; - /// FF_LOSS_COLORSPACE = 0x4 - public const int FF_LOSS_COLORSPACE = 0x4; - /// FF_LOSS_DEPTH = 0x2 - public const int FF_LOSS_DEPTH = 0x2; - /// FF_LOSS_RESOLUTION = 0x1 - public const int FF_LOSS_RESOLUTION = 0x1; - /// FF_MB_DECISION_BITS = 0x1 - public const int FF_MB_DECISION_BITS = 0x1; - /// FF_MB_DECISION_RD = 0x2 - public const int FF_MB_DECISION_RD = 0x2; - /// FF_MB_DECISION_SIMPLE = 0x0 - public const int FF_MB_DECISION_SIMPLE = 0x0; - /// FF_PROFILE_AAC_ELD = 0x26 - public const int FF_PROFILE_AAC_ELD = 0x26; - /// FF_PROFILE_AAC_HE = 0x4 - public const int FF_PROFILE_AAC_HE = 0x4; - /// FF_PROFILE_AAC_HE_V2 = 0x1c - public const int FF_PROFILE_AAC_HE_V2 = 0x1c; - /// FF_PROFILE_AAC_LD = 0x16 - public const int FF_PROFILE_AAC_LD = 0x16; - /// FF_PROFILE_AAC_LOW = 0x1 - public const int FF_PROFILE_AAC_LOW = 0x1; - /// FF_PROFILE_AAC_LTP = 0x3 - public const int FF_PROFILE_AAC_LTP = 0x3; - /// FF_PROFILE_AAC_MAIN = 0x0 - public const int FF_PROFILE_AAC_MAIN = 0x0; - /// FF_PROFILE_AAC_SSR = 0x2 - public const int FF_PROFILE_AAC_SSR = 0x2; - /// FF_PROFILE_ARIB_PROFILE_A = 0x0 - public const int FF_PROFILE_ARIB_PROFILE_A = 0x0; - /// FF_PROFILE_ARIB_PROFILE_C = 0x1 - public const int FF_PROFILE_ARIB_PROFILE_C = 0x1; - /// FF_PROFILE_AV1_HIGH = 0x1 - public const int FF_PROFILE_AV1_HIGH = 0x1; - /// FF_PROFILE_AV1_MAIN = 0x0 - public const int FF_PROFILE_AV1_MAIN = 0x0; - /// FF_PROFILE_AV1_PROFESSIONAL = 0x2 - public const int FF_PROFILE_AV1_PROFESSIONAL = 0x2; - /// FF_PROFILE_DNXHD = 0x0 - public const int FF_PROFILE_DNXHD = 0x0; - /// FF_PROFILE_DNXHR_444 = 0x5 - public const int FF_PROFILE_DNXHR_444 = 0x5; - /// FF_PROFILE_DNXHR_HQ = 0x3 - public const int FF_PROFILE_DNXHR_HQ = 0x3; - /// FF_PROFILE_DNXHR_HQX = 0x4 - public const int FF_PROFILE_DNXHR_HQX = 0x4; - /// FF_PROFILE_DNXHR_LB = 0x1 - public const int FF_PROFILE_DNXHR_LB = 0x1; - /// FF_PROFILE_DNXHR_SQ = 0x2 - 
public const int FF_PROFILE_DNXHR_SQ = 0x2; - /// FF_PROFILE_DTS = 0x14 - public const int FF_PROFILE_DTS = 0x14; - /// FF_PROFILE_DTS_96_24 = 0x28 - public const int FF_PROFILE_DTS_96_24 = 0x28; - /// FF_PROFILE_DTS_ES = 0x1e - public const int FF_PROFILE_DTS_ES = 0x1e; - /// FF_PROFILE_DTS_EXPRESS = 0x46 - public const int FF_PROFILE_DTS_EXPRESS = 0x46; - /// FF_PROFILE_DTS_HD_HRA = 0x32 - public const int FF_PROFILE_DTS_HD_HRA = 0x32; - /// FF_PROFILE_DTS_HD_MA = 0x3c - public const int FF_PROFILE_DTS_HD_MA = 0x3c; - /// FF_PROFILE_H264_BASELINE = 0x42 - public const int FF_PROFILE_H264_BASELINE = 0x42; - /// FF_PROFILE_H264_CAVLC_444 = 0x2c - public const int FF_PROFILE_H264_CAVLC_444 = 0x2c; - /// FF_PROFILE_H264_CONSTRAINED = 0x1 << 0x9 - public const int FF_PROFILE_H264_CONSTRAINED = 0x1 << 0x9; - /// FF_PROFILE_H264_CONSTRAINED_BASELINE = 0x42 | FF_PROFILE_H264_CONSTRAINED - public const int FF_PROFILE_H264_CONSTRAINED_BASELINE = 0x42 | FF_PROFILE_H264_CONSTRAINED; - /// FF_PROFILE_H264_EXTENDED = 0x58 - public const int FF_PROFILE_H264_EXTENDED = 0x58; - /// FF_PROFILE_H264_HIGH = 0x64 - public const int FF_PROFILE_H264_HIGH = 0x64; - /// FF_PROFILE_H264_HIGH_10 = 0x6e - public const int FF_PROFILE_H264_HIGH_10 = 0x6e; - /// FF_PROFILE_H264_HIGH_10_INTRA = 0x6e | FF_PROFILE_H264_INTRA - public const int FF_PROFILE_H264_HIGH_10_INTRA = 0x6e | FF_PROFILE_H264_INTRA; - /// FF_PROFILE_H264_HIGH_422 = 0x7a - public const int FF_PROFILE_H264_HIGH_422 = 0x7a; - /// FF_PROFILE_H264_HIGH_422_INTRA = 0x7a | FF_PROFILE_H264_INTRA - public const int FF_PROFILE_H264_HIGH_422_INTRA = 0x7a | FF_PROFILE_H264_INTRA; - /// FF_PROFILE_H264_HIGH_444 = 0x90 - public const int FF_PROFILE_H264_HIGH_444 = 0x90; - /// FF_PROFILE_H264_HIGH_444_INTRA = 0xf4 | FF_PROFILE_H264_INTRA - public const int FF_PROFILE_H264_HIGH_444_INTRA = 0xf4 | FF_PROFILE_H264_INTRA; - /// FF_PROFILE_H264_HIGH_444_PREDICTIVE = 0xf4 - public const int FF_PROFILE_H264_HIGH_444_PREDICTIVE = 0xf4; - /// 
FF_PROFILE_H264_INTRA = 0x1 << 0xb - public const int FF_PROFILE_H264_INTRA = 0x1 << 0xb; - /// FF_PROFILE_H264_MAIN = 0x4d - public const int FF_PROFILE_H264_MAIN = 0x4d; - /// FF_PROFILE_H264_MULTIVIEW_HIGH = 0x76 - public const int FF_PROFILE_H264_MULTIVIEW_HIGH = 0x76; - /// FF_PROFILE_H264_STEREO_HIGH = 0x80 - public const int FF_PROFILE_H264_STEREO_HIGH = 0x80; - /// FF_PROFILE_HEVC_MAIN = 0x1 - public const int FF_PROFILE_HEVC_MAIN = 0x1; - /// FF_PROFILE_HEVC_MAIN_10 = 0x2 - public const int FF_PROFILE_HEVC_MAIN_10 = 0x2; - /// FF_PROFILE_HEVC_MAIN_STILL_PICTURE = 0x3 - public const int FF_PROFILE_HEVC_MAIN_STILL_PICTURE = 0x3; - /// FF_PROFILE_HEVC_REXT = 0x4 - public const int FF_PROFILE_HEVC_REXT = 0x4; - /// FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION = 0x8000 - public const int FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION = 0x8000; - /// FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0 = 0x1 - public const int FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0 = 0x1; - /// FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1 = 0x2 - public const int FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1 = 0x2; - /// FF_PROFILE_JPEG2000_DCINEMA_2K = 0x3 - public const int FF_PROFILE_JPEG2000_DCINEMA_2K = 0x3; - /// FF_PROFILE_JPEG2000_DCINEMA_4K = 0x4 - public const int FF_PROFILE_JPEG2000_DCINEMA_4K = 0x4; - /// FF_PROFILE_KLVA_ASYNC = 0x1 - public const int FF_PROFILE_KLVA_ASYNC = 0x1; - /// FF_PROFILE_KLVA_SYNC = 0x0 - public const int FF_PROFILE_KLVA_SYNC = 0x0; - /// FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT = 0xc0 - public const int FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT = 0xc0; - /// FF_PROFILE_MJPEG_HUFFMAN_EXTENDED_SEQUENTIAL_DCT = 0xc1 - public const int FF_PROFILE_MJPEG_HUFFMAN_EXTENDED_SEQUENTIAL_DCT = 0xc1; - /// FF_PROFILE_MJPEG_HUFFMAN_LOSSLESS = 0xc3 - public const int FF_PROFILE_MJPEG_HUFFMAN_LOSSLESS = 0xc3; - /// FF_PROFILE_MJPEG_HUFFMAN_PROGRESSIVE_DCT = 0xc2 - public const int FF_PROFILE_MJPEG_HUFFMAN_PROGRESSIVE_DCT = 0xc2; - /// FF_PROFILE_MJPEG_JPEG_LS = 0xf7 - public const 
int FF_PROFILE_MJPEG_JPEG_LS = 0xf7; - /// FF_PROFILE_MPEG2_422 = 0x0 - public const int FF_PROFILE_MPEG2_422 = 0x0; - /// FF_PROFILE_MPEG2_AAC_HE = 0x83 - public const int FF_PROFILE_MPEG2_AAC_HE = 0x83; - /// FF_PROFILE_MPEG2_AAC_LOW = 0x80 - public const int FF_PROFILE_MPEG2_AAC_LOW = 0x80; - /// FF_PROFILE_MPEG2_HIGH = 0x1 - public const int FF_PROFILE_MPEG2_HIGH = 0x1; - /// FF_PROFILE_MPEG2_MAIN = 0x4 - public const int FF_PROFILE_MPEG2_MAIN = 0x4; - /// FF_PROFILE_MPEG2_SIMPLE = 0x5 - public const int FF_PROFILE_MPEG2_SIMPLE = 0x5; - /// FF_PROFILE_MPEG2_SNR_SCALABLE = 0x3 - public const int FF_PROFILE_MPEG2_SNR_SCALABLE = 0x3; - /// FF_PROFILE_MPEG2_SS = 0x2 - public const int FF_PROFILE_MPEG2_SS = 0x2; - /// FF_PROFILE_MPEG4_ADVANCED_CODING = 0xb - public const int FF_PROFILE_MPEG4_ADVANCED_CODING = 0xb; - /// FF_PROFILE_MPEG4_ADVANCED_CORE = 0xc - public const int FF_PROFILE_MPEG4_ADVANCED_CORE = 0xc; - /// FF_PROFILE_MPEG4_ADVANCED_REAL_TIME = 0x9 - public const int FF_PROFILE_MPEG4_ADVANCED_REAL_TIME = 0x9; - /// FF_PROFILE_MPEG4_ADVANCED_SCALABLE_TEXTURE = 0xd - public const int FF_PROFILE_MPEG4_ADVANCED_SCALABLE_TEXTURE = 0xd; - /// FF_PROFILE_MPEG4_ADVANCED_SIMPLE = 0xf - public const int FF_PROFILE_MPEG4_ADVANCED_SIMPLE = 0xf; - /// FF_PROFILE_MPEG4_BASIC_ANIMATED_TEXTURE = 0x7 - public const int FF_PROFILE_MPEG4_BASIC_ANIMATED_TEXTURE = 0x7; - /// FF_PROFILE_MPEG4_CORE = 0x2 - public const int FF_PROFILE_MPEG4_CORE = 0x2; - /// FF_PROFILE_MPEG4_CORE_SCALABLE = 0xa - public const int FF_PROFILE_MPEG4_CORE_SCALABLE = 0xa; - /// FF_PROFILE_MPEG4_HYBRID = 0x8 - public const int FF_PROFILE_MPEG4_HYBRID = 0x8; - /// FF_PROFILE_MPEG4_MAIN = 0x3 - public const int FF_PROFILE_MPEG4_MAIN = 0x3; - /// FF_PROFILE_MPEG4_N_BIT = 0x4 - public const int FF_PROFILE_MPEG4_N_BIT = 0x4; - /// FF_PROFILE_MPEG4_SCALABLE_TEXTURE = 0x5 - public const int FF_PROFILE_MPEG4_SCALABLE_TEXTURE = 0x5; - /// FF_PROFILE_MPEG4_SIMPLE = 0x0 - public const int FF_PROFILE_MPEG4_SIMPLE 
= 0x0; - /// FF_PROFILE_MPEG4_SIMPLE_FACE_ANIMATION = 0x6 - public const int FF_PROFILE_MPEG4_SIMPLE_FACE_ANIMATION = 0x6; - /// FF_PROFILE_MPEG4_SIMPLE_SCALABLE = 0x1 - public const int FF_PROFILE_MPEG4_SIMPLE_SCALABLE = 0x1; - /// FF_PROFILE_MPEG4_SIMPLE_STUDIO = 0xe - public const int FF_PROFILE_MPEG4_SIMPLE_STUDIO = 0xe; - /// FF_PROFILE_PRORES_4444 = 0x4 - public const int FF_PROFILE_PRORES_4444 = 0x4; - /// FF_PROFILE_PRORES_HQ = 0x3 - public const int FF_PROFILE_PRORES_HQ = 0x3; - /// FF_PROFILE_PRORES_LT = 0x1 - public const int FF_PROFILE_PRORES_LT = 0x1; - /// FF_PROFILE_PRORES_PROXY = 0x0 - public const int FF_PROFILE_PRORES_PROXY = 0x0; - /// FF_PROFILE_PRORES_STANDARD = 0x2 - public const int FF_PROFILE_PRORES_STANDARD = 0x2; - /// FF_PROFILE_PRORES_XQ = 0x5 - public const int FF_PROFILE_PRORES_XQ = 0x5; - /// FF_PROFILE_RESERVED = -0x64 - public const int FF_PROFILE_RESERVED = -0x64; - /// FF_PROFILE_SBC_MSBC = 0x1 - public const int FF_PROFILE_SBC_MSBC = 0x1; - /// FF_PROFILE_UNKNOWN = -0x63 - public const int FF_PROFILE_UNKNOWN = -0x63; - /// FF_PROFILE_VC1_ADVANCED = 0x3 - public const int FF_PROFILE_VC1_ADVANCED = 0x3; - /// FF_PROFILE_VC1_COMPLEX = 0x2 - public const int FF_PROFILE_VC1_COMPLEX = 0x2; - /// FF_PROFILE_VC1_MAIN = 0x1 - public const int FF_PROFILE_VC1_MAIN = 0x1; - /// FF_PROFILE_VC1_SIMPLE = 0x0 - public const int FF_PROFILE_VC1_SIMPLE = 0x0; - /// FF_PROFILE_VP9_0 = 0x0 - public const int FF_PROFILE_VP9_0 = 0x0; - /// FF_PROFILE_VP9_1 = 0x1 - public const int FF_PROFILE_VP9_1 = 0x1; - /// FF_PROFILE_VP9_2 = 0x2 - public const int FF_PROFILE_VP9_2 = 0x2; - /// FF_PROFILE_VP9_3 = 0x3 - public const int FF_PROFILE_VP9_3 = 0x3; - /// FF_PROFILE_VVC_MAIN_10 = 0x1 - public const int FF_PROFILE_VVC_MAIN_10 = 0x1; - /// FF_PROFILE_VVC_MAIN_10_444 = 0x21 - public const int FF_PROFILE_VVC_MAIN_10_444 = 0x21; - /// FF_QP2LAMBDA = 118 - public const int FF_QP2LAMBDA = 0x76; - /// FF_QUALITY_SCALE = FF_LAMBDA_SCALE - public const int 
FF_QUALITY_SCALE = FF_LAMBDA_SCALE; - /// FF_SUB_CHARENC_MODE_AUTOMATIC = 0x0 - public const int FF_SUB_CHARENC_MODE_AUTOMATIC = 0x0; - /// FF_SUB_CHARENC_MODE_DO_NOTHING = -0x1 - public const int FF_SUB_CHARENC_MODE_DO_NOTHING = -0x1; - /// FF_SUB_CHARENC_MODE_IGNORE = 0x2 - public const int FF_SUB_CHARENC_MODE_IGNORE = 0x2; - /// FF_SUB_CHARENC_MODE_PRE_DECODER = 0x1 - public const int FF_SUB_CHARENC_MODE_PRE_DECODER = 0x1; - /// FF_SUB_TEXT_FMT_ASS = 0x0 - public const int FF_SUB_TEXT_FMT_ASS = 0x0; - /// FF_THREAD_FRAME = 0x1 - public const int FF_THREAD_FRAME = 0x1; - /// FF_THREAD_SLICE = 0x2 - public const int FF_THREAD_SLICE = 0x2; - // public static FFABS = (a) ((a) >= 0 ? (a) : (-(a))); - // public static FFABS64U = (a) ((a) <= 0 ? -(uint64_t)(a) : (uint64_t)(a)); - // public static FFABSU = (a) ((a) <= 0 ? -(unsigned)(a) : (unsigned)(a)); - // public static FFALIGN = x; - // public static FFDIFFSIGN = x; - // public static FFERRTAG = a; - // public static FFMAX = (a,b) ((a) > (b) ? (a) : (b)); - // public static FFMAX3 = a; - // public static FFMIN = (a,b) ((a) > (b) ? (b) : (a)); - // public static FFMIN3 = a; - // public static FFNABS = (a) ((a) <= 0 ? (a) : (-(a))); - // public static FFSIGN = (a) ((a) > 0 ? 
1 : -1); - // public static FFSWAP = (type,a,b) do{type SWAP_tmp= b; b= a; a= SWAP_tmp;}while(0); - // public static FFUDIV = (a,b) (((a)>0 ?(a):(a)-(b)+1) / (b)); - // public static FFUMOD = a; - // public static GET_UTF16 = (val, GET_16BIT, ERROR)val = (GET_16BIT);{unsigned int hi = val - 0xD800;if (hi < 0x800) {val = (GET_16BIT) - 0xDC00;if (val > 0x3FFU || hi > 0x3FFU){ERROR}val += (hi<<10) + 0x10000;}}; - // public static GET_UTF8 = (val, GET_BYTE, ERROR)val= (GET_BYTE);{uint32_t top = (val & 128) >> 1;if ((val & 0xc0) == 0x80 || val >= 0xFE){ERROR}while (val & top) {unsigned int tmp = (GET_BYTE) - 128;if(tmp>>6){ERROR}val= (val<<6) + tmp;top <<= 5;}val &= (top << 1) - 1;}; - /// LIBAVCODEC_BUILD = LIBAVCODEC_VERSION_INT - public static readonly int LIBAVCODEC_BUILD = LIBAVCODEC_VERSION_INT; - /// LIBAVCODEC_IDENT = "Lavc" - public const string LIBAVCODEC_IDENT = "Lavc"; - /// LIBAVCODEC_VERSION = AV_VERSION(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO) - public static readonly string LIBAVCODEC_VERSION = AV_VERSION(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO); - /// LIBAVCODEC_VERSION_INT = AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO) - public static readonly int LIBAVCODEC_VERSION_INT = AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO); - /// LIBAVCODEC_VERSION_MAJOR = 59 - public const int LIBAVCODEC_VERSION_MAJOR = 0x3b; - /// LIBAVCODEC_VERSION_MICRO = 0x64 - public const int LIBAVCODEC_VERSION_MICRO = 0x64; - /// LIBAVCODEC_VERSION_MINOR = 0x25 - public const int LIBAVCODEC_VERSION_MINOR = 0x25; - /// LIBAVDEVICE_BUILD = LIBAVDEVICE_VERSION_INT - public static readonly int LIBAVDEVICE_BUILD = LIBAVDEVICE_VERSION_INT; - /// LIBAVDEVICE_IDENT = "Lavd" AV_STRINGIFY(LIBAVDEVICE_VERSION) - public const string LIBAVDEVICE_IDENT = "Lavd"; - /// LIBAVDEVICE_VERSION = AV_VERSION(LIBAVDEVICE_VERSION_MAJOR, 
LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO) - public static readonly string LIBAVDEVICE_VERSION = AV_VERSION(LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO); - /// LIBAVDEVICE_VERSION_INT = AV_VERSION_INT(LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO) - public static readonly int LIBAVDEVICE_VERSION_INT = AV_VERSION_INT(LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO); - /// LIBAVDEVICE_VERSION_MAJOR = 59 - public const int LIBAVDEVICE_VERSION_MAJOR = 0x3b; - /// LIBAVDEVICE_VERSION_MICRO = 100 - public const int LIBAVDEVICE_VERSION_MICRO = 0x64; - /// LIBAVDEVICE_VERSION_MINOR = 7 - public const int LIBAVDEVICE_VERSION_MINOR = 0x7; - /// LIBAVFILTER_BUILD = LIBAVFILTER_VERSION_INT - public static readonly int LIBAVFILTER_BUILD = LIBAVFILTER_VERSION_INT; - /// LIBAVFILTER_IDENT = "Lavfi" - public const string LIBAVFILTER_IDENT = "Lavfi"; - /// LIBAVFILTER_VERSION = AV_VERSION(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO) - public static readonly string LIBAVFILTER_VERSION = AV_VERSION(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO); - /// LIBAVFILTER_VERSION_INT = AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO) - public static readonly int LIBAVFILTER_VERSION_INT = AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO); - /// LIBAVFILTER_VERSION_MAJOR = 0x8 - public const int LIBAVFILTER_VERSION_MAJOR = 0x8; - /// LIBAVFILTER_VERSION_MICRO = 0x64 - public const int LIBAVFILTER_VERSION_MICRO = 0x64; - /// LIBAVFILTER_VERSION_MINOR = 0x2c - public const int LIBAVFILTER_VERSION_MINOR = 0x2c; - /// LIBAVFORMAT_BUILD = LIBAVFORMAT_VERSION_INT - public static readonly int LIBAVFORMAT_BUILD = LIBAVFORMAT_VERSION_INT; - /// LIBAVFORMAT_IDENT = "Lavf" AV_STRINGIFY(LIBAVFORMAT_VERSION) - public const string 
LIBAVFORMAT_IDENT = "Lavf"; - /// LIBAVFORMAT_VERSION = AV_VERSION(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO) - public static readonly string LIBAVFORMAT_VERSION = AV_VERSION(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO); - /// LIBAVFORMAT_VERSION_INT = AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO) - public static readonly int LIBAVFORMAT_VERSION_INT = AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO); - /// LIBAVFORMAT_VERSION_MAJOR = 59 - public const int LIBAVFORMAT_VERSION_MAJOR = 0x3b; - /// LIBAVFORMAT_VERSION_MICRO = 100 - public const int LIBAVFORMAT_VERSION_MICRO = 0x64; - /// LIBAVFORMAT_VERSION_MINOR = 27 - public const int LIBAVFORMAT_VERSION_MINOR = 0x1b; - /// LIBAVUTIL_BUILD = LIBAVUTIL_VERSION_INT - public static readonly int LIBAVUTIL_BUILD = LIBAVUTIL_VERSION_INT; - /// LIBAVUTIL_IDENT = "Lavu" AV_STRINGIFY(LIBAVUTIL_VERSION) - public const string LIBAVUTIL_IDENT = "Lavu"; - /// LIBAVUTIL_VERSION = AV_VERSION(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO) - public static readonly string LIBAVUTIL_VERSION = AV_VERSION(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO); - /// LIBAVUTIL_VERSION_INT = AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO) - public static readonly int LIBAVUTIL_VERSION_INT = AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO); - /// LIBAVUTIL_VERSION_MAJOR = 57 - public const int LIBAVUTIL_VERSION_MAJOR = 0x39; - /// LIBAVUTIL_VERSION_MICRO = 100 - public const int LIBAVUTIL_VERSION_MICRO = 0x64; - /// LIBAVUTIL_VERSION_MINOR = 28 - public const int LIBAVUTIL_VERSION_MINOR = 0x1c; - /// LIBPOSTPROC_BUILD = LIBPOSTPROC_VERSION_INT - public static readonly int LIBPOSTPROC_BUILD = LIBPOSTPROC_VERSION_INT; - /// LIBPOSTPROC_IDENT = 
"postproc" - public const string LIBPOSTPROC_IDENT = "postproc"; - /// LIBPOSTPROC_VERSION = AV_VERSION(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO) - public static readonly string LIBPOSTPROC_VERSION = AV_VERSION(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO); - /// LIBPOSTPROC_VERSION_INT = AV_VERSION_INT(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO) - public static readonly int LIBPOSTPROC_VERSION_INT = AV_VERSION_INT(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO); - /// LIBPOSTPROC_VERSION_MAJOR = 0x38 - public const int LIBPOSTPROC_VERSION_MAJOR = 0x38; - /// LIBPOSTPROC_VERSION_MICRO = 0x64 - public const int LIBPOSTPROC_VERSION_MICRO = 0x64; - /// LIBPOSTPROC_VERSION_MINOR = 0x6 - public const int LIBPOSTPROC_VERSION_MINOR = 0x6; - /// LIBSWRESAMPLE_BUILD = LIBSWRESAMPLE_VERSION_INT - public static readonly int LIBSWRESAMPLE_BUILD = LIBSWRESAMPLE_VERSION_INT; - /// LIBSWRESAMPLE_IDENT = "SwR" - public const string LIBSWRESAMPLE_IDENT = "SwR"; - /// LIBSWRESAMPLE_VERSION = AV_VERSION(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO) - public static readonly string LIBSWRESAMPLE_VERSION = AV_VERSION(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO); - /// LIBSWRESAMPLE_VERSION_INT = AV_VERSION_INT(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO) - public static readonly int LIBSWRESAMPLE_VERSION_INT = AV_VERSION_INT(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO); - /// LIBSWRESAMPLE_VERSION_MAJOR = 0x4 - public const int LIBSWRESAMPLE_VERSION_MAJOR = 0x4; - /// LIBSWRESAMPLE_VERSION_MICRO = 0x64 - public const int LIBSWRESAMPLE_VERSION_MICRO = 0x64; - /// LIBSWRESAMPLE_VERSION_MINOR = 0x7 - public const int LIBSWRESAMPLE_VERSION_MINOR = 0x7; - /// LIBSWSCALE_BUILD = 
LIBSWSCALE_VERSION_INT - public static readonly int LIBSWSCALE_BUILD = LIBSWSCALE_VERSION_INT; - /// LIBSWSCALE_IDENT = "SwS" - public const string LIBSWSCALE_IDENT = "SwS"; - /// LIBSWSCALE_VERSION = AV_VERSION(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO) - public static readonly string LIBSWSCALE_VERSION = AV_VERSION(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO); - /// LIBSWSCALE_VERSION_INT = AV_VERSION_INT(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO) - public static readonly int LIBSWSCALE_VERSION_INT = AV_VERSION_INT(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO); - /// LIBSWSCALE_VERSION_MAJOR = 0x6 - public const int LIBSWSCALE_VERSION_MAJOR = 0x6; - /// LIBSWSCALE_VERSION_MICRO = 0x64 - public const int LIBSWSCALE_VERSION_MICRO = 0x64; - /// LIBSWSCALE_VERSION_MINOR = 0x7 - public const int LIBSWSCALE_VERSION_MINOR = 0x7; - /// M_E = 2.7182818284590452354 - public const double M_E = 2.718281828459045D; - /// M_LN10 = 2.30258509299404568402 - public const double M_LN10 = 2.302585092994046D; - /// M_LN2 = 0.69314718055994530942 - public const double M_LN2 = 0.6931471805599453D; - /// M_LOG2_10 = 3.32192809488736234787 - public const double M_LOG2_10 = 3.321928094887362D; - /// M_PHI = 1.61803398874989484820 - public const double M_PHI = 1.618033988749895D; - /// M_PI = 3.14159265358979323846 - public const double M_PI = 3.141592653589793D; - /// M_PI_2 = 1.57079632679489661923 - public const double M_PI_2 = 1.5707963267948966D; - /// M_SQRT1_2 = 0.70710678118654752440 - public const double M_SQRT1_2 = 0.7071067811865476D; - /// M_SQRT2 = 1.41421356237309504880 - public const double M_SQRT2 = 1.4142135623730951D; - // public static MKBETAG = a; - // public static MKTAG = a; - /// PARSER_FLAG_COMPLETE_FRAMES = 0x1 - public const int PARSER_FLAG_COMPLETE_FRAMES = 0x1; - /// PARSER_FLAG_FETCHED_OFFSET = 0x4 - public const int 
PARSER_FLAG_FETCHED_OFFSET = 0x4; - /// PARSER_FLAG_ONCE = 0x2 - public const int PARSER_FLAG_ONCE = 0x2; - /// PARSER_FLAG_USE_CODEC_TS = 0x1000 - public const int PARSER_FLAG_USE_CODEC_TS = 0x1000; - /// PP_CPU_CAPS_3DNOW = 0x40000000 - public const int PP_CPU_CAPS_3DNOW = 0x40000000; - /// PP_CPU_CAPS_ALTIVEC = 0x10000000 - public const int PP_CPU_CAPS_ALTIVEC = 0x10000000; - /// PP_CPU_CAPS_AUTO = 0x80000 - public const int PP_CPU_CAPS_AUTO = 0x80000; - /// PP_CPU_CAPS_MMX = 0x80000000U - public const uint PP_CPU_CAPS_MMX = 0x80000000U; - /// PP_CPU_CAPS_MMX2 = 0x20000000 - public const int PP_CPU_CAPS_MMX2 = 0x20000000; - /// PP_FORMAT = 0x8 - public const int PP_FORMAT = 0x8; - /// PP_FORMAT_411 = 0x2 | PP_FORMAT - public const int PP_FORMAT_411 = 0x2 | PP_FORMAT; - /// PP_FORMAT_420 = 0x11 | PP_FORMAT - public const int PP_FORMAT_420 = 0x11 | PP_FORMAT; - /// PP_FORMAT_422 = 0x1 | PP_FORMAT - public const int PP_FORMAT_422 = 0x1 | PP_FORMAT; - /// PP_FORMAT_440 = 0x10 | PP_FORMAT - public const int PP_FORMAT_440 = 0x10 | PP_FORMAT; - /// PP_FORMAT_444 = 0x0 | PP_FORMAT - public const int PP_FORMAT_444 = 0x0 | PP_FORMAT; - /// PP_PICT_TYPE_QP2 = 0x10 - public const int PP_PICT_TYPE_QP2 = 0x10; - /// PP_QUALITY_MAX = 0x6 - public const int PP_QUALITY_MAX = 0x6; - // public static PUT_UTF16 = (val, tmp, PUT_16BIT){uint32_t in = val;if (in < 0x10000) {tmp = in;PUT_16BIT} else {tmp = 0xD800 | ((in - 0x10000) >> 10);PUT_16BITtmp = 0xDC00 | ((in - 0x10000) & 0x3FF);PUT_16BIT}}; - // public static PUT_UTF8 = (val, tmp, PUT_BYTE){int bytes, shift;uint32_t in = val;if (in < 0x80) {tmp = in;PUT_BYTE} else {bytes = (av_log2(in) + 4) / 5;shift = (bytes - 1) * 6;tmp = (256 - (256 >> bytes)) | (in >> shift);PUT_BYTEwhile (shift >= 6) {shift -= 6;tmp = 0x80 | ((in >> shift) & 0x3f);PUT_BYTE}}}; - // public static ROUNDED_DIV = (a,b) (((a)>=0 ? (a) + ((b)>>1) : (a) - ((b)>>1))/(b)); - // public static RSHIFT = (a,b) ((a) > 0 ? 
((a) + ((1<<(b))>>1))>>(b) : ((a) + ((1<<(b))>>1)-1)>>(b)); - /// SLICE_FLAG_ALLOW_FIELD = 0x2 - public const int SLICE_FLAG_ALLOW_FIELD = 0x2; - /// SLICE_FLAG_ALLOW_PLANE = 0x4 - public const int SLICE_FLAG_ALLOW_PLANE = 0x4; - /// SLICE_FLAG_CODED_ORDER = 0x1 - public const int SLICE_FLAG_CODED_ORDER = 0x1; - /// SWR_FLAG_RESAMPLE = 0x1 - public const int SWR_FLAG_RESAMPLE = 0x1; - /// SWS_ACCURATE_RND = 0x40000 - public const int SWS_ACCURATE_RND = 0x40000; - /// SWS_AREA = 0x20 - public const int SWS_AREA = 0x20; - /// SWS_BICUBIC = 0x4 - public const int SWS_BICUBIC = 0x4; - /// SWS_BICUBLIN = 0x40 - public const int SWS_BICUBLIN = 0x40; - /// SWS_BILINEAR = 0x2 - public const int SWS_BILINEAR = 0x2; - /// SWS_BITEXACT = 0x80000 - public const int SWS_BITEXACT = 0x80000; - /// SWS_CS_BT2020 = 0x9 - public const int SWS_CS_BT2020 = 0x9; - /// SWS_CS_DEFAULT = 0x5 - public const int SWS_CS_DEFAULT = 0x5; - /// SWS_CS_FCC = 0x4 - public const int SWS_CS_FCC = 0x4; - /// SWS_CS_ITU601 = 0x5 - public const int SWS_CS_ITU601 = 0x5; - /// SWS_CS_ITU624 = 0x5 - public const int SWS_CS_ITU624 = 0x5; - /// SWS_CS_ITU709 = 0x1 - public const int SWS_CS_ITU709 = 0x1; - /// SWS_CS_SMPTE170M = 0x5 - public const int SWS_CS_SMPTE170M = 0x5; - /// SWS_CS_SMPTE240M = 0x7 - public const int SWS_CS_SMPTE240M = 0x7; - /// SWS_DIRECT_BGR = 0x8000 - public const int SWS_DIRECT_BGR = 0x8000; - /// SWS_ERROR_DIFFUSION = 0x800000 - public const int SWS_ERROR_DIFFUSION = 0x800000; - /// SWS_FAST_BILINEAR = 0x1 - public const int SWS_FAST_BILINEAR = 0x1; - /// SWS_FULL_CHR_H_INP = 0x4000 - public const int SWS_FULL_CHR_H_INP = 0x4000; - /// SWS_FULL_CHR_H_INT = 0x2000 - public const int SWS_FULL_CHR_H_INT = 0x2000; - /// SWS_GAUSS = 0x80 - public const int SWS_GAUSS = 0x80; - /// SWS_LANCZOS = 0x200 - public const int SWS_LANCZOS = 0x200; - /// SWS_MAX_REDUCE_CUTOFF = 0.002D - public const double SWS_MAX_REDUCE_CUTOFF = 0.002D; - /// SWS_PARAM_DEFAULT = 0x1e240 - public const int 
SWS_PARAM_DEFAULT = 0x1e240; - /// SWS_POINT = 0x10 - public const int SWS_POINT = 0x10; - /// SWS_PRINT_INFO = 0x1000 - public const int SWS_PRINT_INFO = 0x1000; - /// SWS_SINC = 0x100 - public const int SWS_SINC = 0x100; - /// SWS_SPLINE = 0x400 - public const int SWS_SPLINE = 0x400; - /// SWS_SRC_V_CHR_DROP_MASK = 0x30000 - public const int SWS_SRC_V_CHR_DROP_MASK = 0x30000; - /// SWS_SRC_V_CHR_DROP_SHIFT = 0x10 - public const int SWS_SRC_V_CHR_DROP_SHIFT = 0x10; - /// SWS_X = 0x8 - public const int SWS_X = 0x8; - } -} diff --git a/FFmpeg.AutoGen/FFmpeg.structs.g.cs b/FFmpeg.AutoGen/FFmpeg.structs.g.cs deleted file mode 100644 index ae978bd0..00000000 --- a/FFmpeg.AutoGen/FFmpeg.structs.g.cs +++ /dev/null @@ -1,2453 +0,0 @@ -using System; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen -{ - public unsafe partial struct _iobuf - { - public void* @_Placeholder; - } - - /// Rational number (pair of numerator and denominator). - public unsafe partial struct AVRational - { - /// Numerator - public int @num; - /// Denominator - public int @den; - } - - /// Describe the class of an AVClass context structure. That is an arbitrary struct of which the first field is a pointer to an AVClass struct (e.g. AVCodecContext, AVFormatContext etc.). - public unsafe partial struct AVClass - { - /// The name of the class; usually it is the same name as the context structure type to which the AVClass is associated. - public byte* @class_name; - /// A pointer to a function which returns the name of a context instance ctx associated with the class. - public AVClass_item_name_func @item_name; - /// a pointer to the first option specified in the class if any or NULL - public AVOption* @option; - /// LIBAVUTIL_VERSION with which this structure was created. This is used to allow fields to be added without requiring major version bumps everywhere. - public int @version; - /// Offset in the structure where log_level_offset is stored. 
0 means there is no such variable - public int @log_level_offset_offset; - /// Offset in the structure where a pointer to the parent context for logging is stored. For example a decoder could pass its AVCodecContext to eval as such a parent context, which an av_log() implementation could then leverage to display the parent context. The offset can be NULL. - public int @parent_log_context_offset; - /// Category used for visualization (like color) This is only set if the category is equal for all objects using this class. available since version (51 << 16 | 56 << 8 | 100) - public AVClassCategory @category; - /// Callback to return the category. available since version (51 << 16 | 59 << 8 | 100) - public AVClass_get_category_func @get_category; - /// Callback to return the supported/allowed ranges. available since version (52.12) - public AVClass_query_ranges_func @query_ranges; - /// Return next AVOptions-enabled child or NULL - public AVClass_child_next_func @child_next; - /// Iterate over the AVClasses corresponding to potential AVOptions-enabled children. - public AVClass_child_class_iterate_func @child_class_iterate; - } - - /// AVOption - public unsafe partial struct AVOption - { - public byte* @name; - /// short English help text - public byte* @help; - /// The offset relative to the context structure where the option value is stored. It should be 0 for named constants. - public int @offset; - public AVOptionType @type; - public AVOption_default_val @default_val; - /// minimum valid value for the option - public double @min; - /// maximum valid value for the option - public double @max; - public int @flags; - /// The logical unit to which the option belongs. Non-constant options and corresponding named constants share the same unit. May be NULL. - public byte* @unit; - } - - /// List of AVOptionRange structs. - public unsafe partial struct AVOptionRanges - { - /// Array of option ranges. - public AVOptionRange** @range; - /// Number of ranges per component. 
- public int @nb_ranges; - /// Number of componentes. - public int @nb_components; - } - - /// An AVChannelCustom defines a single channel within a custom order layout - public unsafe partial struct AVChannelCustom - { - public AVChannel @id; - public byte_array16 @name; - public void* @opaque; - } - - /// An AVChannelLayout holds information about the channel layout of audio data. - public unsafe partial struct AVChannelLayout - { - /// Channel order used in this layout. This is a mandatory field. - public AVChannelOrder @order; - /// Number of channels in this layout. Mandatory field. - public int @nb_channels; - public AVChannelLayout_u @u; - /// For some private data of the user. - public void* @opaque; - } - - /// Details about which channels are present in this layout. For AV_CHANNEL_ORDER_UNSPEC, this field is undefined and must not be used. - [StructLayout(LayoutKind.Explicit)] - public unsafe partial struct AVChannelLayout_u - { - /// This member must be used for AV_CHANNEL_ORDER_NATIVE, and may be used for AV_CHANNEL_ORDER_AMBISONIC to signal non-diegetic channels. It is a bitmask, where the position of each set bit means that the AVChannel with the corresponding value is present. - [FieldOffset(0)] - public ulong @mask; - /// This member must be used when the channel order is AV_CHANNEL_ORDER_CUSTOM. It is a nb_channels-sized array, with each element signalling the presence of the AVChannel with the corresponding value in map[i].id. - [FieldOffset(0)] - public AVChannelCustom* @map; - } - - /// Structure to hold side data for an AVFrame. - public unsafe partial struct AVFrameSideData - { - public AVFrameSideDataType @type; - public byte* @data; - public ulong @size; - public AVDictionary* @metadata; - public AVBufferRef* @buf; - } - - /// A reference to a data buffer. - public unsafe partial struct AVBufferRef - { - public AVBuffer* @buffer; - /// The data buffer. 
It is considered writable if and only if this is the only reference to the buffer, in which case av_buffer_is_writable() returns 1. - public byte* @data; - /// Size of data in bytes. - public ulong @size; - } - - /// Structure describing a single Region Of Interest. - public unsafe partial struct AVRegionOfInterest - { - /// Must be set to the size of this data structure (that is, sizeof(AVRegionOfInterest)). - public uint @self_size; - /// Distance in pixels from the top edge of the frame to the top and bottom edges and from the left edge of the frame to the left and right edges of the rectangle defining this region of interest. - public int @top; - public int @bottom; - public int @left; - public int @right; - /// Quantisation offset. - public AVRational @qoffset; - } - - /// This structure describes decoded (raw) audio or video data. - public unsafe partial struct AVFrame - { - /// pointer to the picture/channel planes. This might be different from the first allocated byte. For video, it could even point to the end of the image data. - public byte_ptrArray8 @data; - /// For video, a positive or negative value, which is typically indicating the size in bytes of each picture line, but it can also be: - the negative byte size of lines for vertical flipping (with data[n] pointing to the end of the data - a positive or negative multiple of the byte size as for accessing even and odd fields of a frame (possibly flipped) - public int_array8 @linesize; - /// pointers to the data planes/channels. - public byte** @extended_data; - /// Video frames only. The coded dimensions (in pixels) of the video frame, i.e. the size of the rectangle that contains some well-defined values. - public int @width; - /// Video frames only. The coded dimensions (in pixels) of the video frame, i.e. the size of the rectangle that contains some well-defined values. 
- public int @height; - /// number of audio samples (per channel) described by this frame - public int @nb_samples; - /// format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames, enum AVSampleFormat for audio) - public int @format; - /// 1 -> keyframe, 0-> not - public int @key_frame; - /// Picture type of the frame. - public AVPictureType @pict_type; - /// Sample aspect ratio for the video frame, 0/1 if unknown/unspecified. - public AVRational @sample_aspect_ratio; - /// Presentation timestamp in time_base units (time when frame should be shown to user). - public long @pts; - /// DTS copied from the AVPacket that triggered returning this frame. (if frame threading isn't used) This is also the Presentation time of this AVFrame calculated from only AVPacket.dts values without pts values. - public long @pkt_dts; - /// Time base for the timestamps in this frame. In the future, this field may be set on frames output by decoders or filters, but its value will be by default ignored on input to encoders or filters. - public AVRational @time_base; - /// picture number in bitstream order - public int @coded_picture_number; - /// picture number in display order - public int @display_picture_number; - /// quality (between 1 (good) and FF_LAMBDA_MAX (bad)) - public int @quality; - /// for some private data of the user - public void* @opaque; - /// When decoding, this signals how much the picture must be delayed. extra_delay = repeat_pict / (2*fps) - public int @repeat_pict; - /// The content of the picture is interlaced. - public int @interlaced_frame; - /// If the content is interlaced, is top field displayed first. - public int @top_field_first; - /// Tell user application that palette has changed from previous frame. - public int @palette_has_changed; - /// reordered opaque 64 bits (generally an integer or a double precision float PTS but can be anything). 
The user sets AVCodecContext.reordered_opaque to represent the input at that time, the decoder reorders values as needed and sets AVFrame.reordered_opaque to exactly one of the values provided by the user through AVCodecContext.reordered_opaque - public long @reordered_opaque; - /// Sample rate of the audio data. - public int @sample_rate; - /// Channel layout of the audio data. - [Obsolete("use ch_layout instead")] - public ulong @channel_layout; - /// AVBuffer references backing the data for this frame. All the pointers in data and extended_data must point inside one of the buffers in buf or extended_buf. This array must be filled contiguously -- if buf[i] is non-NULL then buf[j] must also be non-NULL for all j < i. - public AVBufferRef_ptrArray8 @buf; - /// For planar audio which requires more than AV_NUM_DATA_POINTERS AVBufferRef pointers, this array will hold all the references which cannot fit into AVFrame.buf. - public AVBufferRef** @extended_buf; - /// Number of elements in extended_buf. - public int @nb_extended_buf; - public AVFrameSideData** @side_data; - public int @nb_side_data; - /// Frame flags, a combination of lavu_frame_flags - public int @flags; - /// MPEG vs JPEG YUV range. - encoding: Set by user - decoding: Set by libavcodec - public AVColorRange @color_range; - public AVColorPrimaries @color_primaries; - public AVColorTransferCharacteristic @color_trc; - /// YUV colorspace type. - encoding: Set by user - decoding: Set by libavcodec - public AVColorSpace @colorspace; - public AVChromaLocation @chroma_location; - /// frame timestamp estimated using various heuristics, in stream time base - encoding: unused - decoding: set by libavcodec, read by user. - public long @best_effort_timestamp; - /// reordered pos from the last AVPacket that has been input into the decoder - encoding: unused - decoding: Read by user. - public long @pkt_pos; - /// duration of the corresponding packet, expressed in AVStream->time_base units, 0 if unknown. 
- encoding: unused - decoding: Read by user. - public long @pkt_duration; - /// metadata. - encoding: Set by user. - decoding: Set by libavcodec. - public AVDictionary* @metadata; - /// decode error flags of the frame, set to a combination of FF_DECODE_ERROR_xxx flags if the decoder produced a frame, but there were errors during the decoding. - encoding: unused - decoding: set by libavcodec, read by user. - public int @decode_error_flags; - /// number of audio channels, only used for audio. - encoding: unused - decoding: Read by user. - [Obsolete("use ch_layout instead")] - public int @channels; - /// size of the corresponding packet containing the compressed frame. It is set to a negative value if unknown. - encoding: unused - decoding: set by libavcodec, read by user. - public int @pkt_size; - /// For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame. - public AVBufferRef* @hw_frames_ctx; - /// AVBufferRef for free use by the API user. FFmpeg will never check the contents of the buffer ref. FFmpeg calls av_buffer_unref() on it when the frame is unreferenced. av_frame_copy_props() calls create a new reference with av_buffer_ref() for the target frame's opaque_ref field. - public AVBufferRef* @opaque_ref; - /// cropping Video frames only. The number of pixels to discard from the the top/bottom/left/right border of the frame to obtain the sub-rectangle of the frame intended for presentation. @{ - public ulong @crop_top; - public ulong @crop_bottom; - public ulong @crop_left; - public ulong @crop_right; - /// AVBufferRef for internal use by a single libav* library. Must not be used to transfer data between libraries. Has to be NULL when ownership of the frame leaves the respective library. - public AVBufferRef* @private_ref; - /// Channel layout of the audio data. 
- public AVChannelLayout @ch_layout; - } - - public unsafe partial struct AVDictionaryEntry - { - public byte* @key; - public byte* @value; - } - - /// A single allowed range of values, or a single allowed value. - public unsafe partial struct AVOptionRange - { - public byte* @str; - /// Value range. For string ranges this represents the min/max length. For dimensions this represents the min/max pixel count or width/height in multi-component case. - public double @value_min; - /// Value range. For string ranges this represents the min/max length. For dimensions this represents the min/max pixel count or width/height in multi-component case. - public double @value_max; - /// Value's component range. For string this represents the unicode range for chars, 0-127 limits to ASCII. - public double @component_min; - /// Value's component range. For string this represents the unicode range for chars, 0-127 limits to ASCII. - public double @component_max; - /// Range flag. If set to 1 the struct encodes a range, if set to 0 a single value. - public int @is_range; - } - - /// the default value for scalar options - [StructLayout(LayoutKind.Explicit)] - public unsafe partial struct AVOption_default_val - { - [FieldOffset(0)] - public long @i64; - [FieldOffset(0)] - public double @dbl; - [FieldOffset(0)] - public byte* @str; - [FieldOffset(0)] - public AVRational @q; - } - - /// Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes of an image. It also stores the subsampling factors and number of components. - public unsafe partial struct AVPixFmtDescriptor - { - public byte* @name; - /// The number of components each pixel has, (1-4) - public byte @nb_components; - /// Amount to shift the luma width right to find the chroma width. For YV12 this is 1 for example. chroma_width = AV_CEIL_RSHIFT(luma_width, log2_chroma_w) The note above is needed to ensure rounding up. This value only refers to the chroma components. 
- public byte @log2_chroma_w; - /// Amount to shift the luma height right to find the chroma height. For YV12 this is 1 for example. chroma_height= AV_CEIL_RSHIFT(luma_height, log2_chroma_h) The note above is needed to ensure rounding up. This value only refers to the chroma components. - public byte @log2_chroma_h; - /// Combination of AV_PIX_FMT_FLAG_... flags. - public ulong @flags; - /// Parameters that describe how pixels are packed. If the format has 1 or 2 components, then luma is 0. If the format has 3 or 4 components: if the RGB flag is set then 0 is red, 1 is green and 2 is blue; otherwise 0 is luma, 1 is chroma-U and 2 is chroma-V. - public AVComponentDescriptor_array4 @comp; - /// Alternative comma-separated names. - public byte* @alias; - } - - public unsafe partial struct AVComponentDescriptor - { - /// Which of the 4 planes contains the component. - public int @plane; - /// Number of elements between 2 horizontally consecutive pixels. Elements are bits for bitstream formats, bytes otherwise. - public int @step; - /// Number of elements before the component of the first pixel. Elements are bits for bitstream formats, bytes otherwise. - public int @offset; - /// Number of least significant bits that must be shifted away to get the value. - public int @shift; - /// Number of bits in the component. - public int @depth; - } - - public unsafe partial struct AVTimecode - { - /// timecode frame start (first base frame number) - public int @start; - /// flags such as drop frame, +24 hours support, ... - public uint @flags; - /// frame rate in rational form - public AVRational @rate; - /// frame per second; must be consistent with the rate field - public uint @fps; - } - - /// This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e. state that is not tied to a concrete processing configuration. 
E.g., in an API that supports hardware-accelerated encoding and decoding, this struct will (if possible) wrap the state that is common to both encoding and decoding and from which specific instances of encoders or decoders can be derived. - public unsafe partial struct AVHWDeviceContext - { - /// A class for logging. Set by av_hwdevice_ctx_alloc(). - public AVClass* @av_class; - /// Private data used internally by libavutil. Must not be accessed in any way by the caller. - public AVHWDeviceInternal* @internal; - /// This field identifies the underlying API used for hardware access. - public AVHWDeviceType @type; - /// The format-specific data, allocated and freed by libavutil along with this context. - public void* @hwctx; - /// This field may be set by the caller before calling av_hwdevice_ctx_init(). - public AVHWDeviceContext_free_func @free; - /// Arbitrary user data, to be used e.g. by the free() callback. - public void* @user_opaque; - } - - /// This struct describes a set or pool of "hardware" frames (i.e. those with data not located in normal system memory). All the frames in the pool are assumed to be allocated in the same way and interchangeable. - public unsafe partial struct AVHWFramesContext - { - /// A class for logging. - public AVClass* @av_class; - /// Private data used internally by libavutil. Must not be accessed in any way by the caller. - public AVHWFramesInternal* @internal; - /// A reference to the parent AVHWDeviceContext. This reference is owned and managed by the enclosing AVHWFramesContext, but the caller may derive additional references from it. - public AVBufferRef* @device_ref; - /// The parent AVHWDeviceContext. This is simply a pointer to device_ref->data provided for convenience. - public AVHWDeviceContext* @device_ctx; - /// The format-specific data, allocated and freed automatically along with this context. - public void* @hwctx; - /// This field may be set by the caller before calling av_hwframe_ctx_init(). 
- public AVHWFramesContext_free_func @free; - /// Arbitrary user data, to be used e.g. by the free() callback. - public void* @user_opaque; - /// A pool from which the frames are allocated by av_hwframe_get_buffer(). This field may be set by the caller before calling av_hwframe_ctx_init(). The buffers returned by calling av_buffer_pool_get() on this pool must have the properties described in the documentation in the corresponding hw type's header (hwcontext_*.h). The pool will be freed strictly before this struct's free() callback is invoked. - public AVBufferPool* @pool; - /// Initial size of the frame pool. If a device type does not support dynamically resizing the pool, then this is also the maximum pool size. - public int @initial_pool_size; - /// The pixel format identifying the underlying HW surface type. - public AVPixelFormat @format; - /// The pixel format identifying the actual data layout of the hardware frames. - public AVPixelFormat @sw_format; - /// The allocated dimensions of the frames in this pool. - public int @width; - /// The allocated dimensions of the frames in this pool. - public int @height; - } - - /// This struct describes the constraints on hardware frames attached to a given device with a hardware-specific configuration. This is returned by av_hwdevice_get_hwframe_constraints() and must be freed by av_hwframe_constraints_free() after use. - public unsafe partial struct AVHWFramesConstraints - { - /// A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE. This member will always be filled. - public AVPixelFormat* @valid_hw_formats; - /// A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE. Can be NULL if this information is not known. - public AVPixelFormat* @valid_sw_formats; - /// The minimum size of frames in this hw_frames_ctx. (Zero if not known.) - public int @min_width; - public int @min_height; - /// The maximum size of frames in this hw_frames_ctx. 
(INT_MAX if not known / no limit.) - public int @max_width; - public int @max_height; - } - - /// This struct is allocated as AVHWDeviceContext.hwctx - public unsafe partial struct AVDXVA2DeviceContext - { - public IDirect3DDeviceManager9* @devmgr; - } - - public unsafe partial struct IDirect3DDeviceManager9 - { - public IDirect3DDeviceManager9Vtbl* @lpVtbl; - } - - public unsafe partial struct IDirect3DDeviceManager9Vtbl - { - public void* @QueryInterface; - public void* @AddRef; - public void* @Release; - public void* @ResetDevice; - public void* @OpenDeviceHandle; - public void* @CloseDeviceHandle; - public void* @TestDevice; - public void* @LockDevice; - public void* @UnlockDevice; - public void* @GetVideoService; - } - - /// This struct is allocated as AVHWFramesContext.hwctx - public unsafe partial struct AVDXVA2FramesContext - { - /// The surface type (e.g. DXVA2_VideoProcessorRenderTarget or DXVA2_VideoDecoderRenderTarget). Must be set by the caller. - public ulong @surface_type; - /// The surface pool. When an external pool is not provided by the caller, this will be managed (allocated and filled on init, freed on uninit) by libavutil. - public IDirect3DSurface9** @surfaces; - public int @nb_surfaces; - /// Certain drivers require the decoder to be destroyed before the surfaces. To allow internally managed pools to work properly in such cases, this field is provided. 
- public IDirectXVideoDecoder* @decoder_to_release; - } - - public unsafe partial struct IDirect3DSurface9 - { - public IDirect3DSurface9Vtbl* @lpVtbl; - } - - public unsafe partial struct IDirect3DSurface9Vtbl - { - public void* @QueryInterface; - public void* @AddRef; - public void* @Release; - public void* @GetDevice; - public void* @SetPrivateData; - public void* @GetPrivateData; - public void* @FreePrivateData; - public void* @SetPriority; - public void* @GetPriority; - public void* @PreLoad; - public void* @GetType; - public void* @GetContainer; - public void* @GetDesc; - public void* @LockRect; - public void* @UnlockRect; - public void* @GetDC; - public void* @ReleaseDC; - } - - public unsafe partial struct IDirectXVideoDecoder - { - public IDirectXVideoDecoderVtbl* @lpVtbl; - } - - public unsafe partial struct IDirectXVideoDecoderVtbl - { - public void* @QueryInterface; - public void* @AddRef; - public void* @Release; - public void* @GetVideoDecoderService; - public void* @GetCreationParameters; - public void* @GetBuffer; - public void* @ReleaseBuffer; - public void* @BeginFrame; - public void* @EndFrame; - public void* @Execute; - } - - /// This struct is allocated as AVHWDeviceContext.hwctx - public unsafe partial struct AVD3D11VADeviceContext - { - /// Device used for texture creation and access. This can also be used to set the libavcodec decoding device. - public ID3D11Device* @device; - /// If unset, this will be set from the device field on init. - public ID3D11DeviceContext* @device_context; - /// If unset, this will be set from the device field on init. - public ID3D11VideoDevice* @video_device; - /// If unset, this will be set from the device_context field on init. - public ID3D11VideoContext* @video_context; - /// Callbacks for locking. They protect accesses to device_context and video_context calls. They also protect access to the internal staging texture (for av_hwframe_transfer_data() calls). 
They do NOT protect access to hwcontext or decoder state in general. - public AVD3D11VADeviceContext_lock_func @lock; - public AVD3D11VADeviceContext_unlock_func @unlock; - public void* @lock_ctx; - } - - public unsafe partial struct ID3D11Device - { - public ID3D11DeviceVtbl* @lpVtbl; - } - - public unsafe partial struct ID3D11DeviceVtbl - { - public void* @QueryInterface; - public void* @AddRef; - public void* @Release; - public void* @CreateBuffer; - public void* @CreateTexture1D; - public void* @CreateTexture2D; - public void* @CreateTexture3D; - public void* @CreateShaderResourceView; - public void* @CreateUnorderedAccessView; - public void* @CreateRenderTargetView; - public void* @CreateDepthStencilView; - public void* @CreateInputLayout; - public void* @CreateVertexShader; - public void* @CreateGeometryShader; - public void* @CreateGeometryShaderWithStreamOutput; - public void* @CreatePixelShader; - public void* @CreateHullShader; - public void* @CreateDomainShader; - public void* @CreateComputeShader; - public void* @CreateClassLinkage; - public void* @CreateBlendState; - public void* @CreateDepthStencilState; - public void* @CreateRasterizerState; - public void* @CreateSamplerState; - public void* @CreateQuery; - public void* @CreatePredicate; - public void* @CreateCounter; - public void* @CreateDeferredContext; - public void* @OpenSharedResource; - public void* @CheckFormatSupport; - public void* @CheckMultisampleQualityLevels; - public void* @CheckCounterInfo; - public void* @CheckCounter; - public void* @CheckFeatureSupport; - public void* @GetPrivateData; - public void* @SetPrivateData; - public void* @SetPrivateDataInterface; - public void* @GetFeatureLevel; - public void* @GetCreationFlags; - public void* @GetDeviceRemovedReason; - public void* @GetImmediateContext; - public void* @SetExceptionMode; - public void* @GetExceptionMode; - } - - public unsafe partial struct ID3D11DeviceContext - { - public ID3D11DeviceContextVtbl* @lpVtbl; - } - - public 
unsafe partial struct ID3D11DeviceContextVtbl - { - public void* @QueryInterface; - public void* @AddRef; - public void* @Release; - public void* @GetDevice; - public void* @GetPrivateData; - public void* @SetPrivateData; - public void* @SetPrivateDataInterface; - public void* @VSSetConstantBuffers; - public void* @PSSetShaderResources; - public void* @PSSetShader; - public void* @PSSetSamplers; - public void* @VSSetShader; - public void* @DrawIndexed; - public void* @Draw; - public void* @Map; - public void* @Unmap; - public void* @PSSetConstantBuffers; - public void* @IASetInputLayout; - public void* @IASetVertexBuffers; - public void* @IASetIndexBuffer; - public void* @DrawIndexedInstanced; - public void* @DrawInstanced; - public void* @GSSetConstantBuffers; - public void* @GSSetShader; - public void* @IASetPrimitiveTopology; - public void* @VSSetShaderResources; - public void* @VSSetSamplers; - public void* @Begin; - public void* @End; - public void* @GetData; - public void* @SetPredication; - public void* @GSSetShaderResources; - public void* @GSSetSamplers; - public void* @OMSetRenderTargets; - public void* @OMSetRenderTargetsAndUnorderedAccessViews; - public void* @OMSetBlendState; - public void* @OMSetDepthStencilState; - public void* @SOSetTargets; - public void* @DrawAuto; - public void* @DrawIndexedInstancedIndirect; - public void* @DrawInstancedIndirect; - public void* @Dispatch; - public void* @DispatchIndirect; - public void* @RSSetState; - public void* @RSSetViewports; - public void* @RSSetScissorRects; - public void* @CopySubresourceRegion; - public void* @CopyResource; - public void* @UpdateSubresource; - public void* @CopyStructureCount; - public void* @ClearRenderTargetView; - public void* @ClearUnorderedAccessViewUint; - public void* @ClearUnorderedAccessViewFloat; - public void* @ClearDepthStencilView; - public void* @GenerateMips; - public void* @SetResourceMinLOD; - public void* @GetResourceMinLOD; - public void* @ResolveSubresource; - public 
void* @ExecuteCommandList; - public void* @HSSetShaderResources; - public void* @HSSetShader; - public void* @HSSetSamplers; - public void* @HSSetConstantBuffers; - public void* @DSSetShaderResources; - public void* @DSSetShader; - public void* @DSSetSamplers; - public void* @DSSetConstantBuffers; - public void* @CSSetShaderResources; - public void* @CSSetUnorderedAccessViews; - public void* @CSSetShader; - public void* @CSSetSamplers; - public void* @CSSetConstantBuffers; - public void* @VSGetConstantBuffers; - public void* @PSGetShaderResources; - public void* @PSGetShader; - public void* @PSGetSamplers; - public void* @VSGetShader; - public void* @PSGetConstantBuffers; - public void* @IAGetInputLayout; - public void* @IAGetVertexBuffers; - public void* @IAGetIndexBuffer; - public void* @GSGetConstantBuffers; - public void* @GSGetShader; - public void* @IAGetPrimitiveTopology; - public void* @VSGetShaderResources; - public void* @VSGetSamplers; - public void* @GetPredication; - public void* @GSGetShaderResources; - public void* @GSGetSamplers; - public void* @OMGetRenderTargets; - public void* @OMGetRenderTargetsAndUnorderedAccessViews; - public void* @OMGetBlendState; - public void* @OMGetDepthStencilState; - public void* @SOGetTargets; - public void* @RSGetState; - public void* @RSGetViewports; - public void* @RSGetScissorRects; - public void* @HSGetShaderResources; - public void* @HSGetShader; - public void* @HSGetSamplers; - public void* @HSGetConstantBuffers; - public void* @DSGetShaderResources; - public void* @DSGetShader; - public void* @DSGetSamplers; - public void* @DSGetConstantBuffers; - public void* @CSGetShaderResources; - public void* @CSGetUnorderedAccessViews; - public void* @CSGetShader; - public void* @CSGetSamplers; - public void* @CSGetConstantBuffers; - public void* @ClearState; - public void* @Flush; - public void* @GetType; - public void* @GetContextFlags; - public void* @FinishCommandList; - } - - public unsafe partial struct 
ID3D11VideoDevice - { - public ID3D11VideoDeviceVtbl* @lpVtbl; - } - - public unsafe partial struct ID3D11VideoDeviceVtbl - { - public void* @QueryInterface; - public void* @AddRef; - public void* @Release; - public void* @CreateVideoDecoder; - public void* @CreateVideoProcessor; - public void* @CreateAuthenticatedChannel; - public void* @CreateCryptoSession; - public void* @CreateVideoDecoderOutputView; - public void* @CreateVideoProcessorInputView; - public void* @CreateVideoProcessorOutputView; - public void* @CreateVideoProcessorEnumerator; - public void* @GetVideoDecoderProfileCount; - public void* @GetVideoDecoderProfile; - public void* @CheckVideoDecoderFormat; - public void* @GetVideoDecoderConfigCount; - public void* @GetVideoDecoderConfig; - public void* @GetContentProtectionCaps; - public void* @CheckCryptoKeyExchange; - public void* @SetPrivateData; - public void* @SetPrivateDataInterface; - } - - public unsafe partial struct ID3D11VideoContext - { - public ID3D11VideoContextVtbl* @lpVtbl; - } - - public unsafe partial struct ID3D11VideoContextVtbl - { - public void* @QueryInterface; - public void* @AddRef; - public void* @Release; - public void* @GetDevice; - public void* @GetPrivateData; - public void* @SetPrivateData; - public void* @SetPrivateDataInterface; - public void* @GetDecoderBuffer; - public void* @ReleaseDecoderBuffer; - public void* @DecoderBeginFrame; - public void* @DecoderEndFrame; - public void* @SubmitDecoderBuffers; - public void* @DecoderExtension; - public void* @VideoProcessorSetOutputTargetRect; - public void* @VideoProcessorSetOutputBackgroundColor; - public void* @VideoProcessorSetOutputColorSpace; - public void* @VideoProcessorSetOutputAlphaFillMode; - public void* @VideoProcessorSetOutputConstriction; - public void* @VideoProcessorSetOutputStereoMode; - public void* @VideoProcessorSetOutputExtension; - public void* @VideoProcessorGetOutputTargetRect; - public void* @VideoProcessorGetOutputBackgroundColor; - public void* 
@VideoProcessorGetOutputColorSpace; - public void* @VideoProcessorGetOutputAlphaFillMode; - public void* @VideoProcessorGetOutputConstriction; - public void* @VideoProcessorGetOutputStereoMode; - public void* @VideoProcessorGetOutputExtension; - public void* @VideoProcessorSetStreamFrameFormat; - public void* @VideoProcessorSetStreamColorSpace; - public void* @VideoProcessorSetStreamOutputRate; - public void* @VideoProcessorSetStreamSourceRect; - public void* @VideoProcessorSetStreamDestRect; - public void* @VideoProcessorSetStreamAlpha; - public void* @VideoProcessorSetStreamPalette; - public void* @VideoProcessorSetStreamPixelAspectRatio; - public void* @VideoProcessorSetStreamLumaKey; - public void* @VideoProcessorSetStreamStereoFormat; - public void* @VideoProcessorSetStreamAutoProcessingMode; - public void* @VideoProcessorSetStreamFilter; - public void* @VideoProcessorSetStreamExtension; - public void* @VideoProcessorGetStreamFrameFormat; - public void* @VideoProcessorGetStreamColorSpace; - public void* @VideoProcessorGetStreamOutputRate; - public void* @VideoProcessorGetStreamSourceRect; - public void* @VideoProcessorGetStreamDestRect; - public void* @VideoProcessorGetStreamAlpha; - public void* @VideoProcessorGetStreamPalette; - public void* @VideoProcessorGetStreamPixelAspectRatio; - public void* @VideoProcessorGetStreamLumaKey; - public void* @VideoProcessorGetStreamStereoFormat; - public void* @VideoProcessorGetStreamAutoProcessingMode; - public void* @VideoProcessorGetStreamFilter; - public void* @VideoProcessorGetStreamExtension; - public void* @VideoProcessorBlt; - public void* @NegotiateCryptoSessionKeyExchange; - public void* @EncryptionBlt; - public void* @DecryptionBlt; - public void* @StartSessionKeyRefresh; - public void* @FinishSessionKeyRefresh; - public void* @GetEncryptionBltKey; - public void* @NegotiateAuthenticatedChannelKeyExchange; - public void* @QueryAuthenticatedChannel; - public void* @ConfigureAuthenticatedChannel; - public void* 
@VideoProcessorSetStreamRotation; - public void* @VideoProcessorGetStreamRotation; - } - - /// D3D11 frame descriptor for pool allocation. - public unsafe partial struct AVD3D11FrameDescriptor - { - /// The texture in which the frame is located. The reference count is managed by the AVBufferRef, and destroying the reference will release the interface. - public ID3D11Texture2D* @texture; - /// The index into the array texture element representing the frame, or 0 if the texture is not an array texture. - public long @index; - } - - public unsafe partial struct ID3D11Texture2D - { - public ID3D11Texture2DVtbl* @lpVtbl; - } - - public unsafe partial struct ID3D11Texture2DVtbl - { - public void* @QueryInterface; - public void* @AddRef; - public void* @Release; - public void* @GetDevice; - public void* @GetPrivateData; - public void* @SetPrivateData; - public void* @SetPrivateDataInterface; - public void* @GetType; - public void* @SetEvictionPriority; - public void* @GetEvictionPriority; - public void* @GetDesc; - } - - /// This struct is allocated as AVHWFramesContext.hwctx - public unsafe partial struct AVD3D11VAFramesContext - { - /// The canonical texture used for pool allocation. If this is set to NULL on init, the hwframes implementation will allocate and set an array texture if initial_pool_size > 0. - public ID3D11Texture2D* @texture; - /// D3D11_TEXTURE2D_DESC.BindFlags used for texture creation. The user must at least set D3D11_BIND_DECODER if the frames context is to be used for video decoding. This field is ignored/invalid if a user-allocated texture is provided. - public uint @BindFlags; - /// D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation. This field is ignored/invalid if a user-allocated texture is provided. - public uint @MiscFlags; - /// In case if texture structure member above is not NULL contains the same texture pointer for all elements and different indexes into the array texture. 
In case if texture structure member above is NULL, all elements contains pointers to separate non-array textures and 0 indexes. This field is ignored/invalid if a user-allocated texture is provided. - public AVD3D11FrameDescriptor* @texture_infos; - } - - /// Represents the percentile at a specific percentage in a distribution. - public unsafe partial struct AVHDRPlusPercentile - { - /// The percentage value corresponding to a specific percentile linearized RGB value in the processing window in the scene. The value shall be in the range of 0 to100, inclusive. - public byte @percentage; - /// The linearized maxRGB value at a specific percentile in the processing window in the scene. The value shall be in the range of 0 to 1, inclusive and in multiples of 0.00001. - public AVRational @percentile; - } - - /// Color transform parameters at a processing window in a dynamic metadata for SMPTE 2094-40. - public unsafe partial struct AVHDRPlusColorTransformParams - { - /// The relative x coordinate of the top left pixel of the processing window. The value shall be in the range of 0 and 1, inclusive and in multiples of 1/(width of Picture - 1). The value 1 corresponds to the absolute coordinate of width of Picture - 1. The value for first processing window shall be 0. - public AVRational @window_upper_left_corner_x; - /// The relative y coordinate of the top left pixel of the processing window. The value shall be in the range of 0 and 1, inclusive and in multiples of 1/(height of Picture - 1). The value 1 corresponds to the absolute coordinate of height of Picture - 1. The value for first processing window shall be 0. - public AVRational @window_upper_left_corner_y; - /// The relative x coordinate of the bottom right pixel of the processing window. The value shall be in the range of 0 and 1, inclusive and in multiples of 1/(width of Picture - 1). The value 1 corresponds to the absolute coordinate of width of Picture - 1. The value for first processing window shall be 1. 
- public AVRational @window_lower_right_corner_x; - /// The relative y coordinate of the bottom right pixel of the processing window. The value shall be in the range of 0 and 1, inclusive and in multiples of 1/(height of Picture - 1). The value 1 corresponds to the absolute coordinate of height of Picture - 1. The value for first processing window shall be 1. - public AVRational @window_lower_right_corner_y; - /// The x coordinate of the center position of the concentric internal and external ellipses of the elliptical pixel selector in the processing window. The value shall be in the range of 0 to (width of Picture - 1), inclusive and in multiples of 1 pixel. - public ushort @center_of_ellipse_x; - /// The y coordinate of the center position of the concentric internal and external ellipses of the elliptical pixel selector in the processing window. The value shall be in the range of 0 to (height of Picture - 1), inclusive and in multiples of 1 pixel. - public ushort @center_of_ellipse_y; - /// The clockwise rotation angle in degree of arc with respect to the positive direction of the x-axis of the concentric internal and external ellipses of the elliptical pixel selector in the processing window. The value shall be in the range of 0 to 180, inclusive and in multiples of 1. - public byte @rotation_angle; - /// The semi-major axis value of the internal ellipse of the elliptical pixel selector in amount of pixels in the processing window. The value shall be in the range of 1 to 65535, inclusive and in multiples of 1 pixel. - public ushort @semimajor_axis_internal_ellipse; - /// The semi-major axis value of the external ellipse of the elliptical pixel selector in amount of pixels in the processing window. The value shall not be less than semimajor_axis_internal_ellipse of the current processing window. The value shall be in the range of 1 to 65535, inclusive and in multiples of 1 pixel. 
- public ushort @semimajor_axis_external_ellipse; - /// The semi-minor axis value of the external ellipse of the elliptical pixel selector in amount of pixels in the processing window. The value shall be in the range of 1 to 65535, inclusive and in multiples of 1 pixel. - public ushort @semiminor_axis_external_ellipse; - /// Overlap process option indicates one of the two methods of combining rendered pixels in the processing window in an image with at least one elliptical pixel selector. For overlapping elliptical pixel selectors in an image, overlap_process_option shall have the same value. - public AVHDRPlusOverlapProcessOption @overlap_process_option; - /// The maximum of the color components of linearized RGB values in the processing window in the scene. The values should be in the range of 0 to 1, inclusive and in multiples of 0.00001. maxscl[ 0 ], maxscl[ 1 ], and maxscl[ 2 ] are corresponding to R, G, B color components respectively. - public AVRational_array3 @maxscl; - /// The average of linearized maxRGB values in the processing window in the scene. The value should be in the range of 0 to 1, inclusive and in multiples of 0.00001. - public AVRational @average_maxrgb; - /// The number of linearized maxRGB values at given percentiles in the processing window in the scene. The maximum value shall be 15. - public byte @num_distribution_maxrgb_percentiles; - /// The linearized maxRGB values at given percentiles in the processing window in the scene. - public AVHDRPlusPercentile_array15 @distribution_maxrgb; - /// The fraction of selected pixels in the image that contains the brightest pixel in the scene. The value shall be in the range of 0 to 1, inclusive and in multiples of 0.001. - public AVRational @fraction_bright_pixels; - /// This flag indicates that the metadata for the tone mapping function in the processing window is present (for value of 1). 
- public byte @tone_mapping_flag; - /// The x coordinate of the separation point between the linear part and the curved part of the tone mapping function. The value shall be in the range of 0 to 1, excluding 0 and in multiples of 1/4095. - public AVRational @knee_point_x; - /// The y coordinate of the separation point between the linear part and the curved part of the tone mapping function. The value shall be in the range of 0 to 1, excluding 0 and in multiples of 1/4095. - public AVRational @knee_point_y; - /// The number of the intermediate anchor parameters of the tone mapping function in the processing window. The maximum value shall be 15. - public byte @num_bezier_curve_anchors; - /// The intermediate anchor parameters of the tone mapping function in the processing window in the scene. The values should be in the range of 0 to 1, inclusive and in multiples of 1/1023. - public AVRational_array15 @bezier_curve_anchors; - /// This flag shall be equal to 0 in bitstreams conforming to this version of this Specification. Other values are reserved for future use. - public byte @color_saturation_mapping_flag; - /// The color saturation gain in the processing window in the scene. The value shall be in the range of 0 to 63/8, inclusive and in multiples of 1/8. The default value shall be 1. - public AVRational @color_saturation_weight; - } - - /// This struct represents dynamic metadata for color volume transform - application 4 of SMPTE 2094-40:2016 standard. - public unsafe partial struct AVDynamicHDRPlus - { - /// Country code by Rec. ITU-T T.35 Annex A. The value shall be 0xB5. - public byte @itu_t_t35_country_code; - /// Application version in the application defining document in ST-2094 suite. The value shall be set to 0. - public byte @application_version; - /// The number of processing windows. The value shall be in the range of 1 to 3, inclusive. - public byte @num_windows; - /// The color transform parameters for every processing window. 
- public AVHDRPlusColorTransformParams_array3 @params; - /// The nominal maximum display luminance of the targeted system display, in units of 0.0001 candelas per square metre. The value shall be in the range of 0 to 10000, inclusive. - public AVRational @targeted_system_display_maximum_luminance; - /// This flag shall be equal to 0 in bit streams conforming to this version of this Specification. The value 1 is reserved for future use. - public byte @targeted_system_display_actual_peak_luminance_flag; - /// The number of rows in the targeted system_display_actual_peak_luminance array. The value shall be in the range of 2 to 25, inclusive. - public byte @num_rows_targeted_system_display_actual_peak_luminance; - /// The number of columns in the targeted_system_display_actual_peak_luminance array. The value shall be in the range of 2 to 25, inclusive. - public byte @num_cols_targeted_system_display_actual_peak_luminance; - /// The normalized actual peak luminance of the targeted system display. The values should be in the range of 0 to 1, inclusive and in multiples of 1/15. - public AVRational_array25x25 @targeted_system_display_actual_peak_luminance; - /// This flag shall be equal to 0 in bitstreams conforming to this version of this Specification. The value 1 is reserved for future use. - public byte @mastering_display_actual_peak_luminance_flag; - /// The number of rows in the mastering_display_actual_peak_luminance array. The value shall be in the range of 2 to 25, inclusive. - public byte @num_rows_mastering_display_actual_peak_luminance; - /// The number of columns in the mastering_display_actual_peak_luminance array. The value shall be in the range of 2 to 25, inclusive. - public byte @num_cols_mastering_display_actual_peak_luminance; - /// The normalized actual peak luminance of the mastering display used for mastering the image essence. The values should be in the range of 0 to 1, inclusive and in multiples of 1/15. 
- public AVRational_array25x25 @mastering_display_actual_peak_luminance; - } - - /// Mastering display metadata capable of representing the color volume of the display used to master the content (SMPTE 2086:2014). - public unsafe partial struct AVMasteringDisplayMetadata - { - /// CIE 1931 xy chromaticity coords of color primaries (r, g, b order). - public AVRational_array3x2 @display_primaries; - /// CIE 1931 xy chromaticity coords of white point. - public AVRational_array2 @white_point; - /// Min luminance of mastering display (cd/m^2). - public AVRational @min_luminance; - /// Max luminance of mastering display (cd/m^2). - public AVRational @max_luminance; - /// Flag indicating whether the display primaries (and white point) are set. - public int @has_primaries; - /// Flag indicating whether the luminance (min_ and max_) have been set. - public int @has_luminance; - } - - /// Content light level needed by to transmit HDR over HDMI (CTA-861.3). - public unsafe partial struct AVContentLightMetadata - { - /// Max content light level (cd/m^2). - public uint @MaxCLL; - /// Max average light level per frame (cd/m^2). - public uint @MaxFALL; - } - - public unsafe partial struct SwsVector - { - /// pointer to the list of coefficients - public double* @coeff; - /// number of coefficients in the vector - public int @length; - } - - public unsafe partial struct SwsFilter - { - public SwsVector* @lumH; - public SwsVector* @lumV; - public SwsVector* @chrH; - public SwsVector* @chrV; - } - - public unsafe partial struct RcOverride - { - public int @start_frame; - public int @end_frame; - public int @qscale; - public float @quality_factor; - } - - /// main external API structure. New fields can be added to the end with minor version bumps. Removal, reordering and changes to existing fields require a major version bump. You can use AVOptions (av_opt* / av_set/get*()) to access these fields from user applications. 
The name string for AVOptions options matches the associated command line parameter name and can be found in libavcodec/options_table.h The AVOption/command line parameter names differ in some cases from the C structure field names for historic reasons or brevity. sizeof(AVCodecContext) must not be used outside libav*. - public unsafe partial struct AVCodecContext - { - /// information on struct for av_log - set by avcodec_alloc_context3 - public AVClass* @av_class; - public int @log_level_offset; - public AVMediaType @codec_type; - public AVCodec* @codec; - public AVCodecID @codec_id; - /// fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A'). This is used to work around some encoder bugs. A demuxer should set this to what is stored in the field used to identify the codec. If there are multiple such fields in a container then the demuxer should choose the one which maximizes the information about the used codec. If the codec tag field in a container is larger than 32 bits then the demuxer should remap the longer ID to 32 bits with a table or other structure. Alternatively a new extra_codec_tag + size could be added but for this a clear advantage must be demonstrated first. - encoding: Set by user, if not then the default based on codec_id will be used. - decoding: Set by user, will be converted to uppercase by libavcodec during init. - public uint @codec_tag; - public void* @priv_data; - /// Private context used for internal data. - public AVCodecInternal* @internal; - /// Private data of the user, can be used to carry app specific stuff. - encoding: Set by user. - decoding: Set by user. - public void* @opaque; - /// the average bitrate - encoding: Set by user; unused for constant quantizer encoding. - decoding: Set by user, may be overwritten by libavcodec if this info is available in the stream - public long @bit_rate; - /// number of bits the bitstream is allowed to diverge from the reference. 
the reference can be CBR (for CBR pass1) or VBR (for pass2) - encoding: Set by user; unused for constant quantizer encoding. - decoding: unused - public int @bit_rate_tolerance; - /// Global quality for codecs which cannot change it per frame. This should be proportional to MPEG-1/2/4 qscale. - encoding: Set by user. - decoding: unused - public int @global_quality; - /// - encoding: Set by user. - decoding: unused - public int @compression_level; - /// AV_CODEC_FLAG_*. - encoding: Set by user. - decoding: Set by user. - public int @flags; - /// AV_CODEC_FLAG2_* - encoding: Set by user. - decoding: Set by user. - public int @flags2; - /// some codecs need / can use extradata like Huffman tables. MJPEG: Huffman tables rv10: additional flags MPEG-4: global headers (they can be in the bitstream or here) The allocated memory should be AV_INPUT_BUFFER_PADDING_SIZE bytes larger than extradata_size to avoid problems if it is read with the bitstream reader. The bytewise contents of extradata must not depend on the architecture or CPU endianness. Must be allocated with the av_malloc() family of functions. - encoding: Set/allocated/freed by libavcodec. - decoding: Set/allocated/freed by user. - public byte* @extradata; - public int @extradata_size; - /// This is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented. For fixed-fps content, timebase should be 1/framerate and timestamp increments should be identically 1. This often, but not always is the inverse of the frame rate or field rate for video. 1/time_base is not the average frame rate if the frame rate is not constant. - public AVRational @time_base; - /// For some codecs, the time base is closer to the field rate than the frame rate. Most notably, H.264 and MPEG-2 specify time_base as half of frame duration if no telecine is used ... - public int @ticks_per_frame; - /// Codec delay. - public int @delay; - /// picture width / height. 
- public int @width; - /// picture width / height. - public int @height; - /// Bitstream width / height, may be different from width/height e.g. when the decoded frame is cropped before being output or lowres is enabled. - public int @coded_width; - /// Bitstream width / height, may be different from width/height e.g. when the decoded frame is cropped before being output or lowres is enabled. - public int @coded_height; - /// the number of pictures in a group of pictures, or 0 for intra_only - encoding: Set by user. - decoding: unused - public int @gop_size; - /// Pixel format, see AV_PIX_FMT_xxx. May be set by the demuxer if known from headers. May be overridden by the decoder if it knows better. - public AVPixelFormat @pix_fmt; - /// If non NULL, 'draw_horiz_band' is called by the libavcodec decoder to draw a horizontal band. It improves cache usage. Not all codecs can do that. You must check the codec capabilities beforehand. When multithreading is used, it may be called from multiple threads at the same time; threads might draw different parts of the same AVFrame, or multiple AVFrames, and there is no guarantee that slices will be drawn in order. The function is also used by hardware acceleration APIs. It is called at least once during frame decoding to pass the data needed for hardware render. In that mode instead of pixel data, AVFrame points to a structure specific to the acceleration API. The application reads the structure and can change some fields to indicate progress or mark state. - encoding: unused - decoding: Set by user. - public AVCodecContext_draw_horiz_band_func @draw_horiz_band; - /// Callback to negotiate the pixel format. Decoding only, may be set by the caller before avcodec_open2(). - public AVCodecContext_get_format_func @get_format; - /// maximum number of B-frames between non-B-frames Note: The output will be delayed by max_b_frames+1 relative to the input. - encoding: Set by user. 
- decoding: unused - public int @max_b_frames; - /// qscale factor between IP and B-frames If > 0 then the last P-frame quantizer will be used (q= lastp_q*factor+offset). If < 0 then normal ratecontrol will be done (q= -normal_q*factor+offset). - encoding: Set by user. - decoding: unused - public float @b_quant_factor; - /// qscale offset between IP and B-frames - encoding: Set by user. - decoding: unused - public float @b_quant_offset; - /// Size of the frame reordering buffer in the decoder. For MPEG-2 it is 1 IPB or 0 low delay IP. - encoding: Set by libavcodec. - decoding: Set by libavcodec. - public int @has_b_frames; - /// qscale factor between P- and I-frames If > 0 then the last P-frame quantizer will be used (q = lastp_q * factor + offset). If < 0 then normal ratecontrol will be done (q= -normal_q*factor+offset). - encoding: Set by user. - decoding: unused - public float @i_quant_factor; - /// qscale offset between P and I-frames - encoding: Set by user. - decoding: unused - public float @i_quant_offset; - /// luminance masking (0-> disabled) - encoding: Set by user. - decoding: unused - public float @lumi_masking; - /// temporary complexity masking (0-> disabled) - encoding: Set by user. - decoding: unused - public float @temporal_cplx_masking; - /// spatial complexity masking (0-> disabled) - encoding: Set by user. - decoding: unused - public float @spatial_cplx_masking; - /// p block masking (0-> disabled) - encoding: Set by user. - decoding: unused - public float @p_masking; - /// darkness masking (0-> disabled) - encoding: Set by user. - decoding: unused - public float @dark_masking; - /// slice count - encoding: Set by libavcodec. - decoding: Set by user (or 0). - public int @slice_count; - /// slice offsets in the frame in bytes - encoding: Set/allocated by libavcodec. - decoding: Set/allocated by user (or NULL). - public int* @slice_offset; - /// sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel. 
Numerator and denominator must be relatively prime and smaller than 256 for some video standards. - encoding: Set by user. - decoding: Set by libavcodec. - public AVRational @sample_aspect_ratio; - /// motion estimation comparison function - encoding: Set by user. - decoding: unused - public int @me_cmp; - /// subpixel motion estimation comparison function - encoding: Set by user. - decoding: unused - public int @me_sub_cmp; - /// macroblock comparison function (not supported yet) - encoding: Set by user. - decoding: unused - public int @mb_cmp; - /// interlaced DCT comparison function - encoding: Set by user. - decoding: unused - public int @ildct_cmp; - /// ME diamond size & shape - encoding: Set by user. - decoding: unused - public int @dia_size; - /// amount of previous MV predictors (2a+1 x 2a+1 square) - encoding: Set by user. - decoding: unused - public int @last_predictor_count; - /// motion estimation prepass comparison function - encoding: Set by user. - decoding: unused - public int @me_pre_cmp; - /// ME prepass diamond size & shape - encoding: Set by user. - decoding: unused - public int @pre_dia_size; - /// subpel ME quality - encoding: Set by user. - decoding: unused - public int @me_subpel_quality; - /// maximum motion estimation search range in subpel units If 0 then no limit. - public int @me_range; - /// slice flags - encoding: unused - decoding: Set by user. - public int @slice_flags; - /// macroblock decision mode - encoding: Set by user. - decoding: unused - public int @mb_decision; - /// custom intra quantization matrix Must be allocated with the av_malloc() family of functions, and will be freed in avcodec_free_context(). - encoding: Set/allocated by user, freed by libavcodec. Can be NULL. - decoding: Set/allocated/freed by libavcodec. - public ushort* @intra_matrix; - /// custom inter quantization matrix Must be allocated with the av_malloc() family of functions, and will be freed in avcodec_free_context(). 
- encoding: Set/allocated by user, freed by libavcodec. Can be NULL. - decoding: Set/allocated/freed by libavcodec. - public ushort* @inter_matrix; - /// precision of the intra DC coefficient - 8 - encoding: Set by user. - decoding: Set by libavcodec - public int @intra_dc_precision; - /// Number of macroblock rows at the top which are skipped. - encoding: unused - decoding: Set by user. - public int @skip_top; - /// Number of macroblock rows at the bottom which are skipped. - encoding: unused - decoding: Set by user. - public int @skip_bottom; - /// minimum MB Lagrange multiplier - encoding: Set by user. - decoding: unused - public int @mb_lmin; - /// maximum MB Lagrange multiplier - encoding: Set by user. - decoding: unused - public int @mb_lmax; - /// - encoding: Set by user. - decoding: unused - public int @bidir_refine; - /// minimum GOP size - encoding: Set by user. - decoding: unused - public int @keyint_min; - /// number of reference frames - encoding: Set by user. - decoding: Set by lavc. - public int @refs; - /// Note: Value depends upon the compare function used for fullpel ME. - encoding: Set by user. - decoding: unused - public int @mv0_threshold; - /// Chromaticity coordinates of the source primaries. - encoding: Set by user - decoding: Set by libavcodec - public AVColorPrimaries @color_primaries; - /// Color Transfer Characteristic. - encoding: Set by user - decoding: Set by libavcodec - public AVColorTransferCharacteristic @color_trc; - /// YUV colorspace type. - encoding: Set by user - decoding: Set by libavcodec - public AVColorSpace @colorspace; - /// MPEG vs JPEG YUV range. - encoding: Set by user - decoding: Set by libavcodec - public AVColorRange @color_range; - /// This defines the location of chroma samples. - encoding: Set by user - decoding: Set by libavcodec - public AVChromaLocation @chroma_sample_location; - /// Number of slices. Indicates number of picture subdivisions. Used for parallelized decoding. 
- encoding: Set by user - decoding: unused - public int @slices; - /// Field order - encoding: set by libavcodec - decoding: Set by user. - public AVFieldOrder @field_order; - /// samples per second - public int @sample_rate; - /// number of audio channels - [Obsolete("use ch_layout.nb_channels")] - public int @channels; - /// sample format - public AVSampleFormat @sample_fmt; - /// Number of samples per channel in an audio frame. - public int @frame_size; - /// Frame counter, set by libavcodec. - public int @frame_number; - /// number of bytes per packet if constant and known or 0 Used by some WAV based audio codecs. - public int @block_align; - /// Audio cutoff bandwidth (0 means "automatic") - encoding: Set by user. - decoding: unused - public int @cutoff; - /// Audio channel layout. - encoding: set by user. - decoding: set by user, may be overwritten by libavcodec. - [Obsolete("use ch_layout")] - public ulong @channel_layout; - /// Request decoder to use this channel layout if it can (0 for default) - encoding: unused - decoding: Set by user. - [Obsolete("use \"downmix\" codec private option")] - public ulong @request_channel_layout; - /// Type of service that the audio stream conveys. - encoding: Set by user. - decoding: Set by libavcodec. - public AVAudioServiceType @audio_service_type; - /// desired sample format - encoding: Not used. - decoding: Set by user. Decoder will decode to this format if it can. - public AVSampleFormat @request_sample_fmt; - /// This callback is called at the beginning of each frame to get data buffer(s) for it. There may be one contiguous buffer for all the data or there may be a buffer per each data plane or anything in between. What this means is, you may set however many entries in buf[] you feel necessary. Each buffer must be reference-counted using the AVBuffer API (see description of buf[] below). 
- public AVCodecContext_get_buffer2_func @get_buffer2; - /// amount of qscale change between easy & hard scenes (0.0-1.0) - public float @qcompress; - /// amount of qscale smoothing over time (0.0-1.0) - public float @qblur; - /// minimum quantizer - encoding: Set by user. - decoding: unused - public int @qmin; - /// maximum quantizer - encoding: Set by user. - decoding: unused - public int @qmax; - /// maximum quantizer difference between frames - encoding: Set by user. - decoding: unused - public int @max_qdiff; - /// decoder bitstream buffer size - encoding: Set by user. - decoding: unused - public int @rc_buffer_size; - /// ratecontrol override, see RcOverride - encoding: Allocated/set/freed by user. - decoding: unused - public int @rc_override_count; - public RcOverride* @rc_override; - /// maximum bitrate - encoding: Set by user. - decoding: Set by user, may be overwritten by libavcodec. - public long @rc_max_rate; - /// minimum bitrate - encoding: Set by user. - decoding: unused - public long @rc_min_rate; - /// Ratecontrol attempt to use, at maximum, <value> of what can be used without an underflow. - encoding: Set by user. - decoding: unused. - public float @rc_max_available_vbv_use; - /// Ratecontrol attempt to use, at least, <value> times the amount needed to prevent a vbv overflow. - encoding: Set by user. - decoding: unused. - public float @rc_min_vbv_overflow_use; - /// Number of bits which should be loaded into the rc buffer before decoding starts. - encoding: Set by user. - decoding: unused - public int @rc_initial_buffer_occupancy; - /// trellis RD quantization - encoding: Set by user. - decoding: unused - public int @trellis; - /// pass1 encoding statistics output buffer - encoding: Set by libavcodec. - decoding: unused - public byte* @stats_out; - /// pass2 encoding statistics input buffer Concatenated stuff from stats_out of pass1 should be placed here. - encoding: Allocated/set/freed by user. 
- decoding: unused - public byte* @stats_in; - /// Work around bugs in encoders which sometimes cannot be detected automatically. - encoding: Set by user - decoding: Set by user - public int @workaround_bugs; - /// strictly follow the standard (MPEG-4, ...). - encoding: Set by user. - decoding: Set by user. Setting this to STRICT or higher means the encoder and decoder will generally do stupid things, whereas setting it to unofficial or lower will mean the encoder might produce output that is not supported by all spec-compliant decoders. Decoders don't differentiate between normal, unofficial and experimental (that is, they always try to decode things when they can) unless they are explicitly asked to behave stupidly (=strictly conform to the specs) - public int @strict_std_compliance; - /// error concealment flags - encoding: unused - decoding: Set by user. - public int @error_concealment; - /// debug - encoding: Set by user. - decoding: Set by user. - public int @debug; - /// Error recognition; may misdetect some more or less valid parts as errors. - encoding: Set by user. - decoding: Set by user. - public int @err_recognition; - /// opaque 64-bit number (generally a PTS) that will be reordered and output in AVFrame.reordered_opaque - encoding: Set by libavcodec to the reordered_opaque of the input frame corresponding to the last returned packet. Only supported by encoders with the AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE capability. - decoding: Set by user. - public long @reordered_opaque; - /// Hardware accelerator in use - encoding: unused. - decoding: Set by libavcodec - public AVHWAccel* @hwaccel; - /// Hardware accelerator context. For some hardware accelerators, a global context needs to be provided by the user. In that case, this holds display-dependent data FFmpeg cannot instantiate itself. Please refer to the FFmpeg HW accelerator documentation to know how to fill this. 
- encoding: unused - decoding: Set by user - public void* @hwaccel_context; - /// error - encoding: Set by libavcodec if flags & AV_CODEC_FLAG_PSNR. - decoding: unused - public ulong_array8 @error; - /// DCT algorithm, see FF_DCT_* below - encoding: Set by user. - decoding: unused - public int @dct_algo; - /// IDCT algorithm, see FF_IDCT_* below. - encoding: Set by user. - decoding: Set by user. - public int @idct_algo; - /// bits per sample/pixel from the demuxer (needed for huffyuv). - encoding: Set by libavcodec. - decoding: Set by user. - public int @bits_per_coded_sample; - /// Bits per sample/pixel of internal libavcodec pixel/sample format. - encoding: set by user. - decoding: set by libavcodec. - public int @bits_per_raw_sample; - /// low resolution decoding, 1-> 1/2 size, 2->1/4 size - encoding: unused - decoding: Set by user. - public int @lowres; - /// thread count is used to decide how many independent tasks should be passed to execute() - encoding: Set by user. - decoding: Set by user. - public int @thread_count; - /// Which multithreading methods to use. Use of FF_THREAD_FRAME will increase decoding delay by one frame per thread, so clients which cannot provide future frames should not use it. - public int @thread_type; - /// Which multithreading methods are in use by the codec. - encoding: Set by libavcodec. - decoding: Set by libavcodec. - public int @active_thread_type; - /// Set by the client if its custom get_buffer() callback can be called synchronously from another thread, which allows faster multithreaded decoding. draw_horiz_band() will be called from other threads regardless of this setting. Ignored if the default get_buffer() is used. - encoding: Set by user. - decoding: Set by user. - [Obsolete("the custom get_buffer2() callback should always be thread-safe. Thread-unsafe get_buffer2() implementations will be invalid starting with LIBAVCODEC_VERSION_MAJOR=60; in other words, libavcodec will behave as if this field was always set to 1. 
Callers that want to be forward compatible with future libavcodec versions should wrap access to this field in #if LIBAVCODEC_VERSION_MAJOR < 60")] - public int @thread_safe_callbacks; - /// The codec may call this to execute several independent things. It will return only after finishing all tasks. The user may replace this with some multithreaded implementation, the default implementation will execute the parts serially. - public AVCodecContext_execute_func @execute; - /// The codec may call this to execute several independent things. It will return only after finishing all tasks. The user may replace this with some multithreaded implementation, the default implementation will execute the parts serially. - public AVCodecContext_execute2_func @execute2; - /// noise vs. sse weight for the nsse comparison function - encoding: Set by user. - decoding: unused - public int @nsse_weight; - /// profile - encoding: Set by user. - decoding: Set by libavcodec. - public int @profile; - /// level - encoding: Set by user. - decoding: Set by libavcodec. - public int @level; - /// Skip loop filtering for selected frames. - encoding: unused - decoding: Set by user. - public AVDiscard @skip_loop_filter; - /// Skip IDCT/dequantization for selected frames. - encoding: unused - decoding: Set by user. - public AVDiscard @skip_idct; - /// Skip decoding for selected frames. - encoding: unused - decoding: Set by user. - public AVDiscard @skip_frame; - /// Header containing style information for text subtitles. For SUBTITLE_ASS subtitle type, it should contain the whole ASS [Script Info] and [V4+ Styles] section, plus the [Events] line and the Format line following. It shouldn't include any Dialogue line. - encoding: Set/allocated/freed by user (before avcodec_open2()) - decoding: Set/allocated/freed by libavcodec (by avcodec_open2()) - public byte* @subtitle_header; - public int @subtitle_header_size; - /// Audio only. 
The number of "priming" samples (padding) inserted by the encoder at the beginning of the audio. I.e. this number of leading decoded samples must be discarded by the caller to get the original audio without leading padding. - public int @initial_padding; - /// - decoding: For codecs that store a framerate value in the compressed bitstream, the decoder may export it here. { 0, 1} when unknown. - encoding: May be used to signal the framerate of CFR content to an encoder. - public AVRational @framerate; - /// Nominal unaccelerated pixel format, see AV_PIX_FMT_xxx. - encoding: unused. - decoding: Set by libavcodec before calling get_format() - public AVPixelFormat @sw_pix_fmt; - /// Timebase in which pkt_dts/pts and AVPacket.dts/pts are. - encoding unused. - decoding set by user. - public AVRational @pkt_timebase; - /// AVCodecDescriptor - encoding: unused. - decoding: set by libavcodec. - public AVCodecDescriptor* @codec_descriptor; - /// Current statistics for PTS correction. - decoding: maintained and used by libavcodec, not intended to be used by user apps - encoding: unused - public long @pts_correction_num_faulty_pts; - /// Number of incorrect PTS values so far - public long @pts_correction_num_faulty_dts; - /// Number of incorrect DTS values so far - public long @pts_correction_last_pts; - /// PTS of the last frame - public long @pts_correction_last_dts; - /// Character encoding of the input subtitles file. - decoding: set by user - encoding: unused - public byte* @sub_charenc; - /// Subtitles character encoding mode. Formats or codecs might be adjusting this setting (if they are doing the conversion themselves for instance). - decoding: set by libavcodec - encoding: unused - public int @sub_charenc_mode; - /// Skip processing alpha if supported by codec. Note that if the format uses pre-multiplied alpha (common with VP6, and recommended due to better video quality/compression) the image will look as if alpha-blended onto a black background. 
However for formats that do not use pre-multiplied alpha there might be serious artefacts (though e.g. libswscale currently assumes pre-multiplied alpha anyway). - public int @skip_alpha; - /// Number of samples to skip after a discontinuity - decoding: unused - encoding: set by libavcodec - public int @seek_preroll; - [Obsolete("unused")] - public int @debug_mv; - /// custom intra quantization matrix - encoding: Set by user, can be NULL. - decoding: unused. - public ushort* @chroma_intra_matrix; - /// dump format separator. can be ", " or " " or anything else - encoding: Set by user. - decoding: Set by user. - public byte* @dump_separator; - /// ',' separated list of allowed decoders. If NULL then all are allowed - encoding: unused - decoding: set by user - public byte* @codec_whitelist; - /// Properties of the stream that gets decoded - encoding: unused - decoding: set by libavcodec - public uint @properties; - /// Additional data associated with the entire coded stream. - public AVPacketSideData* @coded_side_data; - public int @nb_coded_side_data; - /// A reference to the AVHWFramesContext describing the input (for encoding) or output (decoding) frames. The reference is set by the caller and afterwards owned (and freed) by libavcodec - it should never be read by the caller after being set. - public AVBufferRef* @hw_frames_ctx; - [Obsolete("unused")] - public int @sub_text_format; - /// Audio only. The amount of padding (in samples) appended by the encoder to the end of the audio. I.e. this number of decoded samples must be discarded by the caller from the end of the stream to get the original audio without any trailing padding. - public int @trailing_padding; - /// The number of pixels per image to maximally accept. - public long @max_pixels; - /// A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/decoder. The reference is set by the caller and afterwards owned (and freed) by libavcodec. 
- public AVBufferRef* @hw_device_ctx; - /// Bit set of AV_HWACCEL_FLAG_* flags, which affect hardware accelerated decoding (if active). - encoding: unused - decoding: Set by user (either before avcodec_open2(), or in the AVCodecContext.get_format callback) - public int @hwaccel_flags; - /// Video decoding only. Certain video codecs support cropping, meaning that only a sub-rectangle of the decoded frame is intended for display. This option controls how cropping is handled by libavcodec. - public int @apply_cropping; - public int @extra_hw_frames; - /// The percentage of damaged samples to discard a frame. - public int @discard_damaged_percentage; - /// The number of samples per frame to maximally accept. - public long @max_samples; - /// Bit set of AV_CODEC_EXPORT_DATA_* flags, which affects the kind of metadata exported in frame, packet, or coded stream side data by decoders and encoders. - public int @export_side_data; - /// This callback is called at the beginning of each packet to get a data buffer for it. - public AVCodecContext_get_encode_buffer_func @get_encode_buffer; - /// Audio channel layout. - encoding: must be set by the caller, to one of AVCodec.ch_layouts. - decoding: may be set by the caller if known e.g. from the container. The decoder can then override during decoding as needed. - public AVChannelLayout @ch_layout; - } - - /// AVCodec. - public unsafe partial struct AVCodec - { - /// Name of the codec implementation. The name is globally unique among encoders and among decoders (but an encoder and a decoder can share the same name). This is the primary way to find a codec from the user perspective. - public byte* @name; - /// Descriptive name for the codec, meant to be more human readable than name. You should use the NULL_IF_CONFIG_SMALL() macro to define it. - public byte* @long_name; - public AVMediaType @type; - public AVCodecID @id; - /// Codec capabilities. 
see AV_CODEC_CAP_* - public int @capabilities; - /// maximum value for lowres supported by the decoder - public byte @max_lowres; - /// array of supported framerates, or NULL if any, array is terminated by {0,0} - public AVRational* @supported_framerates; - /// array of supported pixel formats, or NULL if unknown, array is terminated by -1 - public AVPixelFormat* @pix_fmts; - /// array of supported audio samplerates, or NULL if unknown, array is terminated by 0 - public int* @supported_samplerates; - /// array of supported sample formats, or NULL if unknown, array is terminated by -1 - public AVSampleFormat* @sample_fmts; - /// array of support channel layouts, or NULL if unknown. array is terminated by 0 - public ulong* @channel_layouts; - /// AVClass for the private context - public AVClass* @priv_class; - /// array of recognized profiles, or NULL if unknown, array is terminated by {FF_PROFILE_UNKNOWN} - public AVProfile* @profiles; - /// Group name of the codec implementation. This is a short symbolic name of the wrapper backing this codec. A wrapper uses some kind of external implementation for the codec, such as an external library, or a codec implementation provided by the OS or the hardware. If this field is NULL, this is a builtin, libavcodec native codec. If non-NULL, this will be the suffix in AVCodec.name in most cases (usually AVCodec.name will be of the form "<codec_name>_<wrapper_name>"). - public byte* @wrapper_name; - /// Array of supported channel layouts, terminated with a zeroed layout. - public AVChannelLayout* @ch_layouts; - } - - /// AVProfile. - public unsafe partial struct AVProfile - { - public int @profile; - /// short name for the profile - public byte* @name; - } - - public unsafe partial struct AVHWAccel - { - /// Name of the hardware accelerated codec. The name is globally unique among encoders and among decoders (but an encoder and a decoder can share the same name). 
- public byte* @name; - /// Type of codec implemented by the hardware accelerator. - public AVMediaType @type; - /// Codec implemented by the hardware accelerator. - public AVCodecID @id; - /// Supported pixel format. - public AVPixelFormat @pix_fmt; - /// Hardware accelerated codec capabilities. see AV_HWACCEL_CODEC_CAP_* - public int @capabilities; - /// Allocate a custom buffer - public AVHWAccel_alloc_frame_func @alloc_frame; - /// Called at the beginning of each frame or field picture. - public AVHWAccel_start_frame_func @start_frame; - /// Callback for parameter data (SPS/PPS/VPS etc). - public AVHWAccel_decode_params_func @decode_params; - /// Callback for each slice. - public AVHWAccel_decode_slice_func @decode_slice; - /// Called at the end of each frame or field picture. - public AVHWAccel_end_frame_func @end_frame; - /// Size of per-frame hardware accelerator private data. - public int @frame_priv_data_size; - /// Initialize the hwaccel private data. - public AVHWAccel_init_func @init; - /// Uninitialize the hwaccel private data. - public AVHWAccel_uninit_func @uninit; - /// Size of the private data to allocate in AVCodecInternal.hwaccel_priv_data. - public int @priv_data_size; - /// Internal hwaccel capabilities. - public int @caps_internal; - /// Fill the given hw_frames context with current codec parameters. Called from get_format. Refer to avcodec_get_hw_frames_parameters() for details. - public AVHWAccel_frame_params_func @frame_params; - } - - /// This struct describes the properties of a single codec described by an AVCodecID. - public unsafe partial struct AVCodecDescriptor - { - public AVCodecID @id; - public AVMediaType @type; - /// Name of the codec described by this descriptor. It is non-empty and unique for each codec descriptor. It should contain alphanumeric characters and '_' only. - public byte* @name; - /// A more descriptive name for this codec. May be NULL. 
- public byte* @long_name; - /// Codec properties, a combination of AV_CODEC_PROP_* flags. - public int @props; - /// MIME type(s) associated with the codec. May be NULL; if not, a NULL-terminated array of MIME types. The first item is always non-NULL and is the preferred MIME type. - public byte** @mime_types; - /// If non-NULL, an array of profiles recognized for this codec. Terminated with FF_PROFILE_UNKNOWN. - public AVProfile* @profiles; - } - - public unsafe partial struct AVPacketSideData - { - public byte* @data; - public ulong @size; - public AVPacketSideDataType @type; - } - - /// This structure stores compressed data. It is typically exported by demuxers and then passed as input to decoders, or received as output from encoders and then passed to muxers. - public unsafe partial struct AVPacket - { - /// A reference to the reference-counted buffer where the packet data is stored. May be NULL, then the packet data is not reference-counted. - public AVBufferRef* @buf; - /// Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will be presented to the user. Can be AV_NOPTS_VALUE if it is not stored in the file. pts MUST be larger or equal to dts as presentation cannot happen before decompression, unless one wants to view hex dumps. Some formats misuse the terms dts and pts/cts to mean something different. Such timestamps must be converted to true pts/dts before they are stored in AVPacket. - public long @pts; - /// Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed. Can be AV_NOPTS_VALUE if it is not stored in the file. - public long @dts; - public byte* @data; - public int @size; - public int @stream_index; - /// A combination of AV_PKT_FLAG values - public int @flags; - /// Additional packet data that can be provided by the container. Packet can contain several types of side information. 
- public AVPacketSideData* @side_data; - public int @side_data_elems; - /// Duration of this packet in AVStream->time_base units, 0 if unknown. Equals next_pts - this_pts in presentation order. - public long @duration; - /// byte position in stream, -1 if unknown - public long @pos; - /// for some private data of the user - public void* @opaque; - /// AVBufferRef for free use by the API user. FFmpeg will never check the contents of the buffer ref. FFmpeg calls av_buffer_unref() on it when the packet is unreferenced. av_packet_copy_props() calls create a new reference with av_buffer_ref() for the target packet's opaque_ref field. - public AVBufferRef* @opaque_ref; - /// Time base of the packet's timestamps. In the future, this field may be set on packets output by encoders or demuxers, but its value will be by default ignored on input to decoders or muxers. - public AVRational @time_base; - } - - public unsafe partial struct AVSubtitleRect - { - /// top left corner of pict, undefined when pict is not set - public int @x; - /// top left corner of pict, undefined when pict is not set - public int @y; - /// width of pict, undefined when pict is not set - public int @w; - /// height of pict, undefined when pict is not set - public int @h; - /// number of colors in pict, undefined when pict is not set - public int @nb_colors; - /// data+linesize for the bitmap of this subtitle. Can be set for text/ass as well once they are rendered. - public byte_ptrArray4 @data; - public int_array4 @linesize; - public AVSubtitleType @type; - /// 0 terminated plain UTF-8 text - public byte* @text; - /// 0 terminated ASS/SSA compatible event line. The presentation of this is unaffected by the other values in this struct. 
- public byte* @ass; - public int @flags; - } - - public unsafe partial struct AVSubtitle - { - public ushort @format; - public uint @start_display_time; - public uint @end_display_time; - public uint @num_rects; - public AVSubtitleRect** @rects; - /// Same as packet pts, in AV_TIME_BASE - public long @pts; - } - - public unsafe partial struct AVCodecParserContext - { - public void* @priv_data; - public AVCodecParser* @parser; - public long @frame_offset; - public long @cur_offset; - public long @next_frame_offset; - public int @pict_type; - /// This field is used for proper frame duration computation in lavf. It signals, how much longer the frame duration of the current frame is compared to normal frame duration. - public int @repeat_pict; - public long @pts; - public long @dts; - public long @last_pts; - public long @last_dts; - public int @fetch_timestamp; - public int @cur_frame_start_index; - public long_array4 @cur_frame_offset; - public long_array4 @cur_frame_pts; - public long_array4 @cur_frame_dts; - public int @flags; - /// byte offset from starting packet start - public long @offset; - public long_array4 @cur_frame_end; - /// Set by parser to 1 for key frames and 0 for non-key frames. It is initialized to -1, so if the parser doesn't set this flag, old-style fallback using AV_PICTURE_TYPE_I picture type as key frames will be used. - public int @key_frame; - /// Synchronization point for start of timestamp generation. - public int @dts_sync_point; - /// Offset of the current timestamp against last timestamp sync point in units of AVCodecContext.time_base. - public int @dts_ref_dts_delta; - /// Presentation delay of current frame in units of AVCodecContext.time_base. - public int @pts_dts_delta; - /// Position of the packet in file. - public long_array4 @cur_frame_pos; - /// Byte position of currently parsed frame in stream. - public long @pos; - /// Previous frame byte position. - public long @last_pos; - /// Duration of the current frame. 
For audio, this is in units of 1 / AVCodecContext.sample_rate. For all other types, this is in units of AVCodecContext.time_base. - public int @duration; - public AVFieldOrder @field_order; - /// Indicate whether a picture is coded as a frame, top field or bottom field. - public AVPictureStructure @picture_structure; - /// Picture number incremented in presentation or output order. This field may be reinitialized at the first picture of a new sequence. - public int @output_picture_number; - /// Dimensions of the decoded video intended for presentation. - public int @width; - public int @height; - /// Dimensions of the coded video. - public int @coded_width; - public int @coded_height; - /// The format of the coded data, corresponds to enum AVPixelFormat for video and for enum AVSampleFormat for audio. - public int @format; - } - - public unsafe partial struct AVCodecParser - { - public int_array7 @codec_ids; - public int @priv_data_size; - public AVCodecParser_parser_init_func @parser_init; - public AVCodecParser_parser_parse_func @parser_parse; - public AVCodecParser_parser_close_func @parser_close; - public AVCodecParser_split_func @split; - } - - /// This struct describes the properties of an encoded stream. - public unsafe partial struct AVCodecParameters - { - /// General type of the encoded data. - public AVMediaType @codec_type; - /// Specific type of the encoded data (the codec used). - public AVCodecID @codec_id; - /// Additional information about the codec (corresponds to the AVI FOURCC). - public uint @codec_tag; - /// Extra binary data needed for initializing the decoder, codec-dependent. - public byte* @extradata; - /// Size of the extradata content in bytes. - public int @extradata_size; - /// - video: the pixel format, the value corresponds to enum AVPixelFormat. - audio: the sample format, the value corresponds to enum AVSampleFormat. - public int @format; - /// The average bitrate of the encoded data (in bits per second). 
- public long @bit_rate; - /// The number of bits per sample in the codedwords. - public int @bits_per_coded_sample; - /// This is the number of valid bits in each output sample. If the sample format has more bits, the least significant bits are additional padding bits, which are always 0. Use right shifts to reduce the sample to its actual size. For example, audio formats with 24 bit samples will have bits_per_raw_sample set to 24, and format set to AV_SAMPLE_FMT_S32. To get the original sample use "(int32_t)sample >> 8"." - public int @bits_per_raw_sample; - /// Codec-specific bitstream restrictions that the stream conforms to. - public int @profile; - public int @level; - /// Video only. The dimensions of the video frame in pixels. - public int @width; - public int @height; - /// Video only. The aspect ratio (width / height) which a single pixel should have when displayed. - public AVRational @sample_aspect_ratio; - /// Video only. The order of the fields in interlaced video. - public AVFieldOrder @field_order; - /// Video only. Additional colorspace characteristics. - public AVColorRange @color_range; - public AVColorPrimaries @color_primaries; - public AVColorTransferCharacteristic @color_trc; - public AVColorSpace @color_space; - public AVChromaLocation @chroma_location; - /// Video only. Number of delayed frames. - public int @video_delay; - /// Audio only. The channel layout bitmask. May be 0 if the channel layout is unknown or unspecified, otherwise the number of bits set must be equal to the channels field. - [Obsolete("use ch_layout")] - public ulong @channel_layout; - /// Audio only. The number of audio channels. - [Obsolete("use ch_layout.nb_channels")] - public int @channels; - /// Audio only. The number of audio samples per second. - public int @sample_rate; - /// Audio only. The number of bytes per coded audio frame, required by some formats. - public int @block_align; - /// Audio only. Audio frame size, if known. 
Required by some formats to be static. - public int @frame_size; - /// Audio only. The amount of padding (in samples) inserted by the encoder at the beginning of the audio. I.e. this number of leading decoded samples must be discarded by the caller to get the original audio without leading padding. - public int @initial_padding; - /// Audio only. The amount of padding (in samples) appended by the encoder to the end of the audio. I.e. this number of decoded samples must be discarded by the caller from the end of the stream to get the original audio without any trailing padding. - public int @trailing_padding; - /// Audio only. Number of samples to skip after a discontinuity. - public int @seek_preroll; - /// Audio only. The channel layout and number of channels. - public AVChannelLayout @ch_layout; - } - - public unsafe partial struct AVCodecHWConfig - { - /// For decoders, a hardware pixel format which that decoder may be able to decode to if suitable hardware is available. - public AVPixelFormat @pix_fmt; - /// Bit set of AV_CODEC_HW_CONFIG_METHOD_* flags, describing the possible setup methods which can be used with this configuration. - public int @methods; - /// The device type associated with the configuration. - public AVHWDeviceType @device_type; - } - - /// Pan Scan area. This specifies the area which should be displayed. Note there may be multiple such areas for one frame. - public unsafe partial struct AVPanScan - { - /// id - encoding: Set by user. - decoding: Set by libavcodec. - public int @id; - /// width and height in 1/16 pel - encoding: Set by user. - decoding: Set by libavcodec. - public int @width; - public int @height; - /// position of the top left corner in 1/16 pel for up to 3 fields/frames - encoding: Set by user. - decoding: Set by libavcodec. - public short_array3x2 @position; - } - - /// This structure describes the bitrate properties of an encoded bitstream. 
It roughly corresponds to a subset the VBV parameters for MPEG-2 or HRD parameters for H.264/HEVC. - public unsafe partial struct AVCPBProperties - { - /// Maximum bitrate of the stream, in bits per second. Zero if unknown or unspecified. - public long @max_bitrate; - /// Minimum bitrate of the stream, in bits per second. Zero if unknown or unspecified. - public long @min_bitrate; - /// Average bitrate of the stream, in bits per second. Zero if unknown or unspecified. - public long @avg_bitrate; - /// The size of the buffer to which the ratecontrol is applied, in bits. Zero if unknown or unspecified. - public long @buffer_size; - /// The delay between the time the packet this structure is associated with is received and the time when it should be decoded, in periods of a 27MHz clock. - public ulong @vbv_delay; - } - - /// This structure supplies correlation between a packet timestamp and a wall clock production time. The definition follows the Producer Reference Time ('prft') as defined in ISO/IEC 14496-12 - public unsafe partial struct AVProducerReferenceTime - { - /// A UTC timestamp, in microseconds, since Unix epoch (e.g, av_gettime()). - public long @wallclock; - public int @flags; - } - - public unsafe partial struct AVPacketList - { - public AVPacket @pkt; - public AVPacketList* @next; - } - - /// This structure is used to provides the necessary configurations and data to the Direct3D11 FFmpeg HWAccel implementation. 
- public unsafe partial struct AVD3D11VAContext - { - /// D3D11 decoder object - public ID3D11VideoDecoder* @decoder; - /// D3D11 VideoContext - public ID3D11VideoContext* @video_context; - /// D3D11 configuration used to create the decoder - public D3D11_VIDEO_DECODER_CONFIG* @cfg; - /// The number of surface in the surface array - public uint @surface_count; - /// The array of Direct3D surfaces used to create the decoder - public ID3D11VideoDecoderOutputView** @surface; - /// A bit field configuring the workarounds needed for using the decoder - public ulong @workaround; - /// Private to the FFmpeg AVHWAccel implementation - public uint @report_id; - /// Mutex to access video_context - public void* @context_mutex; - } - - public unsafe partial struct ID3D11VideoDecoder - { - public ID3D11VideoDecoderVtbl* @lpVtbl; - } - - public unsafe partial struct ID3D11VideoDecoderVtbl - { - public void* @QueryInterface; - public void* @AddRef; - public void* @Release; - public void* @GetDevice; - public void* @GetPrivateData; - public void* @SetPrivateData; - public void* @SetPrivateDataInterface; - public void* @GetCreationParameters; - public void* @GetDriverHandle; - } - - public unsafe partial struct D3D11_VIDEO_DECODER_CONFIG - { - public _GUID @guidConfigBitstreamEncryption; - public _GUID @guidConfigMBcontrolEncryption; - public _GUID @guidConfigResidDiffEncryption; - public uint @ConfigBitstreamRaw; - public uint @ConfigMBcontrolRasterOrder; - public uint @ConfigResidDiffHost; - public uint @ConfigSpatialResid8; - public uint @ConfigResid8Subtraction; - public uint @ConfigSpatialHost8or9Clipping; - public uint @ConfigSpatialResidInterleaved; - public uint @ConfigIntraResidUnsigned; - public uint @ConfigResidDiffAccelerator; - public uint @ConfigHostInverseScan; - public uint @ConfigSpecificIDCT; - public uint @Config4GroupedCoefs; - public ushort @ConfigMinRenderTargetBuffCount; - public ushort @ConfigDecoderSpecific; - } - - public unsafe partial struct _GUID - { - 
public ulong @Data1; - public ushort @Data2; - public ushort @Data3; - public byte_array8 @Data4; - } - - public unsafe partial struct ID3D11VideoDecoderOutputView - { - public ID3D11VideoDecoderOutputViewVtbl* @lpVtbl; - } - - public unsafe partial struct ID3D11VideoDecoderOutputViewVtbl - { - public void* @QueryInterface; - public void* @AddRef; - public void* @Release; - public void* @GetDevice; - public void* @GetPrivateData; - public void* @SetPrivateData; - public void* @SetPrivateDataInterface; - public void* @GetResource; - public void* @GetDesc; - } - - /// This structure contains the data a format has to probe a file. - public unsafe partial struct AVProbeData - { - public byte* @filename; - /// Buffer must have AVPROBE_PADDING_SIZE of extra allocated bytes filled with zero. - public byte* @buf; - /// Size of buf except extra allocated bytes - public int @buf_size; - /// mime_type, when known. - public byte* @mime_type; - } - - /// Stream structure. New fields can be added to the end with minor version bumps. Removal, reordering and changes to existing fields require a major version bump. sizeof(AVStream) must not be used outside libav*. - public unsafe partial struct AVStream - { - /// stream index in AVFormatContext - public int @index; - /// Format-specific stream ID. decoding: set by libavformat encoding: set by the user, replaced by libavformat if left unset - public int @id; - public void* @priv_data; - /// This is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented. - public AVRational @time_base; - /// Decoding: pts of the first frame of the stream in presentation order, in stream time base. Only set this if you are absolutely 100% sure that the value you set it to really is the pts of the first frame. This may be undefined (AV_NOPTS_VALUE). - public long @start_time; - /// Decoding: duration of the stream, in stream time base. 
If a source file does not specify a duration, but does specify a bitrate, this value will be estimated from bitrate and file size. - public long @duration; - /// number of frames in this stream if known or 0 - public long @nb_frames; - /// Stream disposition - a combination of AV_DISPOSITION_* flags. - demuxing: set by libavformat when creating the stream or in avformat_find_stream_info(). - muxing: may be set by the caller before avformat_write_header(). - public int @disposition; - /// Selects which packets can be discarded at will and do not need to be demuxed. - public AVDiscard @discard; - /// sample aspect ratio (0 if unknown) - encoding: Set by user. - decoding: Set by libavformat. - public AVRational @sample_aspect_ratio; - public AVDictionary* @metadata; - /// Average framerate - public AVRational @avg_frame_rate; - /// For streams with AV_DISPOSITION_ATTACHED_PIC disposition, this packet will contain the attached picture. - public AVPacket @attached_pic; - /// An array of side data that applies to the whole stream (i.e. the container does not allow it to change between packets). - public AVPacketSideData* @side_data; - /// The number of elements in the AVStream.side_data array. - public int @nb_side_data; - /// Flags indicating events happening on the stream, a combination of AVSTREAM_EVENT_FLAG_*. - public int @event_flags; - /// Real base framerate of the stream. This is the lowest framerate with which all timestamps can be represented accurately (it is the least common multiple of all framerates in the stream). Note, this value is just a guess! For example, if the time base is 1/90000 and all frames have either approximately 3600 or 1800 timer ticks, then r_frame_rate will be 50/1. - public AVRational @r_frame_rate; - /// Codec parameters associated with this stream. Allocated and freed by libavformat in avformat_new_stream() and avformat_free_context() respectively. - public AVCodecParameters* @codecpar; - /// Number of bits in timestamps. 
Used for wrapping control. - public int @pts_wrap_bits; - } - - /// New fields can be added to the end with minor version bumps. Removal, reordering and changes to existing fields require a major version bump. sizeof(AVProgram) must not be used outside libav*. - public unsafe partial struct AVProgram - { - public int @id; - public int @flags; - /// selects which program to discard and which to feed to the caller - public AVDiscard @discard; - public uint* @stream_index; - public uint @nb_stream_indexes; - public AVDictionary* @metadata; - public int @program_num; - public int @pmt_pid; - public int @pcr_pid; - public int @pmt_version; - /// *************************************************************** All fields below this line are not part of the public API. They may not be used outside of libavformat and can be changed and removed at will. New public fields should be added right above. **************************************************************** - public long @start_time; - public long @end_time; - /// reference dts for wrap detection - public long @pts_wrap_reference; - /// behavior on wrap detection - public int @pts_wrap_behavior; - } - - public unsafe partial struct AVChapter - { - /// unique ID to identify the chapter - public long @id; - /// time base in which the start/end timestamps are specified - public AVRational @time_base; - /// chapter start/end time in time_base units - public long @start; - /// chapter start/end time in time_base units - public long @end; - public AVDictionary* @metadata; - } - - /// @{ - public unsafe partial struct AVOutputFormat - { - public byte* @name; - /// Descriptive name for the format, meant to be more human-readable than name. You should use the NULL_IF_CONFIG_SMALL() macro to define it. 
- public byte* @long_name; - public byte* @mime_type; - /// comma-separated filename extensions - public byte* @extensions; - /// default audio codec - public AVCodecID @audio_codec; - /// default video codec - public AVCodecID @video_codec; - /// default subtitle codec - public AVCodecID @subtitle_codec; - /// can use flags: AVFMT_NOFILE, AVFMT_NEEDNUMBER, AVFMT_GLOBALHEADER, AVFMT_NOTIMESTAMPS, AVFMT_VARIABLE_FPS, AVFMT_NODIMENSIONS, AVFMT_NOSTREAMS, AVFMT_ALLOW_FLUSH, AVFMT_TS_NONSTRICT, AVFMT_TS_NEGATIVE - public int @flags; - /// List of supported codec_id-codec_tag pairs, ordered by "better choice first". The arrays are all terminated by AV_CODEC_ID_NONE. - public AVCodecTag** @codec_tag; - /// AVClass for the private context - public AVClass* @priv_class; - /// *************************************************************** No fields below this line are part of the public API. They may not be used outside of libavformat and can be changed and removed at will. New public fields should be added right above. **************************************************************** - public int @priv_data_size; - /// Internal flags. See FF_FMT_FLAG_* in internal.h. - public int @flags_internal; - public AVOutputFormat_write_header_func @write_header; - /// Write a packet. If AVFMT_ALLOW_FLUSH is set in flags, pkt can be NULL in order to flush data buffered in the muxer. When flushing, return 0 if there still is more data to flush, or 1 if everything was flushed and there is no more buffered data. - public AVOutputFormat_write_packet_func @write_packet; - public AVOutputFormat_write_trailer_func @write_trailer; - /// A format-specific function for interleavement. If unset, packets will be interleaved by dts. - public AVOutputFormat_interleave_packet_func @interleave_packet; - /// Test if the given codec can be stored in this container. 
- public AVOutputFormat_query_codec_func @query_codec; - public AVOutputFormat_get_output_timestamp_func @get_output_timestamp; - /// Allows sending messages from application to device. - public AVOutputFormat_control_message_func @control_message; - /// Write an uncoded AVFrame. - public AVOutputFormat_write_uncoded_frame_func @write_uncoded_frame; - /// Returns device list with it properties. - public AVOutputFormat_get_device_list_func @get_device_list; - /// default data codec - public AVCodecID @data_codec; - /// Initialize format. May allocate data here, and set any AVFormatContext or AVStream parameters that need to be set before packets are sent. This method must not write output. - public AVOutputFormat_init_func @init; - /// Deinitialize format. If present, this is called whenever the muxer is being destroyed, regardless of whether or not the header has been written. - public AVOutputFormat_deinit_func @deinit; - /// Set up any necessary bitstream filtering and extract any extra data needed for the global header. - public AVOutputFormat_check_bitstream_func @check_bitstream; - } - - /// Format I/O context. New fields can be added to the end with minor version bumps. Removal, reordering and changes to existing fields require a major version bump. sizeof(AVFormatContext) must not be used outside libav*, use avformat_alloc_context() to create an AVFormatContext. - public unsafe partial struct AVFormatContext - { - /// A class for logging and avoptions. Set by avformat_alloc_context(). Exports (de)muxer private options if they exist. - public AVClass* @av_class; - /// The input container format. - public AVInputFormat* @iformat; - /// The output container format. - public AVOutputFormat* @oformat; - /// Format private data. This is an AVOptions-enabled struct if and only if iformat/oformat.priv_class is not NULL. - public void* @priv_data; - /// I/O context. - public AVIOContext* @pb; - /// Flags signalling stream properties. A combination of AVFMTCTX_*. 
Set by libavformat. - public int @ctx_flags; - /// Number of elements in AVFormatContext.streams. - public uint @nb_streams; - /// A list of all streams in the file. New streams are created with avformat_new_stream(). - public AVStream** @streams; - /// input or output URL. Unlike the old filename field, this field has no length restriction. - public byte* @url; - /// Position of the first frame of the component, in AV_TIME_BASE fractional seconds. NEVER set this value directly: It is deduced from the AVStream values. - public long @start_time; - /// Duration of the stream, in AV_TIME_BASE fractional seconds. Only set this value if you know none of the individual stream durations and also do not set any of them. This is deduced from the AVStream values if not set. - public long @duration; - /// Total stream bitrate in bit/s, 0 if not available. Never set it directly if the file_size and the duration are known as FFmpeg can compute it automatically. - public long @bit_rate; - public uint @packet_size; - public int @max_delay; - /// Flags modifying the (de)muxer behaviour. A combination of AVFMT_FLAG_*. Set by the user before avformat_open_input() / avformat_write_header(). - public int @flags; - /// Maximum number of bytes read from input in order to determine stream properties. Used when reading the global header and in avformat_find_stream_info(). - public long @probesize; - /// Maximum duration (in AV_TIME_BASE units) of the data read from input in avformat_find_stream_info(). Demuxing only, set by the caller before avformat_find_stream_info(). Can be set to 0 to let avformat choose using a heuristic. - public long @max_analyze_duration; - public byte* @key; - public int @keylen; - public uint @nb_programs; - public AVProgram** @programs; - /// Forced video codec_id. Demuxing: Set by user. - public AVCodecID @video_codec_id; - /// Forced audio codec_id. Demuxing: Set by user. - public AVCodecID @audio_codec_id; - /// Forced subtitle codec_id. 
Demuxing: Set by user. - public AVCodecID @subtitle_codec_id; - /// Maximum amount of memory in bytes to use for the index of each stream. If the index exceeds this size, entries will be discarded as needed to maintain a smaller size. This can lead to slower or less accurate seeking (depends on demuxer). Demuxers for which a full in-memory index is mandatory will ignore this. - muxing: unused - demuxing: set by user - public uint @max_index_size; - /// Maximum amount of memory in bytes to use for buffering frames obtained from realtime capture devices. - public uint @max_picture_buffer; - /// Number of chapters in AVChapter array. When muxing, chapters are normally written in the file header, so nb_chapters should normally be initialized before write_header is called. Some muxers (e.g. mov and mkv) can also write chapters in the trailer. To write chapters in the trailer, nb_chapters must be zero when write_header is called and non-zero when write_trailer is called. - muxing: set by user - demuxing: set by libavformat - public uint @nb_chapters; - public AVChapter** @chapters; - /// Metadata that applies to the whole file. - public AVDictionary* @metadata; - /// Start time of the stream in real world time, in microseconds since the Unix epoch (00:00 1st January 1970). That is, pts=0 in the stream was captured at this real world time. - muxing: Set by the caller before avformat_write_header(). If set to either 0 or AV_NOPTS_VALUE, then the current wall-time will be used. - demuxing: Set by libavformat. AV_NOPTS_VALUE if unknown. Note that the value may become known after some number of frames have been received. - public long @start_time_realtime; - /// The number of frames used for determining the framerate in avformat_find_stream_info(). Demuxing only, set by the caller before avformat_find_stream_info(). - public int @fps_probe_size; - /// Error recognition; higher values will detect more errors but may misdetect some more or less valid parts as errors. 
Demuxing only, set by the caller before avformat_open_input(). - public int @error_recognition; - /// Custom interrupt callbacks for the I/O layer. - public AVIOInterruptCB @interrupt_callback; - /// Flags to enable debugging. - public int @debug; - /// Maximum buffering duration for interleaving. - public long @max_interleave_delta; - /// Allow non-standard and experimental extension - public int @strict_std_compliance; - /// Flags indicating events happening on the file, a combination of AVFMT_EVENT_FLAG_*. - public int @event_flags; - /// Maximum number of packets to read while waiting for the first timestamp. Decoding only. - public int @max_ts_probe; - /// Avoid negative timestamps during muxing. Any value of the AVFMT_AVOID_NEG_TS_* constants. Note, this works better when using av_interleaved_write_frame(). - muxing: Set by user - demuxing: unused - public int @avoid_negative_ts; - /// Transport stream id. This will be moved into demuxer private options. Thus no API/ABI compatibility - public int @ts_id; - /// Audio preload in microseconds. Note, not all formats support this and unpredictable things may happen if it is used when not supported. - encoding: Set by user - decoding: unused - public int @audio_preload; - /// Max chunk time in microseconds. Note, not all formats support this and unpredictable things may happen if it is used when not supported. - encoding: Set by user - decoding: unused - public int @max_chunk_duration; - /// Max chunk size in bytes Note, not all formats support this and unpredictable things may happen if it is used when not supported. - encoding: Set by user - decoding: unused - public int @max_chunk_size; - /// forces the use of wallclock timestamps as pts/dts of packets This has undefined results in the presence of B frames. - encoding: unused - decoding: Set by user - public int @use_wallclock_as_timestamps; - /// avio flags, used to force AVIO_FLAG_DIRECT. 
- encoding: unused - decoding: Set by user - public int @avio_flags; - /// The duration field can be estimated through various ways, and this field can be used to know how the duration was estimated. - encoding: unused - decoding: Read by user - public AVDurationEstimationMethod @duration_estimation_method; - /// Skip initial bytes when opening stream - encoding: unused - decoding: Set by user - public long @skip_initial_bytes; - /// Correct single timestamp overflows - encoding: unused - decoding: Set by user - public uint @correct_ts_overflow; - /// Force seeking to any (also non key) frames. - encoding: unused - decoding: Set by user - public int @seek2any; - /// Flush the I/O context after each packet. - encoding: Set by user - decoding: unused - public int @flush_packets; - /// format probing score. The maximal score is AVPROBE_SCORE_MAX, its set when the demuxer probes the format. - encoding: unused - decoding: set by avformat, read by user - public int @probe_score; - /// Maximum number of bytes read from input in order to identify the AVInputFormat "input format". Only used when the format is not set explicitly by the caller. - public int @format_probesize; - /// ',' separated list of allowed decoders. If NULL then all are allowed - encoding: unused - decoding: set by user - public byte* @codec_whitelist; - /// ',' separated list of allowed demuxers. If NULL then all are allowed - encoding: unused - decoding: set by user - public byte* @format_whitelist; - /// IO repositioned flag. This is set by avformat when the underlaying IO context read pointer is repositioned, for example when doing byte based seeking. Demuxers can use the flag to detect such changes. - public int @io_repositioned; - /// Forced video codec. This allows forcing a specific decoder, even when there are multiple with the same codec_id. Demuxing: Set by user - public AVCodec* @video_codec; - /// Forced audio codec. 
This allows forcing a specific decoder, even when there are multiple with the same codec_id. Demuxing: Set by user - public AVCodec* @audio_codec; - /// Forced subtitle codec. This allows forcing a specific decoder, even when there are multiple with the same codec_id. Demuxing: Set by user - public AVCodec* @subtitle_codec; - /// Forced data codec. This allows forcing a specific decoder, even when there are multiple with the same codec_id. Demuxing: Set by user - public AVCodec* @data_codec; - /// Number of bytes to be written as padding in a metadata header. Demuxing: Unused. Muxing: Set by user via av_format_set_metadata_header_padding. - public int @metadata_header_padding; - /// User data. This is a place for some private data of the user. - public void* @opaque; - /// Callback used by devices to communicate with application. - public AVFormatContext_control_message_cb_func @control_message_cb; - /// Output timestamp offset, in microseconds. Muxing: set by user - public long @output_ts_offset; - /// dump format separator. can be ", " or " " or anything else - muxing: Set by user. - demuxing: Set by user. - public byte* @dump_separator; - /// Forced Data codec_id. Demuxing: Set by user. - public AVCodecID @data_codec_id; - /// ',' separated list of allowed protocols. - encoding: unused - decoding: set by user - public byte* @protocol_whitelist; - /// A callback for opening new IO streams. - public AVFormatContext_io_open_func @io_open; - /// A callback for closing the streams opened with AVFormatContext.io_open(). - public AVFormatContext_io_close_func @io_close; - /// ',' separated list of disallowed protocols. - encoding: unused - decoding: set by user - public byte* @protocol_blacklist; - /// The maximum number of streams. - encoding: unused - decoding: set by user - public int @max_streams; - /// Skip duration calcuation in estimate_timings_from_pts. 
- encoding: unused - decoding: set by user - public int @skip_estimate_duration_from_pts; - /// Maximum number of packets that can be probed - encoding: unused - decoding: set by user - public int @max_probe_packets; - /// A callback for closing the streams opened with AVFormatContext.io_open(). - public AVFormatContext_io_close2_func @io_close2; - } - - /// @{ - public unsafe partial struct AVInputFormat - { - /// A comma separated list of short names for the format. New names may be appended with a minor bump. - public byte* @name; - /// Descriptive name for the format, meant to be more human-readable than name. You should use the NULL_IF_CONFIG_SMALL() macro to define it. - public byte* @long_name; - /// Can use flags: AVFMT_NOFILE, AVFMT_NEEDNUMBER, AVFMT_SHOW_IDS, AVFMT_NOTIMESTAMPS, AVFMT_GENERIC_INDEX, AVFMT_TS_DISCONT, AVFMT_NOBINSEARCH, AVFMT_NOGENSEARCH, AVFMT_NO_BYTE_SEEK, AVFMT_SEEK_TO_PTS. - public int @flags; - /// If extensions are defined, then no probe is done. You should usually not use extension format guessing because it is not reliable enough - public byte* @extensions; - public AVCodecTag** @codec_tag; - /// AVClass for the private context - public AVClass* @priv_class; - /// Comma-separated list of mime types. It is used check for matching mime types while probing. - public byte* @mime_type; - /// *************************************************************** No fields below this line are part of the public API. They may not be used outside of libavformat and can be changed and removed at will. New public fields should be added right above. **************************************************************** - public int @raw_codec_id; - /// Size of private data so that it can be allocated in the wrapper. - public int @priv_data_size; - /// Internal flags. See FF_FMT_FLAG_* in internal.h. - public int @flags_internal; - /// Tell if a given file has a chance of being parsed as this format. 
The buffer provided is guaranteed to be AVPROBE_PADDING_SIZE bytes big so you do not have to check for that unless you need more. - public AVInputFormat_read_probe_func @read_probe; - /// Read the format header and initialize the AVFormatContext structure. Return 0 if OK. 'avformat_new_stream' should be called to create new streams. - public AVInputFormat_read_header_func @read_header; - /// Read one packet and put it in 'pkt'. pts and flags are also set. 'avformat_new_stream' can be called only if the flag AVFMTCTX_NOHEADER is used and only in the calling thread (not in a background thread). - public AVInputFormat_read_packet_func @read_packet; - /// Close the stream. The AVFormatContext and AVStreams are not freed by this function - public AVInputFormat_read_close_func @read_close; - /// Seek to a given timestamp relative to the frames in stream component stream_index. - public AVInputFormat_read_seek_func @read_seek; - /// Get the next timestamp in stream[stream_index].time_base units. - public AVInputFormat_read_timestamp_func @read_timestamp; - /// Start/resume playing - only meaningful if using a network-based format (RTSP). - public AVInputFormat_read_play_func @read_play; - /// Pause playing - only meaningful if using a network-based format (RTSP). - public AVInputFormat_read_pause_func @read_pause; - /// Seek to timestamp ts. Seeking will be done so that the point from which all active streams can be presented successfully will be closest to ts and within min/max_ts. Active streams are all streams that have AVStream.discard < AVDISCARD_ALL. - public AVInputFormat_read_seek2_func @read_seek2; - /// Returns device list with it properties. - public AVInputFormat_get_device_list_func @get_device_list; - } - - /// List of devices. 
- public unsafe partial struct AVDeviceInfoList - { - /// list of autodetected devices - public AVDeviceInfo** @devices; - /// number of autodetected devices - public int @nb_devices; - /// index of default device or -1 if no default - public int @default_device; - } - - /// Bytestream IO Context. New public fields can be added with minor version bumps. Removal, reordering and changes to existing public fields require a major version bump. sizeof(AVIOContext) must not be used outside libav*. - public unsafe partial struct AVIOContext - { - /// A class for private options. - public AVClass* @av_class; - /// Start of the buffer. - public byte* @buffer; - /// Maximum buffer size - public int @buffer_size; - /// Current position in the buffer - public byte* @buf_ptr; - /// End of the data, may be less than buffer+buffer_size if the read function returned less data than requested, e.g. for streams where no more data has been received yet. - public byte* @buf_end; - /// A private pointer, passed to the read/write/seek/... functions. - public void* @opaque; - public AVIOContext_read_packet_func @read_packet; - public AVIOContext_write_packet_func @write_packet; - public AVIOContext_seek_func @seek; - /// position in the file of the current buffer - public long @pos; - /// true if was unable to read due to error or eof - public int @eof_reached; - /// contains the error code or 0 if no error happened - public int @error; - /// true if open for writing - public int @write_flag; - public int @max_packet_size; - /// Try to buffer at least this amount of data before flushing it. - public int @min_packet_size; - public ulong @checksum; - public byte* @checksum_ptr; - public AVIOContext_update_checksum_func @update_checksum; - /// Pause or resume playback for network streaming protocols - e.g. MMS. - public AVIOContext_read_pause_func @read_pause; - /// Seek to a given timestamp in stream with the specified stream_index. 
Needed for some network streaming protocols which don't support seeking to byte position. - public AVIOContext_read_seek_func @read_seek; - /// A combination of AVIO_SEEKABLE_ flags or 0 when the stream is not seekable. - public int @seekable; - /// avio_read and avio_write should if possible be satisfied directly instead of going through a buffer, and avio_seek will always call the underlying seek function directly. - public int @direct; - /// ',' separated list of allowed protocols. - public byte* @protocol_whitelist; - /// ',' separated list of disallowed protocols. - public byte* @protocol_blacklist; - /// A callback that is used instead of write_packet. - public AVIOContext_write_data_type_func @write_data_type; - /// If set, don't call write_data_type separately for AVIO_DATA_MARKER_BOUNDARY_POINT, but ignore them and treat them as AVIO_DATA_MARKER_UNKNOWN (to avoid needlessly small chunks of data returned from the callback). - public int @ignore_boundary_point; - [Obsolete("field utilized privately by libavformat. For a public statistic of how many bytes were written out, see AVIOContext::bytes_written.")] - public long @written; - /// Maximum reached position before a backward seek in the write buffer, used keeping track of already written data for a later flush. - public byte* @buf_ptr_max; - /// Read-only statistic of bytes read for this AVIOContext. - public long @bytes_read; - /// Read-only statistic of bytes written for this AVIOContext. - public long @bytes_written; - } - - /// Callback for checking whether to abort blocking functions. AVERROR_EXIT is returned in this case by the interrupted function. During blocking operations, callback is called with opaque as parameter. If the callback returns 1, the blocking operation will be aborted. 
- public unsafe partial struct AVIOInterruptCB - { - public AVIOInterruptCB_callback_func @callback; - public void* @opaque; - } - - public unsafe partial struct AVIndexEntry - { - public long @pos; - /// Timestamp in AVStream.time_base units, preferably the time from which on correctly decoded frames are available when seeking to this entry. That means preferable PTS on keyframe based formats. But demuxers can choose to store a different timestamp, if it is more convenient for the implementation or nothing better is known - public long @timestamp; - /// Flag is used to indicate which frame should be discarded after decoding. - public int @flags2_size30; - /// Minimum distance between this and the previous keyframe, used to avoid unneeded searching. - public int @min_distance; - } - - /// Describes single entry of the directory. - public unsafe partial struct AVIODirEntry - { - /// Filename - public byte* @name; - /// Type of the entry - public int @type; - /// Set to 1 when name is encoded with UTF-8, 0 otherwise. Name can be encoded with UTF-8 even though 0 is set. - public int @utf8; - /// File size in bytes, -1 if unknown. - public long @size; - /// Time of last modification in microseconds since unix epoch, -1 if unknown. - public long @modification_timestamp; - /// Time of last access in microseconds since unix epoch, -1 if unknown. - public long @access_timestamp; - /// Time of last status change in microseconds since unix epoch, -1 if unknown. - public long @status_change_timestamp; - /// User ID of owner, -1 if unknown. - public long @user_id; - /// Group ID of owner, -1 if unknown. - public long @group_id; - /// Unix file mode, -1 if unknown. 
- public long @filemode; - } - - public unsafe partial struct AVIODirContext - { - public URLContext* @url_context; - } - - /// An instance of a filter - public unsafe partial struct AVFilterContext - { - /// needed for av_log() and filters common options - public AVClass* @av_class; - /// the AVFilter of which this is an instance - public AVFilter* @filter; - /// name of this filter instance - public byte* @name; - /// array of input pads - public AVFilterPad* @input_pads; - /// array of pointers to input links - public AVFilterLink** @inputs; - /// number of input pads - public uint @nb_inputs; - /// array of output pads - public AVFilterPad* @output_pads; - /// array of pointers to output links - public AVFilterLink** @outputs; - /// number of output pads - public uint @nb_outputs; - /// private data for use by the filter - public void* @priv; - /// filtergraph this filter belongs to - public AVFilterGraph* @graph; - /// Type of multithreading being allowed/used. A combination of AVFILTER_THREAD_* flags. - public int @thread_type; - /// An opaque struct for libavfilter internal use. - public AVFilterInternal* @internal; - public AVFilterCommand* @command_queue; - /// enable expression string - public byte* @enable_str; - /// parsed expression (AVExpr*) - public void* @enable; - /// variable values for the enable expression - public double* @var_values; - /// the enabled state from the last expression evaluation - public int @is_disabled; - /// For filters which will create hardware frames, sets the device the filter should create them in. All other filters will ignore this field: in particular, a filter which consumes or processes hardware frames will instead use the hw_frames_ctx field in AVFilterLink to carry the hardware context information. - public AVBufferRef* @hw_device_ctx; - /// Max number of threads allowed in this filter instance. If <= 0, its value is ignored. Overrides global number of threads set per filter graph. 
- public int @nb_threads; - /// Ready status of the filter. A non-0 value means that the filter needs activating; a higher value suggests a more urgent activation. - public uint @ready; - /// Sets the number of extra hardware frames which the filter will allocate on its output links for use in following filters or by the caller. - public int @extra_hw_frames; - } - - /// Filter definition. This defines the pads a filter contains, and all the callback functions used to interact with the filter. - public unsafe partial struct AVFilter - { - /// Filter name. Must be non-NULL and unique among filters. - public byte* @name; - /// A description of the filter. May be NULL. - public byte* @description; - /// List of static inputs. - public AVFilterPad* @inputs; - /// List of static outputs. - public AVFilterPad* @outputs; - /// A class for the private data, used to declare filter private AVOptions. This field is NULL for filters that do not declare any options. - public AVClass* @priv_class; - /// A combination of AVFILTER_FLAG_* - public int @flags; - /// The number of entries in the list of inputs. - public byte @nb_inputs; - /// The number of entries in the list of outputs. - public byte @nb_outputs; - /// This field determines the state of the formats union. It is an enum FilterFormatsState value. - public byte @formats_state; - /// Filter pre-initialization function - public AVFilter_preinit_func @preinit; - /// Filter initialization function. - public AVFilter_init_func @init; - /// Should be set instead of AVFilter.init "init" by the filters that want to pass a dictionary of AVOptions to nested contexts that are allocated during init. - public AVFilter_init_dict_func @init_dict; - /// Filter uninitialization function. - public AVFilter_uninit_func @uninit; - public AVFilter_formats @formats; - /// size of private data to allocate for the filter - public int @priv_size; - /// Additional flags for avfilter internal use only. 
- public int @flags_internal; - /// Make the filter instance process a command. - public AVFilter_process_command_func @process_command; - /// Filter activation function. - public AVFilter_activate_func @activate; - } - - /// The state of the following union is determined by formats_state. See the documentation of enum FilterFormatsState in internal.h. - [StructLayout(LayoutKind.Explicit)] - public unsafe partial struct AVFilter_formats - { - /// Query formats supported by the filter on its inputs and outputs. - [FieldOffset(0)] - public _query_func_func @query_func; - /// A pointer to an array of admissible pixel formats delimited by AV_PIX_FMT_NONE. The generic code will use this list to indicate that this filter supports each of these pixel formats, provided that all inputs and outputs use the same pixel format. - [FieldOffset(0)] - public AVPixelFormat* @pixels_list; - /// Analogous to pixels, but delimited by AV_SAMPLE_FMT_NONE and restricted to filters that only have AVMEDIA_TYPE_AUDIO inputs and outputs. - [FieldOffset(0)] - public AVSampleFormat* @samples_list; - /// Equivalent to { pix_fmt, AV_PIX_FMT_NONE } as pixels_list. - [FieldOffset(0)] - public AVPixelFormat @pix_fmt; - /// Equivalent to { sample_fmt, AV_SAMPLE_FMT_NONE } as samples_list. - [FieldOffset(0)] - public AVSampleFormat @sample_fmt; - } - - /// A link between two filters. This contains pointers to the source and destination filters between which this link exists, and the indexes of the pads involved. In addition, this link also contains the parameters which have been negotiated and agreed upon between the filter, such as image dimensions, format, etc. 
- public unsafe partial struct AVFilterLink - { - /// source filter - public AVFilterContext* @src; - /// output pad on the source filter - public AVFilterPad* @srcpad; - /// dest filter - public AVFilterContext* @dst; - /// input pad on the dest filter - public AVFilterPad* @dstpad; - /// filter media type - public AVMediaType @type; - /// agreed upon image width - public int @w; - /// agreed upon image height - public int @h; - /// agreed upon sample aspect ratio - public AVRational @sample_aspect_ratio; - /// channel layout of current buffer (see libavutil/channel_layout.h) - [Obsolete("use ch_layout")] - public ulong @channel_layout; - /// samples per second - public int @sample_rate; - /// agreed upon media format - public int @format; - /// Define the time base used by the PTS of the frames/samples which will pass through this link. During the configuration stage, each filter is supposed to change only the output timebase, while the timebase of the input link is assumed to be an unchangeable property. - public AVRational @time_base; - /// channel layout of current buffer (see libavutil/channel_layout.h) - public AVChannelLayout @ch_layout; - /// Lists of supported formats / etc. supported by the input filter. - public AVFilterFormatsConfig @incfg; - /// Lists of supported formats / etc. supported by the output filter. - public AVFilterFormatsConfig @outcfg; - public AVFilterLink_init_state @init_state; - /// Graph the filter belongs to. - public AVFilterGraph* @graph; - /// Current timestamp of the link, as defined by the most recent frame(s), in link time_base units. - public long @current_pts; - /// Current timestamp of the link, as defined by the most recent frame(s), in AV_TIME_BASE units. - public long @current_pts_us; - /// Index in the age array. - public int @age_index; - /// Frame rate of the stream on the link, or 1/0 if unknown or variable; if left to 0/0, will be automatically copied from the first input of the source filter if it exists. 
- public AVRational @frame_rate; - /// Minimum number of samples to filter at once. If filter_frame() is called with fewer samples, it will accumulate them in fifo. This field and the related ones must not be changed after filtering has started. If 0, all related fields are ignored. - public int @min_samples; - /// Maximum number of samples to filter at once. If filter_frame() is called with more samples, it will split them. - public int @max_samples; - /// Number of past frames sent through the link. - public long @frame_count_in; - /// Number of past frames sent through the link. - public long @frame_count_out; - /// Number of past samples sent through the link. - public long @sample_count_in; - /// Number of past samples sent through the link. - public long @sample_count_out; - /// A pointer to a FFFramePool struct. - public void* @frame_pool; - /// True if a frame is currently wanted on the output of this filter. Set when ff_request_frame() is called by the output, cleared when a frame is filtered. - public int @frame_wanted_out; - /// For hwaccel pixel formats, this should be a reference to the AVHWFramesContext describing the frames. - public AVBufferRef* @hw_frames_ctx; - /// Internal structure members. The fields below this limit are internal for libavfilter's use and must in no way be accessed by applications. - public byte_array61440 @reserved; - } - - /// Lists of formats / etc. supported by an end of a link. - public unsafe partial struct AVFilterFormatsConfig - { - /// List of supported formats (pixel or sample). - public AVFilterFormats* @formats; - /// Lists of supported sample rates, only for audio. - public AVFilterFormats* @samplerates; - /// Lists of supported channel layouts, only for audio. 
- public AVFilterChannelLayouts* @channel_layouts; - } - - public unsafe partial struct AVFilterGraph - { - public AVClass* @av_class; - public AVFilterContext** @filters; - public uint @nb_filters; - /// sws options to use for the auto-inserted scale filters - public byte* @scale_sws_opts; - /// Type of multithreading allowed for filters in this graph. A combination of AVFILTER_THREAD_* flags. - public int @thread_type; - /// Maximum number of threads used by filters in this graph. May be set by the caller before adding any filters to the filtergraph. Zero (the default) means that the number of threads is determined automatically. - public int @nb_threads; - /// Opaque object for libavfilter internal use. - public AVFilterGraphInternal* @internal; - /// Opaque user data. May be set by the caller to an arbitrary value, e.g. to be used from callbacks like AVFilterGraph.execute. Libavfilter will not touch this field in any way. - public void* @opaque; - /// This callback may be set by the caller immediately after allocating the graph and before adding any filters to it, to provide a custom multithreading implementation. - public AVFilterGraph_execute_func @execute; - /// swr options to use for the auto-inserted aresample filters, Access ONLY through AVOptions - public byte* @aresample_swr_opts; - /// Private fields - public AVFilterLink** @sink_links; - public int @sink_links_count; - public uint @disable_auto_convert; - } - - /// A linked-list of the inputs/outputs of the filter chain. - public unsafe partial struct AVFilterInOut - { - /// unique name for this input/output in the list - public byte* @name; - /// filter context associated to this input/output - public AVFilterContext* @filter_ctx; - /// index of the filt_ctx pad to use for linking - public int @pad_idx; - /// next input/input in the list, NULL if this is the last - public AVFilterInOut* @next; - } - - /// This structure contains the parameters describing the frames that will be passed to this filter. 
- public unsafe partial struct AVBufferSrcParameters - { - /// video: the pixel format, value corresponds to enum AVPixelFormat audio: the sample format, value corresponds to enum AVSampleFormat - public int @format; - /// The timebase to be used for the timestamps on the input frames. - public AVRational @time_base; - /// Video only, the display dimensions of the input frames. - public int @width; - /// Video only, the display dimensions of the input frames. - public int @height; - /// Video only, the sample (pixel) aspect ratio. - public AVRational @sample_aspect_ratio; - /// Video only, the frame rate of the input video. This field must only be set to a non-zero value if input stream has a known constant framerate and should be left at its initial value if the framerate is variable or unknown. - public AVRational @frame_rate; - /// Video with a hwaccel pixel format only. This should be a reference to an AVHWFramesContext instance describing the input frames. - public AVBufferRef* @hw_frames_ctx; - /// Audio only, the audio sampling rate in samples per second. - public int @sample_rate; - /// Audio only, the audio channel layout - [Obsolete("use ch_layout")] - public ulong @channel_layout; - /// Audio only, the audio channel layout - public AVChannelLayout @ch_layout; - } - - /// Deprecated and unused struct to use for initializing a buffersink context. - public unsafe partial struct AVBufferSinkParams - { - /// list of allowed pixel formats, terminated by AV_PIX_FMT_NONE - public AVPixelFormat* @pixel_fmts; - } - - /// Deprecated and unused struct to use for initializing an abuffersink context. 
- public unsafe partial struct AVABufferSinkParams - { - /// list of allowed sample formats, terminated by AV_SAMPLE_FMT_NONE - public AVSampleFormat* @sample_fmts; - /// list of allowed channel layouts, terminated by -1 - public long* @channel_layouts; - /// list of allowed channel counts, terminated by -1 - public int* @channel_counts; - /// if not 0, accept any channel count or layout - public int @all_channel_counts; - /// list of allowed sample rates, terminated by -1 - public int* @sample_rates; - } - - /// Structure describes basic parameters of the device. - public unsafe partial struct AVDeviceInfo - { - /// device name, format depends on device - public byte* @device_name; - /// human friendly name - public byte* @device_description; - /// array indicating what media types(s), if any, a device can provide. If null, cannot provide any - public AVMediaType* @media_types; - /// length of media_types array, 0 if device cannot provide any media types - public int @nb_media_types; - } - - public unsafe partial struct AVDeviceRect - { - /// x coordinate of top left corner - public int @x; - /// y coordinate of top left corner - public int @y; - /// width - public int @width; - /// height - public int @height; - } - - /// Structure describes device capabilities. 
- public unsafe partial struct AVDeviceCapabilitiesQuery - { - public AVClass* @av_class; - public AVFormatContext* @device_context; - public AVCodecID @codec; - public AVSampleFormat @sample_format; - public AVPixelFormat @pixel_format; - public int @sample_rate; - public int @channels; - public long @channel_layout; - public int @window_width; - public int @window_height; - public int @frame_width; - public int @frame_height; - public AVRational @fps; - } - -} diff --git a/FFmpeg.AutoGen/FFmpeg.structs.incomplete.g.cs b/FFmpeg.AutoGen/FFmpeg.structs.incomplete.g.cs deleted file mode 100644 index 4a6bbdf8..00000000 --- a/FFmpeg.AutoGen/FFmpeg.structs.incomplete.g.cs +++ /dev/null @@ -1,107 +0,0 @@ -using System; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen -{ - /// Context for an Audio FIFO Buffer. - /// This struct is incomplete. - public unsafe partial struct AVAudioFifo - { - } - - /// This struct is incomplete. - public unsafe partial struct AVBPrint - { - } - - /// This struct is incomplete. - public unsafe partial struct AVDictionary - { - } - - /// A reference counted buffer type. It is opaque and is meant to be used through references (AVBufferRef). - /// This struct is incomplete. - public unsafe partial struct AVBuffer - { - } - - /// The buffer pool. This structure is opaque and not meant to be accessed directly. It is allocated with av_buffer_pool_init() and freed with av_buffer_pool_uninit(). - /// This struct is incomplete. - public unsafe partial struct AVBufferPool - { - } - - /// Low-complexity tree container - /// This struct is incomplete. - public unsafe partial struct AVTreeNode - { - } - - /// This struct is incomplete. - public unsafe partial struct AVHWDeviceInternal - { - } - - /// This struct is incomplete. - public unsafe partial struct AVHWFramesInternal - { - } - - /// The libswresample context. Unlike libavcodec and libavformat, this structure is opaque. 
This means that if you would like to set options, you must use the avoptions API and cannot directly set values to members of the structure. - /// This struct is incomplete. - public unsafe partial struct SwrContext - { - } - - /// This struct is incomplete. - public unsafe partial struct SwsContext - { - } - - /// This struct is incomplete. - public unsafe partial struct AVCodecInternal - { - } - - /// ********************************************** - /// This struct is incomplete. - public unsafe partial struct AVCodecTag - { - } - - /// This struct is incomplete. - public unsafe partial struct URLContext - { - } - - /// This struct is incomplete. - public unsafe partial struct AVFilterPad - { - } - - /// This struct is incomplete. - public unsafe partial struct AVFilterFormats - { - } - - /// This struct is incomplete. - public unsafe partial struct AVFilterChannelLayouts - { - } - - /// This struct is incomplete. - public unsafe partial struct AVFilterGraphInternal - { - } - - /// This struct is incomplete. - public unsafe partial struct AVFilterInternal - { - } - - /// This struct is incomplete. 
- public unsafe partial struct AVFilterCommand - { - } - -} diff --git a/FFmpeg.AutoGen/FunctionResolverBase.cs b/FFmpeg.AutoGen/FunctionResolverBase.cs new file mode 100644 index 00000000..65778fd6 --- /dev/null +++ b/FFmpeg.AutoGen/FunctionResolverBase.cs @@ -0,0 +1,92 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen; + +public abstract class FunctionResolverBase : IFunctionResolver +{ + public static readonly Dictionary LibraryDependenciesMap = + new() + { + { "avcodec", new[] { "avutil", "swresample" } }, + { "avdevice", new[] { "avcodec", "avfilter", "avformat", "avutil" } }, + { "avfilter", new[] { "avcodec", "avformat", "avutil", "postproc", "swresample", "swscale" } }, + { "avformat", new[] { "avcodec", "avutil" } }, + { "avutil", new string[] { } }, + { "postproc", new[] { "avutil" } }, + { "swresample", new[] { "avutil" } }, + { "swscale", new[] { "avutil" } } + }; + + private readonly Dictionary _loadedLibraries = new(); + + private readonly object _syncRoot = new(); + + public T GetFunctionDelegate(string libraryName, string functionName, bool throwOnError = true) + { + var nativeLibraryHandle = GetOrLoadLibrary(libraryName, throwOnError); + return GetFunctionDelegate(nativeLibraryHandle, functionName, throwOnError); + } + + public T GetFunctionDelegate(IntPtr nativeLibraryHandle, string functionName, bool throwOnError) + { + var functionPointer = FindFunctionPointer(nativeLibraryHandle, functionName); + + if (functionPointer == IntPtr.Zero) + { + if (throwOnError) throw new EntryPointNotFoundException($"Could not find the entrypoint for {functionName}."); + return default; + } + +#if NETSTANDARD2_0_OR_GREATER + try + { + return Marshal.GetDelegateForFunctionPointer(functionPointer); + } + catch (MarshalDirectiveException) + { + if (throwOnError) + throw; + return default; + } +#else + return 
(T)(object)Marshal.GetDelegateForFunctionPointer(functionPointer, typeof(T)); +#endif + } + + public IntPtr GetOrLoadLibrary(string libraryName, bool throwOnError) + { + if (_loadedLibraries.TryGetValue(libraryName, out var ptr)) return ptr; + + lock (_syncRoot) + { + if (_loadedLibraries.TryGetValue(libraryName, out ptr)) return ptr; + + var dependencies = LibraryDependenciesMap[libraryName]; + dependencies.Where(n => !_loadedLibraries.ContainsKey(n) && !n.Equals(libraryName)) + .ToList() + .ForEach(n => GetOrLoadLibrary(n, false)); + + var version = ffmpeg.LibraryVersionMap[libraryName]; + var nativeLibraryName = GetNativeLibraryName(Path.Combine(ffmpeg.RootPath, libraryName), version); + + ptr = LoadNativeLibrary(nativeLibraryName); + + if (ptr != IntPtr.Zero) _loadedLibraries.Add(libraryName, ptr); + else if (throwOnError) + { + throw new DllNotFoundException( + $"Unable to load DLL '{libraryName}.{version} under {ffmpeg.RootPath}': The specified module could not be found."); + } + + return ptr; + } + } + + protected abstract string GetNativeLibraryName(string libraryName, int version); + protected abstract IntPtr LoadNativeLibrary(string libraryName); + protected abstract IntPtr FindFunctionPointer(IntPtr nativeLibraryHandle, string functionName); +} diff --git a/FFmpeg.AutoGen/FunctionResolverFactory.cs b/FFmpeg.AutoGen/FunctionResolverFactory.cs new file mode 100644 index 00000000..2eae577c --- /dev/null +++ b/FFmpeg.AutoGen/FunctionResolverFactory.cs @@ -0,0 +1,36 @@ +using FFmpeg.AutoGen.Native; +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen; + +public static class FunctionResolverFactory +{ + public static PlatformID GetPlatformId() + { +#if NETSTANDARD2_0_OR_GREATER + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) return PlatformID.Win32NT; + if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) return PlatformID.Unix; + if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) return PlatformID.MacOSX; + throw 
new PlatformNotSupportedException(); +#else + return Environment.OSVersion.Platform; + +#endif + } + + public static IFunctionResolver Create() + { + switch (GetPlatformId()) + { + case PlatformID.MacOSX: + return new MacFunctionResolver(); + case PlatformID.Unix: + return new LinuxFunctionResolver(); + case PlatformID.Win32NT: + return new WindowsFunctionResolver(); + default: + throw new PlatformNotSupportedException(); + } + } +} diff --git a/FFmpeg.AutoGen/IFixedArray.cs b/FFmpeg.AutoGen/IFixedArray.cs new file mode 100644 index 00000000..b9e246d4 --- /dev/null +++ b/FFmpeg.AutoGen/IFixedArray.cs @@ -0,0 +1,13 @@ +namespace FFmpeg.AutoGen; + +public interface IFixedArray +{ + int Length { get; } +} + +internal interface IFixedArray : IFixedArray +{ + T this[uint index] { get; set; } + T[] ToArray(); + void UpdateFrom(T[] array); +} diff --git a/FFmpeg.AutoGen/IFunctionResolver.cs b/FFmpeg.AutoGen/IFunctionResolver.cs new file mode 100644 index 00000000..f40b3fd2 --- /dev/null +++ b/FFmpeg.AutoGen/IFunctionResolver.cs @@ -0,0 +1,24 @@ +namespace FFmpeg.AutoGen; + +/// +/// Supports loading functions from native libraries. Provides a more flexible alternative to P/Invoke. +/// +public interface IFunctionResolver +{ + /// + /// Creates a delegate which invokes a native function. + /// + /// + /// The function delegate. + /// + /// + /// The library name which contains the function. + /// + /// + /// The name of the function for which to create the delegate. + /// + /// + /// A new delegate which points to the native function. 
+ /// + T GetFunctionDelegate(string libraryName, string functionName, bool throwOnError = true); +} diff --git a/FFmpeg.AutoGen/Native/FunctionLoader.cs b/FFmpeg.AutoGen/Native/FunctionLoader.cs deleted file mode 100644 index dff173f3..00000000 --- a/FFmpeg.AutoGen/Native/FunctionLoader.cs +++ /dev/null @@ -1,79 +0,0 @@ -using System; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen.Native -{ - /// - /// Supports loading functions from native libraries. Provides a more flexible alternative to P/Invoke. - /// - public static class FunctionLoader - { - /// - /// Creates a delegate which invokes a native function. - /// - /// - /// The function delegate. - /// - /// - /// The native library which contains the function. - /// - /// - /// The name of the function for which to create the delegate. - /// - /// - /// A new delegate which points to the native function. - /// - internal static T GetFunctionDelegate(IntPtr nativeLibraryHandle, string functionName, bool throwOnError = true) - { - var ptr = GetFunctionPointer(nativeLibraryHandle, functionName); - - if (ptr == IntPtr.Zero) - { - if (throwOnError) throw new EntryPointNotFoundException($"Could not find the entrypoint for {functionName}."); - return default(T); - } - -#if NET45 || NET40 - return (T)(object)Marshal.GetDelegateForFunctionPointer(ptr, typeof(T)); -#else - try - { - return Marshal.GetDelegateForFunctionPointer(ptr); - } - catch (MarshalDirectiveException) - { - if (throwOnError) - throw; - return default(T); - } -#endif - } - - private static IntPtr GetFunctionPointer(IntPtr nativeLibraryHandle, string functionName) - { -#if NET45 || NET40 - switch (LibraryLoader.GetPlatformId()) - { - case PlatformID.MacOSX: - return MacNativeMethods.dlsym(nativeLibraryHandle, functionName); - case PlatformID.Unix: - return LinuxNativeMethods.dlsym(nativeLibraryHandle, functionName); - case PlatformID.Win32NT: - case PlatformID.Win32S: - case PlatformID.Win32Windows: - return 
WindowsNativeMethods.GetProcAddress(nativeLibraryHandle, functionName); - default: - throw new PlatformNotSupportedException(); - } -#else - if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) - return LinuxNativeMethods.dlsym(nativeLibraryHandle, functionName); - if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) - return MacNativeMethods.dlsym(nativeLibraryHandle, functionName); - if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) - return WindowsNativeMethods.GetProcAddress(nativeLibraryHandle, functionName); - throw new PlatformNotSupportedException(); -#endif - } - } -} \ No newline at end of file diff --git a/FFmpeg.AutoGen/Native/LibraryLoader.cs b/FFmpeg.AutoGen/Native/LibraryLoader.cs deleted file mode 100644 index 1206e2d4..00000000 --- a/FFmpeg.AutoGen/Native/LibraryLoader.cs +++ /dev/null @@ -1,101 +0,0 @@ -using System; -using System.IO; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen.Native -{ - public delegate PlatformID GetPlatformId(); - - public delegate string GetNativeLibraryName(string libraryName, int version); - - public static class LibraryLoader - { - static LibraryLoader() - { - GetPlatformId = () => - { -#if NET45 || NET40 - return Environment.OSVersion.Platform; -#else - if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) return PlatformID.Win32NT; - if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) return PlatformID.Unix; - if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) return PlatformID.MacOSX; - throw new PlatformNotSupportedException(); -#endif - }; - - switch (GetPlatformId()) - { - case PlatformID.MacOSX: - GetNativeLibraryName = (libraryName, version) => $"lib{libraryName}.{version}.dylib"; - break; - case PlatformID.Unix: - GetNativeLibraryName = (libraryName, version) => $"lib{libraryName}.so.{version}"; - break; - case PlatformID.Win32NT: - case PlatformID.Win32S: - case PlatformID.Win32Windows: - GetNativeLibraryName = (libraryName, version) => $"{libraryName}-{version}.dll"; - 
break; - default: - throw new PlatformNotSupportedException(); - } - } - - public static GetPlatformId GetPlatformId; - - public static GetNativeLibraryName GetNativeLibraryName; - - /// - /// Attempts to load a native library using platform nammig convention. - /// - /// Path of the library. - /// Name of the library. - /// Version of the library. - /// - /// A handle to the library when found; otherwise, . - /// - /// - /// This function may return a null handle. If it does, individual functions loaded from it will throw a - /// DllNotFoundException, - /// but not until an attempt is made to actually use the function (rather than load it). This matches how PInvokes - /// behave. - /// - public static IntPtr LoadNativeLibrary(string path, string libraryName, int version) - { - var nativeLibraryName = GetNativeLibraryName(libraryName, version); - var fullName = Path.Combine(path, nativeLibraryName); - return LoadNativeLibrary(fullName); - } - - /// - /// Attempts to load a native library. - /// - /// Name of the library. - /// - /// A handle to the library when found; otherwise, . - /// - /// - /// This function may return a null handle. If it does, individual functions loaded from it will throw a - /// DllNotFoundException, - /// but not until an attempt is made to actually use the function (rather than load it). This matches how PInvokes - /// behave. 
- /// - public static IntPtr LoadNativeLibrary(string libraryName) - { - switch (GetPlatformId()) - { - case PlatformID.MacOSX: - return MacNativeMethods.dlopen(libraryName, MacNativeMethods.RTLD_NOW); - case PlatformID.Unix: - return LinuxNativeMethods.dlopen(libraryName, LinuxNativeMethods.RTLD_NOW); - case PlatformID.Win32NT: - case PlatformID.Win32S: - case PlatformID.Win32Windows: - return WindowsNativeMethods.LoadLibrary(libraryName); - default: - throw new PlatformNotSupportedException(); - } - } - } -} \ No newline at end of file diff --git a/FFmpeg.AutoGen/Native/LinuxFunctionResolver.cs b/FFmpeg.AutoGen/Native/LinuxFunctionResolver.cs new file mode 100644 index 00000000..cb0928cf --- /dev/null +++ b/FFmpeg.AutoGen/Native/LinuxFunctionResolver.cs @@ -0,0 +1,24 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Native; + +public class LinuxFunctionResolver : FunctionResolverBase +{ + private const string Libdl = "libdl.so.2"; + + private const int RTLD_NOW = 0x002; + + protected override string GetNativeLibraryName(string libraryName, int version) => $"lib{libraryName}.so.{version}"; + + protected override IntPtr LoadNativeLibrary(string libraryName) => dlopen(libraryName, RTLD_NOW); + + protected override IntPtr FindFunctionPointer(IntPtr nativeLibraryHandle, string functionName) => dlsym(nativeLibraryHandle, functionName); + + + [DllImport(Libdl)] + public static extern IntPtr dlsym(IntPtr handle, string symbol); + + [DllImport(Libdl)] + public static extern IntPtr dlopen(string fileName, int flag); +} diff --git a/FFmpeg.AutoGen/Native/LinuxNativeMethods.cs b/FFmpeg.AutoGen/Native/LinuxNativeMethods.cs deleted file mode 100644 index c04e814b..00000000 --- a/FFmpeg.AutoGen/Native/LinuxNativeMethods.cs +++ /dev/null @@ -1,18 +0,0 @@ -using System; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen.Native -{ - public static class LinuxNativeMethods - { - public const int RTLD_NOW = 0x002; - - private const 
string Libdl = "libdl.so.2"; - - [DllImport(Libdl)] - public static extern IntPtr dlsym(IntPtr handle, string symbol); - - [DllImport(Libdl)] - public static extern IntPtr dlopen(string fileName, int flag); - } -} \ No newline at end of file diff --git a/FFmpeg.AutoGen/Native/MacFunctionResolver.cs b/FFmpeg.AutoGen/Native/MacFunctionResolver.cs new file mode 100644 index 00000000..b99d68d7 --- /dev/null +++ b/FFmpeg.AutoGen/Native/MacFunctionResolver.cs @@ -0,0 +1,22 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Native; + +public class MacFunctionResolver : FunctionResolverBase +{ + private const string Libdl = "libdl"; + + private const int RTLD_NOW = 0x002; + + protected override string GetNativeLibraryName(string libraryName, int version) => $"lib{libraryName}.{version}.dylib"; + protected override IntPtr LoadNativeLibrary(string libraryName) => dlopen(libraryName, RTLD_NOW); + protected override IntPtr FindFunctionPointer(IntPtr nativeLibraryHandle, string functionName) => dlsym(nativeLibraryHandle, functionName); + + + [DllImport(Libdl)] + public static extern IntPtr dlsym(IntPtr handle, string symbol); + + [DllImport(Libdl)] + public static extern IntPtr dlopen(string fileName, int flag); +} diff --git a/FFmpeg.AutoGen/Native/MacNativeMethods.cs b/FFmpeg.AutoGen/Native/MacNativeMethods.cs deleted file mode 100644 index 4567e59b..00000000 --- a/FFmpeg.AutoGen/Native/MacNativeMethods.cs +++ /dev/null @@ -1,18 +0,0 @@ -using System; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen.Native -{ - public static class MacNativeMethods - { - public const int RTLD_NOW = 0x002; - - private const string Libdl = "libdl"; - - [DllImport(Libdl)] - public static extern IntPtr dlsym(IntPtr handle, string symbol); - - [DllImport(Libdl)] - public static extern IntPtr dlopen(string fileName, int flag); - } -} \ No newline at end of file diff --git a/FFmpeg.AutoGen/Native/WindowsFunctionResolver.cs 
b/FFmpeg.AutoGen/Native/WindowsFunctionResolver.cs new file mode 100644 index 00000000..d97832d5 --- /dev/null +++ b/FFmpeg.AutoGen/Native/WindowsFunctionResolver.cs @@ -0,0 +1,60 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen.Native; + +public class WindowsFunctionResolver : FunctionResolverBase +{ + private const string Kernel32 = "kernel32"; + + protected override string GetNativeLibraryName(string libraryName, int version) => $"{libraryName}-{version}.dll"; + + protected override IntPtr LoadNativeLibrary(string libraryName) => LoadLibrary(libraryName); + protected override IntPtr FindFunctionPointer(IntPtr nativeLibraryHandle, string functionName) => GetProcAddress(nativeLibraryHandle, functionName); + + + [DllImport(Kernel32, CharSet = CharSet.Ansi, BestFitMapping = false)] + public static extern IntPtr GetProcAddress(IntPtr hModule, string lpProcName); + + /// + /// Loads the specified module into the address space of the calling process. The specified module may cause other + /// modules to be loaded. + /// + /// + /// + /// The name of the module. This can be either a library module (a .dll file) or an executable module (an + /// .exe file). + /// The name specified is the file name of the module and is not related to the name stored in the library module + /// itself, + /// as specified by the LIBRARY keyword in the module-definition (.def) file. + /// + /// + /// If the string specifies a full path, the function searches only that path for the module. + /// + /// + /// If the string specifies a relative path or a module name without a path, the function uses a standard search + /// strategy + /// to find the module; for more information, see the Remarks. + /// + /// + /// If the function cannot find the module, the function fails. When specifying a path, be sure to use backslashes + /// (\), + /// not forward slashes (/). For more information about paths, see Naming a File or Directory. 
+ /// + /// + /// If the string specifies a module name without a path and the file name extension is omitted, the function + /// appends the + /// default library extension .dll to the module name. To prevent the function from appending .dll to + /// the module name, + /// include a trailing point character (.) in the module name string. + /// + /// + /// + /// If the function succeeds, the return value is a handle to the module. + /// If the function fails, the return value is . To get extended error information, call + /// . + /// + /// + [DllImport(Kernel32, SetLastError = true)] + public static extern IntPtr LoadLibrary(string dllToLoad); +} diff --git a/FFmpeg.AutoGen/Native/WindowsNativeMethods.cs b/FFmpeg.AutoGen/Native/WindowsNativeMethods.cs deleted file mode 100644 index 80de559a..00000000 --- a/FFmpeg.AutoGen/Native/WindowsNativeMethods.cs +++ /dev/null @@ -1,48 +0,0 @@ -using System; -using System.Runtime.InteropServices; - -namespace FFmpeg.AutoGen.Native -{ - public static class WindowsNativeMethods - { - private const string Kernel32 = "kernel32"; - - [DllImport(Kernel32, CharSet = CharSet.Ansi, BestFitMapping = false)] - public static extern IntPtr GetProcAddress(IntPtr hModule, string lpProcName); - - /// - /// Loads the specified module into the address space of the calling process. The specified module may cause other modules to be loaded. - /// - /// - /// - /// The name of the module. This can be either a library module (a .dll file) or an executable module (an .exe file). - /// The name specified is the file name of the module and is not related to the name stored in the library module itself, - /// as specified by the LIBRARY keyword in the module-definition (.def) file. - /// - /// - /// If the string specifies a full path, the function searches only that path for the module. 
- /// - /// - /// If the string specifies a relative path or a module name without a path, the function uses a standard search strategy - /// to find the module; for more information, see the Remarks. - /// - /// - /// If the function cannot find the module, the function fails. When specifying a path, be sure to use backslashes (\), - /// not forward slashes (/). For more information about paths, see Naming a File or Directory. - /// - /// - /// If the string specifies a module name without a path and the file name extension is omitted, the function appends the - /// default library extension .dll to the module name. To prevent the function from appending .dll to the module name, - /// include a trailing point character (.) in the module name string. - /// - /// - /// - /// If the function succeeds, the return value is a handle to the module. - /// If the function fails, the return value is . To get extended error information, call - /// . - /// - /// - [DllImport(Kernel32, SetLastError = true)] - public static extern IntPtr LoadLibrary(string dllToLoad); - } -} \ No newline at end of file diff --git a/FFmpeg.AutoGen/UTF8Marshaler.cs b/FFmpeg.AutoGen/UTF8Marshaler.cs index 78500a78..ba024745 100644 --- a/FFmpeg.AutoGen/UTF8Marshaler.cs +++ b/FFmpeg.AutoGen/UTF8Marshaler.cs @@ -4,22 +4,16 @@ namespace FFmpeg.AutoGen; -#if NETSTANDARD2_1_OR_GREATER -#else public class UTF8Marshaler : ICustomMarshaler { - private static readonly UTF8Marshaler Instance = new(); - public virtual object MarshalNativeToManaged(IntPtr pNativeData) => FromNative(Encoding.UTF8, pNativeData); public virtual IntPtr MarshalManagedToNative(object managedObj) { if (managedObj == null) return IntPtr.Zero; - - var str = managedObj as string; - - if (str == null) + + if (managedObj is not string str) throw new MarshalDirectiveException($"{GetType().Name} must be used on a string."); return FromManaged(Encoding.UTF8, str); @@ -27,7 +21,7 @@ public virtual IntPtr MarshalManagedToNative(object 
managedObj) public virtual void CleanUpNativeData(IntPtr pNativeData) { - //Free anything allocated by MarshalManagedtoNative + //Free anything allocated by MarshalManagedToNative //This is called after the native function call completes if (pNativeData != IntPtr.Zero) @@ -40,13 +34,8 @@ public void CleanUpManagedData(object managedObj) //This is called after the native function call completes } - public int GetNativeDataSize() => - // Not a value type - -1; - - public static ICustomMarshaler GetInstance(string cookie) => Instance; - - + public int GetNativeDataSize() => -1; // Not a value type + public static unsafe string FromNative(Encoding encoding, IntPtr pNativeData) => FromNative(encoding, (byte*)pNativeData); public static unsafe string FromNative(Encoding encoding, byte* pNativeData) @@ -87,4 +76,3 @@ public static unsafe IntPtr FromManaged(Encoding encoding, string value) return new IntPtr(buffer); } } -#endif diff --git a/FFmpeg.AutoGen/generated/Arrays.g.cs b/FFmpeg.AutoGen/generated/Arrays.g.cs new file mode 100644 index 00000000..edc086d3 --- /dev/null +++ b/FFmpeg.AutoGen/generated/Arrays.g.cs @@ -0,0 +1,576 @@ +using System; + +namespace FFmpeg.AutoGen; + +public unsafe struct AVRational_array2 : IFixedArray +{ + public static readonly int Size = 2; + public int Length => 2; + AVRational _0; AVRational _1; + + public AVRational this[uint i] + { + get { if (i >= 2) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 2) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { *(p0 + i) = value; } } + } + public AVRational[] ToArray() + { + fixed (AVRational* p0 = &_0) { var a = new AVRational[2]; for (uint i = 0; i < 2; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVRational[] array) + { + fixed (AVRational* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 2) return; } } + } + public static implicit operator 
AVRational[](AVRational_array2 @struct) => @struct.ToArray(); +} + +public unsafe struct short_array2 : IFixedArray +{ + public static readonly int Size = 2; + public int Length => 2; + fixed short _[2]; + + public short this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public short[] ToArray() + { + var a = new short[2]; for (uint i = 0; i < 2; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(short[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 2) return; } + } + public static implicit operator short[](short_array2 @struct) => @struct.ToArray(); +} + +public unsafe struct void_ptrArray2 : IFixedArray +{ + public static readonly int Size = 2; + public int Length => 2; + void* _0; void* _1; + + public void* this[uint i] + { + get { if (i >= 2) throw new ArgumentOutOfRangeException(); fixed (void** p0 = &_0) { return *(p0 + i); } } + set { if (i >= 2) throw new ArgumentOutOfRangeException(); fixed (void** p0 = &_0) { *(p0 + i) = value; } } + } + public void*[] ToArray() + { + fixed (void** p0 = &_0) { var a = new void*[2]; for (uint i = 0; i < 2; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(void*[] array) + { + fixed (void** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 2) return; } } + } + public static implicit operator void*[](void_ptrArray2 @struct) => @struct.ToArray(); +} + +public unsafe struct AVHDRPlusColorTransformParams_array3 : IFixedArray +{ + public static readonly int Size = 3; + public int Length => 3; + AVHDRPlusColorTransformParams _0; AVHDRPlusColorTransformParams _1; AVHDRPlusColorTransformParams _2; + + public AVHDRPlusColorTransformParams this[uint i] + { + get { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (AVHDRPlusColorTransformParams* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (AVHDRPlusColorTransformParams* p0 = &_0) { *(p0 + i) = value; } } + } + public 
AVHDRPlusColorTransformParams[] ToArray() + { + fixed (AVHDRPlusColorTransformParams* p0 = &_0) { var a = new AVHDRPlusColorTransformParams[3]; for (uint i = 0; i < 3; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVHDRPlusColorTransformParams[] array) + { + fixed (AVHDRPlusColorTransformParams* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 3) return; } } + } + public static implicit operator AVHDRPlusColorTransformParams[](AVHDRPlusColorTransformParams_array3 @struct) => @struct.ToArray(); +} + +public unsafe struct AVRational_array3 : IFixedArray +{ + public static readonly int Size = 3; + public int Length => 3; + AVRational _0; AVRational _1; AVRational _2; + + public AVRational this[uint i] + { + get { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { *(p0 + i) = value; } } + } + public AVRational[] ToArray() + { + fixed (AVRational* p0 = &_0) { var a = new AVRational[3]; for (uint i = 0; i < 3; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVRational[] array) + { + fixed (AVRational* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 3) return; } } + } + public static implicit operator AVRational[](AVRational_array3 @struct) => @struct.ToArray(); +} + +public unsafe struct AVRational_array3x2 : IFixedArray +{ + public static readonly int Size = 3; + public int Length => 3; + AVRational_array2 _0; AVRational_array2 _1; AVRational_array2 _2; + + public AVRational_array2 this[uint i] + { + get { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (AVRational_array2* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (AVRational_array2* p0 = &_0) { *(p0 + i) = value; } } + } + public AVRational_array2[] ToArray() + { + fixed (AVRational_array2* p0 = &_0) { 
var a = new AVRational_array2[3]; for (uint i = 0; i < 3; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVRational_array2[] array) + { + fixed (AVRational_array2* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 3) return; } } + } + public static implicit operator AVRational_array2[](AVRational_array3x2 @struct) => @struct.ToArray(); +} + +public unsafe struct byte_ptrArray3 : IFixedArray +{ + public static readonly int Size = 3; + public int Length => 3; + byte* _0; byte* _1; byte* _2; + + public byte* this[uint i] + { + get { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { return *(p0 + i); } } + set { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { *(p0 + i) = value; } } + } + public byte*[] ToArray() + { + fixed (byte** p0 = &_0) { var a = new byte*[3]; for (uint i = 0; i < 3; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(byte*[] array) + { + fixed (byte** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 3) return; } } + } + public static implicit operator byte*[](byte_ptrArray3 @struct) => @struct.ToArray(); +} + +public unsafe struct int_array3 : IFixedArray +{ + public static readonly int Size = 3; + public int Length => 3; + fixed int _[3]; + + public int this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public int[] ToArray() + { + var a = new int[3]; for (uint i = 0; i < 3; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(int[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 3) return; } + } + public static implicit operator int[](int_array3 @struct) => @struct.ToArray(); +} + +public unsafe struct short_array3x2 : IFixedArray +{ + public static readonly int Size = 3; + public int Length => 3; + short_array2 _0; short_array2 _1; short_array2 _2; + + public short_array2 this[uint i] + { + get { if (i >= 3) throw new 
ArgumentOutOfRangeException(); fixed (short_array2* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 3) throw new ArgumentOutOfRangeException(); fixed (short_array2* p0 = &_0) { *(p0 + i) = value; } } + } + public short_array2[] ToArray() + { + fixed (short_array2* p0 = &_0) { var a = new short_array2[3]; for (uint i = 0; i < 3; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(short_array2[] array) + { + fixed (short_array2* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 3) return; } } + } + public static implicit operator short_array2[](short_array3x2 @struct) => @struct.ToArray(); +} + +public unsafe struct AVComponentDescriptor_array4 : IFixedArray +{ + public static readonly int Size = 4; + public int Length => 4; + AVComponentDescriptor _0; AVComponentDescriptor _1; AVComponentDescriptor _2; AVComponentDescriptor _3; + + public AVComponentDescriptor this[uint i] + { + get { if (i >= 4) throw new ArgumentOutOfRangeException(); fixed (AVComponentDescriptor* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 4) throw new ArgumentOutOfRangeException(); fixed (AVComponentDescriptor* p0 = &_0) { *(p0 + i) = value; } } + } + public AVComponentDescriptor[] ToArray() + { + fixed (AVComponentDescriptor* p0 = &_0) { var a = new AVComponentDescriptor[4]; for (uint i = 0; i < 4; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVComponentDescriptor[] array) + { + fixed (AVComponentDescriptor* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 4) return; } } + } + public static implicit operator AVComponentDescriptor[](AVComponentDescriptor_array4 @struct) => @struct.ToArray(); +} + +public unsafe struct byte_ptrArray4 : IFixedArray +{ + public static readonly int Size = 4; + public int Length => 4; + byte* _0; byte* _1; byte* _2; byte* _3; + + public byte* this[uint i] + { + get { if (i >= 4) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { return *(p0 + 
i); } } + set { if (i >= 4) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { *(p0 + i) = value; } } + } + public byte*[] ToArray() + { + fixed (byte** p0 = &_0) { var a = new byte*[4]; for (uint i = 0; i < 4; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(byte*[] array) + { + fixed (byte** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 4) return; } } + } + public static implicit operator byte*[](byte_ptrArray4 @struct) => @struct.ToArray(); +} + +public unsafe struct int_array4 : IFixedArray +{ + public static readonly int Size = 4; + public int Length => 4; + fixed int _[4]; + + public int this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public int[] ToArray() + { + var a = new int[4]; for (uint i = 0; i < 4; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(int[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 4) return; } + } + public static implicit operator int[](int_array4 @struct) => @struct.ToArray(); +} + +public unsafe struct long_array4 : IFixedArray +{ + public static readonly int Size = 4; + public int Length => 4; + fixed long _[4]; + + public long this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public long[] ToArray() + { + var a = new long[4]; for (uint i = 0; i < 4; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(long[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 4) return; } + } + public static implicit operator long[](long_array4 @struct) => @struct.ToArray(); +} + +public unsafe struct ulong_array4 : IFixedArray +{ + public static readonly int Size = 4; + public int Length => 4; + fixed ulong _[4]; + + public ulong this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public ulong[] ToArray() + { + var a = new ulong[4]; for (uint i = 0; i < 4; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(ulong[] array) + { + uint i = 0; foreach(var value in array) { 
_[i++] = value; if (i >= 4) return; } + } + public static implicit operator ulong[](ulong_array4 @struct) => @struct.ToArray(); +} + +public unsafe struct int_array7 : IFixedArray +{ + public static readonly int Size = 7; + public int Length => 7; + fixed int _[7]; + + public int this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public int[] ToArray() + { + var a = new int[7]; for (uint i = 0; i < 7; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(int[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 7) return; } + } + public static implicit operator int[](int_array7 @struct) => @struct.ToArray(); +} + +public unsafe struct AVBufferRef_ptrArray8 : IFixedArray +{ + public static readonly int Size = 8; + public int Length => 8; + AVBufferRef* _0; AVBufferRef* _1; AVBufferRef* _2; AVBufferRef* _3; AVBufferRef* _4; AVBufferRef* _5; AVBufferRef* _6; AVBufferRef* _7; + + public AVBufferRef* this[uint i] + { + get { if (i >= 8) throw new ArgumentOutOfRangeException(); fixed (AVBufferRef** p0 = &_0) { return *(p0 + i); } } + set { if (i >= 8) throw new ArgumentOutOfRangeException(); fixed (AVBufferRef** p0 = &_0) { *(p0 + i) = value; } } + } + public AVBufferRef*[] ToArray() + { + fixed (AVBufferRef** p0 = &_0) { var a = new AVBufferRef*[8]; for (uint i = 0; i < 8; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVBufferRef*[] array) + { + fixed (AVBufferRef** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 8) return; } } + } + public static implicit operator AVBufferRef*[](AVBufferRef_ptrArray8 @struct) => @struct.ToArray(); +} + +public unsafe struct byte_ptrArray8 : IFixedArray +{ + public static readonly int Size = 8; + public int Length => 8; + byte* _0; byte* _1; byte* _2; byte* _3; byte* _4; byte* _5; byte* _6; byte* _7; + + public byte* this[uint i] + { + get { if (i >= 8) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { return *(p0 + i); } } + 
set { if (i >= 8) throw new ArgumentOutOfRangeException(); fixed (byte** p0 = &_0) { *(p0 + i) = value; } } + } + public byte*[] ToArray() + { + fixed (byte** p0 = &_0) { var a = new byte*[8]; for (uint i = 0; i < 8; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(byte*[] array) + { + fixed (byte** p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 8) return; } } + } + public static implicit operator byte*[](byte_ptrArray8 @struct) => @struct.ToArray(); +} + +public unsafe struct byte_array8 : IFixedArray +{ + public static readonly int Size = 8; + public int Length => 8; + fixed byte _[8]; + + public byte this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public byte[] ToArray() + { + var a = new byte[8]; for (uint i = 0; i < 8; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(byte[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 8) return; } + } + public static implicit operator byte[](byte_array8 @struct) => @struct.ToArray(); +} + +public unsafe struct int_array8 : IFixedArray +{ + public static readonly int Size = 8; + public int Length => 8; + fixed int _[8]; + + public int this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public int[] ToArray() + { + var a = new int[8]; for (uint i = 0; i < 8; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(int[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 8) return; } + } + public static implicit operator int[](int_array8 @struct) => @struct.ToArray(); +} + +public unsafe struct ulong_array8 : IFixedArray +{ + public static readonly int Size = 8; + public int Length => 8; + fixed ulong _[8]; + + public ulong this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public ulong[] ToArray() + { + var a = new ulong[8]; for (uint i = 0; i < 8; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(ulong[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = 
value; if (i >= 8) return; } + } + public static implicit operator ulong[](ulong_array8 @struct) => @struct.ToArray(); +} + +public unsafe struct AVHDRPlusPercentile_array15 : IFixedArray +{ + public static readonly int Size = 15; + public int Length => 15; + AVHDRPlusPercentile _0; AVHDRPlusPercentile _1; AVHDRPlusPercentile _2; AVHDRPlusPercentile _3; AVHDRPlusPercentile _4; AVHDRPlusPercentile _5; AVHDRPlusPercentile _6; AVHDRPlusPercentile _7; AVHDRPlusPercentile _8; AVHDRPlusPercentile _9; AVHDRPlusPercentile _10; AVHDRPlusPercentile _11; AVHDRPlusPercentile _12; AVHDRPlusPercentile _13; AVHDRPlusPercentile _14; + + public AVHDRPlusPercentile this[uint i] + { + get { if (i >= 15) throw new ArgumentOutOfRangeException(); fixed (AVHDRPlusPercentile* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 15) throw new ArgumentOutOfRangeException(); fixed (AVHDRPlusPercentile* p0 = &_0) { *(p0 + i) = value; } } + } + public AVHDRPlusPercentile[] ToArray() + { + fixed (AVHDRPlusPercentile* p0 = &_0) { var a = new AVHDRPlusPercentile[15]; for (uint i = 0; i < 15; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVHDRPlusPercentile[] array) + { + fixed (AVHDRPlusPercentile* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 15) return; } } + } + public static implicit operator AVHDRPlusPercentile[](AVHDRPlusPercentile_array15 @struct) => @struct.ToArray(); +} + +public unsafe struct AVRational_array15 : IFixedArray +{ + public static readonly int Size = 15; + public int Length => 15; + AVRational _0; AVRational _1; AVRational _2; AVRational _3; AVRational _4; AVRational _5; AVRational _6; AVRational _7; AVRational _8; AVRational _9; AVRational _10; AVRational _11; AVRational _12; AVRational _13; AVRational _14; + + public AVRational this[uint i] + { + get { if (i >= 15) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 15) throw new ArgumentOutOfRangeException(); 
fixed (AVRational* p0 = &_0) { *(p0 + i) = value; } } + } + public AVRational[] ToArray() + { + fixed (AVRational* p0 = &_0) { var a = new AVRational[15]; for (uint i = 0; i < 15; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVRational[] array) + { + fixed (AVRational* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 15) return; } } + } + public static implicit operator AVRational[](AVRational_array15 @struct) => @struct.ToArray(); +} + +public unsafe struct byte_array16 : IFixedArray +{ + public static readonly int Size = 16; + public int Length => 16; + fixed byte _[16]; + + public byte this[uint i] + { + get => _[i]; + set => _[i] = value; + } + public byte[] ToArray() + { + var a = new byte[16]; for (uint i = 0; i < 16; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(byte[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 16) return; } + } + public static implicit operator byte[](byte_array16 @struct) => @struct.ToArray(); +} + +public unsafe struct AVRational_array25 : IFixedArray +{ + public static readonly int Size = 25; + public int Length => 25; + AVRational _0; AVRational _1; AVRational _2; AVRational _3; AVRational _4; AVRational _5; AVRational _6; AVRational _7; AVRational _8; AVRational _9; AVRational _10; AVRational _11; AVRational _12; AVRational _13; AVRational _14; AVRational _15; AVRational _16; AVRational _17; AVRational _18; AVRational _19; AVRational _20; AVRational _21; AVRational _22; AVRational _23; AVRational _24; + + public AVRational this[uint i] + { + get { if (i >= 25) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 25) throw new ArgumentOutOfRangeException(); fixed (AVRational* p0 = &_0) { *(p0 + i) = value; } } + } + public AVRational[] ToArray() + { + fixed (AVRational* p0 = &_0) { var a = new AVRational[25]; for (uint i = 0; i < 25; i++) a[i] = *(p0 + i); return a; } + } + 
public void UpdateFrom(AVRational[] array) + { + fixed (AVRational* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 25) return; } } + } + public static implicit operator AVRational[](AVRational_array25 @struct) => @struct.ToArray(); +} + +public unsafe struct AVRational_array25x25 : IFixedArray +{ + public static readonly int Size = 25; + public int Length => 25; + AVRational_array25 _0; AVRational_array25 _1; AVRational_array25 _2; AVRational_array25 _3; AVRational_array25 _4; AVRational_array25 _5; AVRational_array25 _6; AVRational_array25 _7; AVRational_array25 _8; AVRational_array25 _9; AVRational_array25 _10; AVRational_array25 _11; AVRational_array25 _12; AVRational_array25 _13; AVRational_array25 _14; AVRational_array25 _15; AVRational_array25 _16; AVRational_array25 _17; AVRational_array25 _18; AVRational_array25 _19; AVRational_array25 _20; AVRational_array25 _21; AVRational_array25 _22; AVRational_array25 _23; AVRational_array25 _24; + + public AVRational_array25 this[uint i] + { + get { if (i >= 25) throw new ArgumentOutOfRangeException(); fixed (AVRational_array25* p0 = &_0) { return *(p0 + i); } } + set { if (i >= 25) throw new ArgumentOutOfRangeException(); fixed (AVRational_array25* p0 = &_0) { *(p0 + i) = value; } } + } + public AVRational_array25[] ToArray() + { + fixed (AVRational_array25* p0 = &_0) { var a = new AVRational_array25[25]; for (uint i = 0; i < 25; i++) a[i] = *(p0 + i); return a; } + } + public void UpdateFrom(AVRational_array25[] array) + { + fixed (AVRational_array25* p0 = &_0) { uint i = 0; foreach(var value in array) { *(p0 + i++) = value; if (i >= 25) return; } } + } + public static implicit operator AVRational_array25[](AVRational_array25x25 @struct) => @struct.ToArray(); +} + +public unsafe struct byte_array61440 : IFixedArray +{ + public static readonly int Size = 61440; + public int Length => 61440; + fixed byte _[61440]; + + public byte this[uint i] + { + get => _[i]; + set => _[i] = 
value; + } + public byte[] ToArray() + { + var a = new byte[61440]; for (uint i = 0; i < 61440; i++) a[i] = _[i]; return a; + } + public void UpdateFrom(byte[] array) + { + uint i = 0; foreach(var value in array) { _[i++] = value; if (i >= 61440) return; } + } + public static implicit operator byte[](byte_array61440 @struct) => @struct.ToArray(); +} + diff --git a/FFmpeg.AutoGen/generated/Delegates.g.cs b/FFmpeg.AutoGen/generated/Delegates.g.cs new file mode 100644 index 00000000..9989085d --- /dev/null +++ b/FFmpeg.AutoGen/generated/Delegates.g.cs @@ -0,0 +1,707 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen; + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int _query_func (AVFilterContext* @p0); +public unsafe struct _query_func_func +{ + public IntPtr Pointer; + public static implicit operator _query_func_func(_query_func func) => new _query_func_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void av_buffer_create_free (void* @opaque, byte* @data); +public unsafe struct av_buffer_create_free_func +{ + public IntPtr Pointer; + public static implicit operator av_buffer_create_free_func(av_buffer_create_free func) => new av_buffer_create_free_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate AVBufferRef* av_buffer_pool_init_alloc (ulong @size); +public unsafe struct av_buffer_pool_init_alloc_func +{ + public IntPtr Pointer; + public static implicit operator av_buffer_pool_init_alloc_func(av_buffer_pool_init_alloc func) => new av_buffer_pool_init_alloc_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate AVBufferRef* av_buffer_pool_init2_alloc (void* @opaque, ulong @size); +public unsafe struct av_buffer_pool_init2_alloc_func +{ + public IntPtr Pointer; + public static implicit operator av_buffer_pool_init2_alloc_func(av_buffer_pool_init2_alloc func) => new av_buffer_pool_init2_alloc_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void av_buffer_pool_init2_pool_free (void* @opaque); +public unsafe struct av_buffer_pool_init2_pool_free_func +{ + public IntPtr Pointer; + public static implicit operator av_buffer_pool_init2_pool_free_func(av_buffer_pool_init2_pool_free func) => new av_buffer_pool_init2_pool_free_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void av_log_set_callback_callback (void* @p0, int @p1, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @p2, byte* @p3); +public unsafe struct av_log_set_callback_callback_func +{ + public IntPtr Pointer; + public static implicit operator av_log_set_callback_callback_func(av_log_set_callback_callback func) => new av_log_set_callback_callback_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int av_tree_enumerate_cmp (void* @opaque, void* @elem); +public unsafe struct av_tree_enumerate_cmp_func +{ + public IntPtr Pointer; + public static implicit operator av_tree_enumerate_cmp_func(av_tree_enumerate_cmp func) => new av_tree_enumerate_cmp_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int av_tree_enumerate_enu (void* @opaque, void* @elem); +public unsafe struct av_tree_enumerate_enu_func +{ + public IntPtr Pointer; + public static implicit operator av_tree_enumerate_enu_func(av_tree_enumerate_enu func) => new av_tree_enumerate_enu_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int av_tree_find_cmp (void* @key, void* @b); +public unsafe struct av_tree_find_cmp_func +{ + public IntPtr Pointer; + public static implicit operator av_tree_find_cmp_func(av_tree_find_cmp func) => new av_tree_find_cmp_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int av_tree_insert_cmp (void* @key, void* @b); +public unsafe struct av_tree_insert_cmp_func +{ + public IntPtr Pointer; + public static implicit operator av_tree_insert_cmp_func(av_tree_insert_cmp func) => new av_tree_insert_cmp_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate AVClass* AVClass_child_class_iterate (void** @iter); +public unsafe struct AVClass_child_class_iterate_func +{ + public IntPtr Pointer; + public static implicit operator AVClass_child_class_iterate_func(AVClass_child_class_iterate func) => new AVClass_child_class_iterate_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void* AVClass_child_next (void* @obj, void* @prev); +public unsafe struct AVClass_child_next_func +{ + public IntPtr Pointer; + public static implicit operator AVClass_child_next_func(AVClass_child_next func) => new AVClass_child_next_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate AVClassCategory AVClass_get_category (void* @ctx); +public unsafe struct AVClass_get_category_func +{ + public IntPtr Pointer; + public static implicit operator AVClass_get_category_func(AVClass_get_category func) => new AVClass_get_category_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate string AVClass_item_name (void* @ctx); +public unsafe struct AVClass_item_name_func +{ + public IntPtr Pointer; + public static implicit operator AVClass_item_name_func(AVClass_item_name func) => new AVClass_item_name_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVClass_query_ranges (AVOptionRanges** @p0, void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, int @flags); +public unsafe struct AVClass_query_ranges_func +{ + public IntPtr Pointer; + public static implicit operator AVClass_query_ranges_func(AVClass_query_ranges func) => new AVClass_query_ranges_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int avcodec_default_execute_func (AVCodecContext* @c2, void* @arg2); +public unsafe struct avcodec_default_execute_func_func +{ + public IntPtr Pointer; + public static implicit operator avcodec_default_execute_func_func(avcodec_default_execute_func func) => new avcodec_default_execute_func_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int avcodec_default_execute2_func (AVCodecContext* @c2, void* @arg2, int @p2, int @p3); +public unsafe struct avcodec_default_execute2_func_func +{ + public IntPtr Pointer; + public static implicit operator avcodec_default_execute2_func_func(avcodec_default_execute2_func func) => new avcodec_default_execute2_func_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVCodecContext_draw_horiz_band (AVCodecContext* @s, AVFrame* @src, ref int_array8 @offset, int @y, int @type, int @height); +public unsafe struct AVCodecContext_draw_horiz_band_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecContext_draw_horiz_band_func(AVCodecContext_draw_horiz_band func) => new AVCodecContext_draw_horiz_band_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecContext_execute (AVCodecContext* @c, func_func @func, void* @arg2, int* @ret, int @count, int @size); +public unsafe struct AVCodecContext_execute_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecContext_execute_func(AVCodecContext_execute func) => new AVCodecContext_execute_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecContext_execute2 (AVCodecContext* @c, func_func @func, void* @arg2, int* @ret, int @count); +public unsafe struct AVCodecContext_execute2_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecContext_execute2_func(AVCodecContext_execute2 func) => new AVCodecContext_execute2_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecContext_get_buffer2 (AVCodecContext* @s, AVFrame* @frame, int @flags); +public unsafe struct AVCodecContext_get_buffer2_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecContext_get_buffer2_func(AVCodecContext_get_buffer2 func) => new AVCodecContext_get_buffer2_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecContext_get_encode_buffer (AVCodecContext* @s, AVPacket* @pkt, int @flags); +public unsafe struct AVCodecContext_get_encode_buffer_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecContext_get_encode_buffer_func(AVCodecContext_get_encode_buffer func) => new AVCodecContext_get_encode_buffer_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate AVPixelFormat AVCodecContext_get_format (AVCodecContext* @s, AVPixelFormat* @fmt); +public unsafe struct AVCodecContext_get_format_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecContext_get_format_func(AVCodecContext_get_format func) => new AVCodecContext_get_format_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVCodecParser_parser_close (AVCodecParserContext* @s); +public unsafe struct AVCodecParser_parser_close_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecParser_parser_close_func(AVCodecParser_parser_close func) => new AVCodecParser_parser_close_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecParser_parser_init (AVCodecParserContext* @s); +public unsafe struct AVCodecParser_parser_init_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecParser_parser_init_func(AVCodecParser_parser_init func) => new AVCodecParser_parser_init_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecParser_parser_parse (AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size); +public unsafe struct AVCodecParser_parser_parse_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecParser_parser_parse_func(AVCodecParser_parser_parse func) => new AVCodecParser_parser_parse_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVCodecParser_split (AVCodecContext* @avctx, byte* @buf, int @buf_size); +public unsafe struct AVCodecParser_split_func +{ + public IntPtr Pointer; + public static implicit operator AVCodecParser_split_func(AVCodecParser_split func) => new AVCodecParser_split_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVD3D11VADeviceContext_lock (void* @lock_ctx); +public unsafe struct AVD3D11VADeviceContext_lock_func +{ + public IntPtr Pointer; + public static implicit operator AVD3D11VADeviceContext_lock_func(AVD3D11VADeviceContext_lock func) => new AVD3D11VADeviceContext_lock_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVD3D11VADeviceContext_unlock (void* @lock_ctx); +public unsafe struct AVD3D11VADeviceContext_unlock_func +{ + public IntPtr Pointer; + public static implicit operator AVD3D11VADeviceContext_unlock_func(AVD3D11VADeviceContext_unlock func) => new AVD3D11VADeviceContext_unlock_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFilter_activate (AVFilterContext* @ctx); +public unsafe struct AVFilter_activate_func +{ + public IntPtr Pointer; + public static implicit operator AVFilter_activate_func(AVFilter_activate func) => new AVFilter_activate_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFilter_init_dict (AVFilterContext* @ctx, AVDictionary** @options); +public unsafe struct AVFilter_init_dict_func +{ + public IntPtr Pointer; + public static implicit operator AVFilter_init_dict_func(AVFilter_init_dict func) => new AVFilter_init_dict_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFilter_init (AVFilterContext* @ctx); +public unsafe struct AVFilter_init_func +{ + public IntPtr Pointer; + public static implicit operator AVFilter_init_func(AVFilter_init func) => new AVFilter_init_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFilter_preinit (AVFilterContext* @ctx); +public unsafe struct AVFilter_preinit_func +{ + public IntPtr Pointer; + public static implicit operator AVFilter_preinit_func(AVFilter_preinit func) => new AVFilter_preinit_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFilter_process_command (AVFilterContext* @p0, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, byte* @res, int @res_len, int @flags); +public unsafe struct AVFilter_process_command_func +{ + public IntPtr Pointer; + public static implicit operator AVFilter_process_command_func(AVFilter_process_command func) => new AVFilter_process_command_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVFilter_uninit (AVFilterContext* @ctx); +public unsafe struct AVFilter_uninit_func +{ + public IntPtr Pointer; + public static implicit operator AVFilter_uninit_func(AVFilter_uninit func) => new AVFilter_uninit_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFilterGraph_execute (AVFilterContext* @ctx, func_func @func, void* @arg, int* @ret, int @nb_jobs); +public unsafe struct AVFilterGraph_execute_func +{ + public IntPtr Pointer; + public static implicit operator AVFilterGraph_execute_func(AVFilterGraph_execute func) => new AVFilterGraph_execute_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFormatContext_control_message_cb (AVFormatContext* @s, int @type, void* @data, ulong @data_size); +public unsafe struct AVFormatContext_control_message_cb_func +{ + public IntPtr Pointer; + public static implicit operator AVFormatContext_control_message_cb_func(AVFormatContext_control_message_cb func) => new AVFormatContext_control_message_cb_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVFormatContext_io_close (AVFormatContext* @s, AVIOContext* @pb); +public unsafe struct AVFormatContext_io_close_func +{ + public IntPtr Pointer; + public static implicit operator AVFormatContext_io_close_func(AVFormatContext_io_close func) => new AVFormatContext_io_close_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFormatContext_io_close2 (AVFormatContext* @s, AVIOContext* @pb); +public unsafe struct AVFormatContext_io_close2_func +{ + public IntPtr Pointer; + public static implicit operator AVFormatContext_io_close2_func(AVFormatContext_io_close2 func) => new AVFormatContext_io_close2_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVFormatContext_io_open (AVFormatContext* @s, AVIOContext** @pb, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @flags, AVDictionary** @options); +public unsafe struct AVFormatContext_io_open_func +{ + public IntPtr Pointer; + public static implicit operator AVFormatContext_io_open_func(AVFormatContext_io_open func) => new AVFormatContext_io_open_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_alloc_frame (AVCodecContext* @avctx, AVFrame* @frame); +public unsafe struct AVHWAccel_alloc_frame_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_alloc_frame_func(AVHWAccel_alloc_frame func) => new AVHWAccel_alloc_frame_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_decode_params (AVCodecContext* @avctx, int @type, byte* @buf, uint @buf_size); +public unsafe struct AVHWAccel_decode_params_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_decode_params_func(AVHWAccel_decode_params func) => new AVHWAccel_decode_params_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_decode_slice (AVCodecContext* @avctx, byte* @buf, uint @buf_size); +public unsafe struct AVHWAccel_decode_slice_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_decode_slice_func(AVHWAccel_decode_slice func) => new AVHWAccel_decode_slice_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_end_frame (AVCodecContext* @avctx); +public unsafe struct AVHWAccel_end_frame_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_end_frame_func(AVHWAccel_end_frame func) => new AVHWAccel_end_frame_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_frame_params (AVCodecContext* @avctx, AVBufferRef* @hw_frames_ctx); +public unsafe struct AVHWAccel_frame_params_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_frame_params_func(AVHWAccel_frame_params func) => new AVHWAccel_frame_params_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_init (AVCodecContext* @avctx); +public unsafe struct AVHWAccel_init_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_init_func(AVHWAccel_init func) => new AVHWAccel_init_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_start_frame (AVCodecContext* @avctx, byte* @buf, uint @buf_size); +public unsafe struct AVHWAccel_start_frame_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_start_frame_func(AVHWAccel_start_frame func) => new AVHWAccel_start_frame_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVHWAccel_uninit (AVCodecContext* @avctx); +public unsafe struct AVHWAccel_uninit_func +{ + public IntPtr Pointer; + public static implicit operator AVHWAccel_uninit_func(AVHWAccel_uninit func) => new AVHWAccel_uninit_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVHWDeviceContext_free (AVHWDeviceContext* @ctx); +public unsafe struct AVHWDeviceContext_free_func +{ + public IntPtr Pointer; + public static implicit operator AVHWDeviceContext_free_func(AVHWDeviceContext_free func) => new AVHWDeviceContext_free_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVHWFramesContext_free (AVHWFramesContext* @ctx); +public unsafe struct AVHWFramesContext_free_func +{ + public IntPtr Pointer; + public static implicit operator AVHWFramesContext_free_func(AVHWFramesContext_free func) => new AVHWFramesContext_free_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_get_device_list (AVFormatContext* @s, AVDeviceInfoList* @device_list); +public unsafe struct AVInputFormat_get_device_list_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_get_device_list_func(AVInputFormat_get_device_list func) => new AVInputFormat_get_device_list_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_close (AVFormatContext* @p0); +public unsafe struct AVInputFormat_read_close_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_close_func(AVInputFormat_read_close func) => new AVInputFormat_read_close_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_header (AVFormatContext* @p0); +public unsafe struct AVInputFormat_read_header_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_header_func(AVInputFormat_read_header func) => new AVInputFormat_read_header_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_packet (AVFormatContext* @p0, AVPacket* @pkt); +public unsafe struct AVInputFormat_read_packet_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_packet_func(AVInputFormat_read_packet func) => new AVInputFormat_read_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_pause (AVFormatContext* @p0); +public unsafe struct AVInputFormat_read_pause_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_pause_func(AVInputFormat_read_pause func) => new AVInputFormat_read_pause_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_play (AVFormatContext* @p0); +public unsafe struct AVInputFormat_read_play_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_play_func(AVInputFormat_read_play func) => new AVInputFormat_read_play_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_probe (AVProbeData* @p0); +public unsafe struct AVInputFormat_read_probe_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_probe_func(AVInputFormat_read_probe func) => new AVInputFormat_read_probe_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_seek (AVFormatContext* @p0, int @stream_index, long @timestamp, int @flags); +public unsafe struct AVInputFormat_read_seek_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_seek_func(AVInputFormat_read_seek func) => new AVInputFormat_read_seek_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVInputFormat_read_seek2 (AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags); +public unsafe struct AVInputFormat_read_seek2_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_seek2_func(AVInputFormat_read_seek2 func) => new AVInputFormat_read_seek2_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate long AVInputFormat_read_timestamp (AVFormatContext* @s, int @stream_index, long* @pos, long @pos_limit); +public unsafe struct AVInputFormat_read_timestamp_func +{ + public IntPtr Pointer; + public static implicit operator AVInputFormat_read_timestamp_func(AVInputFormat_read_timestamp func) => new AVInputFormat_read_timestamp_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int avio_alloc_context_read_packet (void* @opaque, byte* @buf, int @buf_size); +public unsafe struct avio_alloc_context_read_packet_func +{ + public IntPtr Pointer; + public static implicit operator avio_alloc_context_read_packet_func(avio_alloc_context_read_packet func) => new avio_alloc_context_read_packet_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate long avio_alloc_context_seek (void* @opaque, long @offset, int @whence); +public unsafe struct avio_alloc_context_seek_func +{ + public IntPtr Pointer; + public static implicit operator avio_alloc_context_seek_func(avio_alloc_context_seek func) => new avio_alloc_context_seek_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int avio_alloc_context_write_packet (void* @opaque, byte* @buf, int @buf_size); +public unsafe struct avio_alloc_context_write_packet_func +{ + public IntPtr Pointer; + public static implicit operator avio_alloc_context_write_packet_func(avio_alloc_context_write_packet func) => new avio_alloc_context_write_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVIOContext_read_packet (void* @opaque, byte* @buf, int @buf_size); +public unsafe struct AVIOContext_read_packet_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_read_packet_func(AVIOContext_read_packet func) => new AVIOContext_read_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVIOContext_read_pause (void* @opaque, int @pause); +public unsafe struct AVIOContext_read_pause_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_read_pause_func(AVIOContext_read_pause func) => new AVIOContext_read_pause_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate long AVIOContext_read_seek (void* @opaque, int @stream_index, long @timestamp, int @flags); +public unsafe struct AVIOContext_read_seek_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_read_seek_func(AVIOContext_read_seek func) => new AVIOContext_read_seek_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate long AVIOContext_seek (void* @opaque, long @offset, int @whence); +public unsafe struct AVIOContext_seek_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_seek_func(AVIOContext_seek func) => new AVIOContext_seek_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate ulong AVIOContext_update_checksum (ulong @checksum, byte* @buf, uint @size); +public unsafe struct AVIOContext_update_checksum_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_update_checksum_func(AVIOContext_update_checksum func) => new AVIOContext_update_checksum_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVIOContext_write_data_type (void* @opaque, byte* @buf, int @buf_size, AVIODataMarkerType @type, long @time); +public unsafe struct AVIOContext_write_data_type_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_write_data_type_func(AVIOContext_write_data_type func) => new AVIOContext_write_data_type_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVIOContext_write_packet (void* @opaque, byte* @buf, int @buf_size); +public unsafe struct AVIOContext_write_packet_func +{ + public IntPtr Pointer; + public static implicit operator AVIOContext_write_packet_func(AVIOContext_write_packet func) => new AVIOContext_write_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVIOInterruptCB_callback (void* @p0); +public unsafe struct AVIOInterruptCB_callback_func +{ + public IntPtr Pointer; + public static implicit operator AVIOInterruptCB_callback_func(AVIOInterruptCB_callback func) => new AVIOInterruptCB_callback_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_check_bitstream (AVFormatContext* @s, AVStream* @st, AVPacket* @pkt); +public unsafe struct AVOutputFormat_check_bitstream_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_check_bitstream_func(AVOutputFormat_check_bitstream func) => new AVOutputFormat_check_bitstream_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_control_message (AVFormatContext* @s, int @type, void* @data, ulong @data_size); +public unsafe struct AVOutputFormat_control_message_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_control_message_func(AVOutputFormat_control_message func) => new AVOutputFormat_control_message_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVOutputFormat_deinit (AVFormatContext* @p0); +public unsafe struct AVOutputFormat_deinit_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_deinit_func(AVOutputFormat_deinit func) => new AVOutputFormat_deinit_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_get_device_list (AVFormatContext* @s, AVDeviceInfoList* @device_list); +public unsafe struct AVOutputFormat_get_device_list_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_get_device_list_func(AVOutputFormat_get_device_list func) => new AVOutputFormat_get_device_list_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate void AVOutputFormat_get_output_timestamp (AVFormatContext* @s, int @stream, long* @dts, long* @wall); +public unsafe struct AVOutputFormat_get_output_timestamp_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_get_output_timestamp_func(AVOutputFormat_get_output_timestamp func) => new AVOutputFormat_get_output_timestamp_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_init (AVFormatContext* @p0); +public unsafe struct AVOutputFormat_init_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_init_func(AVOutputFormat_init func) => new AVOutputFormat_init_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_interleave_packet (AVFormatContext* @s, AVPacket* @pkt, int @flush, int @has_packet); +public unsafe struct AVOutputFormat_interleave_packet_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_interleave_packet_func(AVOutputFormat_interleave_packet func) => new AVOutputFormat_interleave_packet_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_query_codec (AVCodecID @id, int @std_compliance); +public unsafe struct AVOutputFormat_query_codec_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_query_codec_func(AVOutputFormat_query_codec func) => new AVOutputFormat_query_codec_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_write_header (AVFormatContext* @p0); +public unsafe struct AVOutputFormat_write_header_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_write_header_func(AVOutputFormat_write_header func) => new AVOutputFormat_write_header_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_write_packet (AVFormatContext* @p0, AVPacket* @pkt); +public unsafe struct AVOutputFormat_write_packet_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_write_packet_func(AVOutputFormat_write_packet func) => new AVOutputFormat_write_packet_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_write_trailer (AVFormatContext* @p0); +public unsafe struct AVOutputFormat_write_trailer_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_write_trailer_func(AVOutputFormat_write_trailer func) => new AVOutputFormat_write_trailer_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int AVOutputFormat_write_uncoded_frame (AVFormatContext* @p0, int @stream_index, AVFrame** @frame, uint @flags); +public unsafe struct AVOutputFormat_write_uncoded_frame_func +{ + public IntPtr Pointer; + public static implicit operator AVOutputFormat_write_uncoded_frame_func(AVOutputFormat_write_uncoded_frame func) => new AVOutputFormat_write_uncoded_frame_func { Pointer = func == null ? IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + +[UnmanagedFunctionPointer(CallingConvention.Cdecl)] +public unsafe delegate int func (AVFilterContext* @ctx, void* @arg, int @jobnr, int @nb_jobs); +public unsafe struct func_func +{ + public IntPtr Pointer; + public static implicit operator func_func(func func) => new func_func { Pointer = func == null ? 
IntPtr.Zero : Marshal.GetFunctionPointerForDelegate(func) }; +} + diff --git a/FFmpeg.AutoGen/generated/DynamicallyLoadedBindings.g.cs b/FFmpeg.AutoGen/generated/DynamicallyLoadedBindings.g.cs new file mode 100644 index 00000000..12e22d93 --- /dev/null +++ b/FFmpeg.AutoGen/generated/DynamicallyLoadedBindings.g.cs @@ -0,0 +1,4246 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen; + +public static unsafe partial class DynamicallyLoadedBindings +{ + public static bool ThrowErrorIfFunctionNotFound; + public static IFunctionResolver FunctionResolver; + + public unsafe static void Initialize() + { + if (FunctionResolver == null) FunctionResolver = FunctionResolverFactory.Create(); + + vectors.av_abuffersink_params_alloc = () => + { + vectors.av_abuffersink_params_alloc = FunctionResolver.GetFunctionDelegate("avfilter", "av_abuffersink_params_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_abuffersink_params_alloc(); + }; + + vectors.av_add_index_entry = (AVStream* @st, long @pos, long @timestamp, int @size, int @distance, int @flags) => + { + vectors.av_add_index_entry = FunctionResolver.GetFunctionDelegate("avformat", "av_add_index_entry", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_add_index_entry(@st, @pos, @timestamp, @size, @distance, @flags); + }; + + vectors.av_add_q = (AVRational @b, AVRational @c) => + { + vectors.av_add_q = FunctionResolver.GetFunctionDelegate("avutil", "av_add_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_add_q(@b, @c); + }; + + vectors.av_add_stable = (AVRational @ts_tb, long @ts, AVRational @inc_tb, long @inc) => + { + vectors.av_add_stable = FunctionResolver.GetFunctionDelegate("avutil", "av_add_stable", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_add_stable(@ts_tb, @ts, @inc_tb, @inc); + }; + + vectors.av_append_packet = (AVIOContext* @s, AVPacket* @pkt, int @size) => + { + vectors.av_append_packet = FunctionResolver.GetFunctionDelegate("avformat", "av_append_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_append_packet(@s, @pkt, @size); + }; + + vectors.av_audio_fifo_alloc = (AVSampleFormat @sample_fmt, int @channels, int @nb_samples) => + { + vectors.av_audio_fifo_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_alloc(@sample_fmt, @channels, @nb_samples); + }; + + vectors.av_audio_fifo_drain = (AVAudioFifo* @af, int @nb_samples) => + { + vectors.av_audio_fifo_drain = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_drain", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_drain(@af, @nb_samples); + }; + + vectors.av_audio_fifo_free = (AVAudioFifo* @af) => + { + vectors.av_audio_fifo_free = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_audio_fifo_free(@af); + }; + + vectors.av_audio_fifo_peek = (AVAudioFifo* @af, void** @data, int @nb_samples) => + { + vectors.av_audio_fifo_peek = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_peek", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_peek(@af, @data, @nb_samples); + }; + + vectors.av_audio_fifo_peek_at = (AVAudioFifo* @af, void** @data, int @nb_samples, int @offset) => + { + vectors.av_audio_fifo_peek_at = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_peek_at", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_peek_at(@af, @data, @nb_samples, @offset); + }; + + vectors.av_audio_fifo_read = (AVAudioFifo* @af, void** @data, int @nb_samples) => + { + vectors.av_audio_fifo_read = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_read", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_read(@af, @data, @nb_samples); + }; + + vectors.av_audio_fifo_realloc = (AVAudioFifo* @af, int @nb_samples) => + { + vectors.av_audio_fifo_realloc = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_realloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_realloc(@af, @nb_samples); + }; + + vectors.av_audio_fifo_reset = (AVAudioFifo* @af) => + { + vectors.av_audio_fifo_reset = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_reset", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_audio_fifo_reset(@af); + }; + + vectors.av_audio_fifo_size = (AVAudioFifo* @af) => + { + vectors.av_audio_fifo_size = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_size(@af); + }; + + vectors.av_audio_fifo_space = (AVAudioFifo* @af) => + { + vectors.av_audio_fifo_space = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_space", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_space(@af); + }; + + vectors.av_audio_fifo_write = (AVAudioFifo* @af, void** @data, int @nb_samples) => + { + vectors.av_audio_fifo_write = FunctionResolver.GetFunctionDelegate("avutil", "av_audio_fifo_write", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_audio_fifo_write(@af, @data, @nb_samples); + }; + + vectors.av_bprint_channel_layout = (AVBPrint* @bp, int @nb_channels, ulong @channel_layout) => + { + vectors.av_bprint_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_bprint_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_bprint_channel_layout(@bp, @nb_channels, @channel_layout); + }; + + vectors.av_bsf_alloc = (AVBitStreamFilter* @filter, AVBSFContext** @ctx) => + { + vectors.av_bsf_alloc = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_alloc(@filter, @ctx); + }; + + vectors.av_bsf_flush = (AVBSFContext* @ctx) => + { + vectors.av_bsf_flush = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_flush", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_bsf_flush(@ctx); + }; + + vectors.av_bsf_free = (AVBSFContext** @ctx) => + { + vectors.av_bsf_free = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_bsf_free(@ctx); + }; + + vectors.av_bsf_get_by_name = (string @name) => + { + vectors.av_bsf_get_by_name = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_get_by_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_get_by_name(@name); + }; + + vectors.av_bsf_get_class = () => + { + vectors.av_bsf_get_class = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_get_class", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_get_class(); + }; + + vectors.av_bsf_get_null_filter = (AVBSFContext** @bsf) => + { + vectors.av_bsf_get_null_filter = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_get_null_filter", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_get_null_filter(@bsf); + }; + + vectors.av_bsf_init = (AVBSFContext* @ctx) => + { + vectors.av_bsf_init = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_init(@ctx); + }; + + vectors.av_bsf_iterate = (void** @opaque) => + { + vectors.av_bsf_iterate = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_iterate(@opaque); + }; + + vectors.av_bsf_list_alloc = () => + { + vectors.av_bsf_list_alloc = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_list_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_list_alloc(); + }; + + vectors.av_bsf_list_append = (AVBSFList* @lst, AVBSFContext* @bsf) => + { + vectors.av_bsf_list_append = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_list_append", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_list_append(@lst, @bsf); + }; + + vectors.av_bsf_list_append2 = (AVBSFList* @lst, string @bsf_name, AVDictionary** @options) => + { + vectors.av_bsf_list_append2 = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_list_append2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_list_append2(@lst, @bsf_name, @options); + }; + + vectors.av_bsf_list_finalize = (AVBSFList** @lst, AVBSFContext** @bsf) => + { + vectors.av_bsf_list_finalize = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_list_finalize", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_list_finalize(@lst, @bsf); + }; + + vectors.av_bsf_list_free = (AVBSFList** @lst) => + { + vectors.av_bsf_list_free = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_list_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_bsf_list_free(@lst); + }; + + vectors.av_bsf_list_parse_str = (string @str, AVBSFContext** @bsf) => + { + vectors.av_bsf_list_parse_str = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_list_parse_str", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_list_parse_str(@str, @bsf); + }; + + vectors.av_bsf_receive_packet = (AVBSFContext* @ctx, AVPacket* @pkt) => + { + vectors.av_bsf_receive_packet = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_receive_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_receive_packet(@ctx, @pkt); + }; + + vectors.av_bsf_send_packet = (AVBSFContext* @ctx, AVPacket* @pkt) => + { + vectors.av_bsf_send_packet = FunctionResolver.GetFunctionDelegate("avcodec", "av_bsf_send_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_bsf_send_packet(@ctx, @pkt); + }; + + vectors.av_buffer_alloc = (ulong @size) => + { + vectors.av_buffer_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_alloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_alloc(@size); + }; + + vectors.av_buffer_allocz = (ulong @size) => + { + vectors.av_buffer_allocz = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_allocz", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_allocz(@size); + }; + + vectors.av_buffer_create = (byte* @data, ulong @size, av_buffer_create_free_func @free, void* @opaque, int @flags) => + { + vectors.av_buffer_create = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_create", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_create(@data, @size, @free, @opaque, @flags); + }; + + vectors.av_buffer_default_free = (void* @opaque, byte* @data) => + { + vectors.av_buffer_default_free = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_default_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_buffer_default_free(@opaque, @data); + }; + + vectors.av_buffer_get_opaque = (AVBufferRef* @buf) => + { + vectors.av_buffer_get_opaque = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_get_opaque", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_get_opaque(@buf); + }; + + vectors.av_buffer_get_ref_count = (AVBufferRef* @buf) => + { + vectors.av_buffer_get_ref_count = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_get_ref_count", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_get_ref_count(@buf); + }; + + vectors.av_buffer_is_writable = (AVBufferRef* @buf) => + { + vectors.av_buffer_is_writable = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_is_writable", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_is_writable(@buf); + }; + + vectors.av_buffer_make_writable = (AVBufferRef** @buf) => + { + vectors.av_buffer_make_writable = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_make_writable", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_make_writable(@buf); + }; + + vectors.av_buffer_pool_buffer_get_opaque = (AVBufferRef* @ref) => + { + vectors.av_buffer_pool_buffer_get_opaque = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_pool_buffer_get_opaque", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_pool_buffer_get_opaque(@ref); + }; + + vectors.av_buffer_pool_get = (AVBufferPool* @pool) => + { + vectors.av_buffer_pool_get = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_pool_get", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_pool_get(@pool); + }; + + vectors.av_buffer_pool_init = (ulong @size, av_buffer_pool_init_alloc_func @alloc) => + { + vectors.av_buffer_pool_init = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_pool_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_pool_init(@size, @alloc); + }; + + vectors.av_buffer_pool_init2 = (ulong @size, void* @opaque, av_buffer_pool_init2_alloc_func @alloc, av_buffer_pool_init2_pool_free_func @pool_free) => + { + vectors.av_buffer_pool_init2 = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_pool_init2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_pool_init2(@size, @opaque, @alloc, @pool_free); + }; + + vectors.av_buffer_pool_uninit = (AVBufferPool** @pool) => + { + vectors.av_buffer_pool_uninit = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_pool_uninit", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_buffer_pool_uninit(@pool); + }; + + vectors.av_buffer_realloc = (AVBufferRef** @buf, ulong @size) => + { + vectors.av_buffer_realloc = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_realloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_realloc(@buf, @size); + }; + + vectors.av_buffer_ref = (AVBufferRef* @buf) => + { + vectors.av_buffer_ref = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_ref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_ref(@buf); + }; + + vectors.av_buffer_replace = (AVBufferRef** @dst, AVBufferRef* @src) => + { + vectors.av_buffer_replace = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_replace", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffer_replace(@dst, @src); + }; + + vectors.av_buffer_unref = (AVBufferRef** @buf) => + { + vectors.av_buffer_unref = FunctionResolver.GetFunctionDelegate("avutil", "av_buffer_unref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_buffer_unref(@buf); + }; + + vectors.av_buffersink_get_ch_layout = (AVFilterContext* @ctx, AVChannelLayout* @ch_layout) => + { + vectors.av_buffersink_get_ch_layout = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_ch_layout", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_ch_layout(@ctx, @ch_layout); + }; + + vectors.av_buffersink_get_channel_layout = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_channel_layout = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_channel_layout(@ctx); + }; + + vectors.av_buffersink_get_channels = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_channels = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_channels", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_channels(@ctx); + }; + + vectors.av_buffersink_get_format = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_format = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_format", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_format(@ctx); + }; + + vectors.av_buffersink_get_frame = (AVFilterContext* @ctx, AVFrame* @frame) => + { + vectors.av_buffersink_get_frame = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_frame(@ctx, @frame); + }; + + vectors.av_buffersink_get_frame_flags = (AVFilterContext* @ctx, AVFrame* @frame, int @flags) => + { + vectors.av_buffersink_get_frame_flags = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_frame_flags", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_frame_flags(@ctx, @frame, @flags); + }; + + vectors.av_buffersink_get_frame_rate = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_frame_rate = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_frame_rate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_frame_rate(@ctx); + }; + + vectors.av_buffersink_get_h = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_h = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_h", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_h(@ctx); + }; + + vectors.av_buffersink_get_hw_frames_ctx = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_hw_frames_ctx = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_hw_frames_ctx", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_hw_frames_ctx(@ctx); + }; + + vectors.av_buffersink_get_sample_aspect_ratio = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_sample_aspect_ratio = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_sample_aspect_ratio", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_sample_aspect_ratio(@ctx); + }; + + vectors.av_buffersink_get_sample_rate = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_sample_rate = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_sample_rate", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_sample_rate(@ctx); + }; + + vectors.av_buffersink_get_samples = (AVFilterContext* @ctx, AVFrame* @frame, int @nb_samples) => + { + vectors.av_buffersink_get_samples = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_samples", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_samples(@ctx, @frame, @nb_samples); + }; + + vectors.av_buffersink_get_time_base = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_time_base = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_time_base", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_time_base(@ctx); + }; + + vectors.av_buffersink_get_type = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_type = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_type", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_type(@ctx); + }; + + vectors.av_buffersink_get_w = (AVFilterContext* @ctx) => + { + vectors.av_buffersink_get_w = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_get_w", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_get_w(@ctx); + }; + + vectors.av_buffersink_params_alloc = () => + { + vectors.av_buffersink_params_alloc = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_params_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersink_params_alloc(); + }; + + vectors.av_buffersink_set_frame_size = (AVFilterContext* @ctx, uint @frame_size) => + { + vectors.av_buffersink_set_frame_size = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersink_set_frame_size", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_buffersink_set_frame_size(@ctx, @frame_size); + }; + + vectors.av_buffersrc_add_frame = (AVFilterContext* @ctx, AVFrame* @frame) => + { + vectors.av_buffersrc_add_frame = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_add_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_add_frame(@ctx, @frame); + }; + + vectors.av_buffersrc_add_frame_flags = (AVFilterContext* @buffer_src, AVFrame* @frame, int @flags) => + { + vectors.av_buffersrc_add_frame_flags = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_add_frame_flags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_add_frame_flags(@buffer_src, @frame, @flags); + }; + + vectors.av_buffersrc_close = (AVFilterContext* @ctx, long @pts, uint @flags) => + { + vectors.av_buffersrc_close = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_close", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_close(@ctx, @pts, @flags); + }; + + vectors.av_buffersrc_get_nb_failed_requests = (AVFilterContext* @buffer_src) => + { + vectors.av_buffersrc_get_nb_failed_requests = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_get_nb_failed_requests", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_get_nb_failed_requests(@buffer_src); + }; + + vectors.av_buffersrc_parameters_alloc = () => + { + vectors.av_buffersrc_parameters_alloc = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_parameters_alloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_parameters_alloc(); + }; + + vectors.av_buffersrc_parameters_set = (AVFilterContext* @ctx, AVBufferSrcParameters* @param) => + { + vectors.av_buffersrc_parameters_set = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_parameters_set", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_parameters_set(@ctx, @param); + }; + + vectors.av_buffersrc_write_frame = (AVFilterContext* @ctx, AVFrame* @frame) => + { + vectors.av_buffersrc_write_frame = FunctionResolver.GetFunctionDelegate("avfilter", "av_buffersrc_write_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_buffersrc_write_frame(@ctx, @frame); + }; + + vectors.av_calloc = (ulong @nmemb, ulong @size) => + { + vectors.av_calloc = FunctionResolver.GetFunctionDelegate("avutil", "av_calloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_calloc(@nmemb, @size); + }; + + vectors.av_channel_description = (byte* @buf, ulong @buf_size, AVChannel @channel) => + { + vectors.av_channel_description = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_description", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_description(@buf, @buf_size, @channel); + }; + + vectors.av_channel_description_bprint = (AVBPrint* @bp, AVChannel @channel_id) => + { + vectors.av_channel_description_bprint = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_description_bprint", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_channel_description_bprint(@bp, @channel_id); + }; + + vectors.av_channel_from_string = (string @name) => + { + vectors.av_channel_from_string = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_from_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_from_string(@name); + }; + + vectors.av_channel_layout_channel_from_index = (AVChannelLayout* @channel_layout, uint @idx) => + { + vectors.av_channel_layout_channel_from_index = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_channel_from_index", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_channel_from_index(@channel_layout, @idx); + }; + + vectors.av_channel_layout_channel_from_string = (AVChannelLayout* @channel_layout, string @name) => + { + vectors.av_channel_layout_channel_from_string = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_channel_from_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_channel_from_string(@channel_layout, @name); + }; + + vectors.av_channel_layout_check = (AVChannelLayout* @channel_layout) => + { + vectors.av_channel_layout_check = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_check", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_check(@channel_layout); + }; + + vectors.av_channel_layout_compare = (AVChannelLayout* @chl, AVChannelLayout* @chl1) => + { + vectors.av_channel_layout_compare = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_compare", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_compare(@chl, @chl1); + }; + + vectors.av_channel_layout_copy = (AVChannelLayout* @dst, AVChannelLayout* @src) => + { + vectors.av_channel_layout_copy = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_copy", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_copy(@dst, @src); + }; + + vectors.av_channel_layout_default = (AVChannelLayout* @ch_layout, int @nb_channels) => + { + vectors.av_channel_layout_default = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_default", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_channel_layout_default(@ch_layout, @nb_channels); + }; + + vectors.av_channel_layout_describe = (AVChannelLayout* @channel_layout, byte* @buf, ulong @buf_size) => + { + vectors.av_channel_layout_describe = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_describe", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_describe(@channel_layout, @buf, @buf_size); + }; + + vectors.av_channel_layout_describe_bprint = (AVChannelLayout* @channel_layout, AVBPrint* @bp) => + { + vectors.av_channel_layout_describe_bprint = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_describe_bprint", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_describe_bprint(@channel_layout, @bp); + }; + + vectors.av_channel_layout_extract_channel = (ulong @channel_layout, int @index) => + { + vectors.av_channel_layout_extract_channel = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_extract_channel", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_extract_channel(@channel_layout, @index); + }; + + vectors.av_channel_layout_from_mask = (AVChannelLayout* @channel_layout, ulong @mask) => + { + vectors.av_channel_layout_from_mask = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_from_mask", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_from_mask(@channel_layout, @mask); + }; + + vectors.av_channel_layout_from_string = (AVChannelLayout* @channel_layout, string @str) => + { + vectors.av_channel_layout_from_string = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_from_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_from_string(@channel_layout, @str); + }; + + vectors.av_channel_layout_index_from_channel = (AVChannelLayout* @channel_layout, AVChannel @channel) => + { + vectors.av_channel_layout_index_from_channel = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_index_from_channel", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_index_from_channel(@channel_layout, @channel); + }; + + vectors.av_channel_layout_index_from_string = (AVChannelLayout* @channel_layout, string @name) => + { + vectors.av_channel_layout_index_from_string = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_index_from_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_index_from_string(@channel_layout, @name); + }; + + vectors.av_channel_layout_standard = (void** @opaque) => + { + vectors.av_channel_layout_standard = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_standard", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_standard(@opaque); + }; + + vectors.av_channel_layout_subset = (AVChannelLayout* @channel_layout, ulong @mask) => + { + vectors.av_channel_layout_subset = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_subset", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_layout_subset(@channel_layout, @mask); + }; + + vectors.av_channel_layout_uninit = (AVChannelLayout* @channel_layout) => + { + vectors.av_channel_layout_uninit = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_layout_uninit", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_channel_layout_uninit(@channel_layout); + }; + + vectors.av_channel_name = (byte* @buf, ulong @buf_size, AVChannel @channel) => + { + vectors.av_channel_name = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_channel_name(@buf, @buf_size, @channel); + }; + + vectors.av_channel_name_bprint = (AVBPrint* @bp, AVChannel @channel_id) => + { + vectors.av_channel_name_bprint = FunctionResolver.GetFunctionDelegate("avutil", "av_channel_name_bprint", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_channel_name_bprint(@bp, @channel_id); + }; + + vectors.av_chroma_location_from_name = (string @name) => + { + vectors.av_chroma_location_from_name = FunctionResolver.GetFunctionDelegate("avutil", "av_chroma_location_from_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_chroma_location_from_name(@name); + }; + + vectors.av_chroma_location_name = (AVChromaLocation @location) => + { + vectors.av_chroma_location_name = FunctionResolver.GetFunctionDelegate("avutil", "av_chroma_location_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_chroma_location_name(@location); + }; + + vectors.av_codec_get_id = (AVCodecTag** @tags, uint @tag) => + { + vectors.av_codec_get_id = FunctionResolver.GetFunctionDelegate("avformat", "av_codec_get_id", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_codec_get_id(@tags, @tag); + }; + + vectors.av_codec_get_tag = (AVCodecTag** @tags, AVCodecID @id) => + { + vectors.av_codec_get_tag = FunctionResolver.GetFunctionDelegate("avformat", "av_codec_get_tag", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_codec_get_tag(@tags, @id); + }; + + vectors.av_codec_get_tag2 = (AVCodecTag** @tags, AVCodecID @id, uint* @tag) => + { + vectors.av_codec_get_tag2 = FunctionResolver.GetFunctionDelegate("avformat", "av_codec_get_tag2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_codec_get_tag2(@tags, @id, @tag); + }; + + vectors.av_codec_is_decoder = (AVCodec* @codec) => + { + vectors.av_codec_is_decoder = FunctionResolver.GetFunctionDelegate("avcodec", "av_codec_is_decoder", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_codec_is_decoder(@codec); + }; + + vectors.av_codec_is_encoder = (AVCodec* @codec) => + { + vectors.av_codec_is_encoder = FunctionResolver.GetFunctionDelegate("avcodec", "av_codec_is_encoder", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_codec_is_encoder(@codec); + }; + + vectors.av_codec_iterate = (void** @opaque) => + { + vectors.av_codec_iterate = FunctionResolver.GetFunctionDelegate("avcodec", "av_codec_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_codec_iterate(@opaque); + }; + + vectors.av_color_primaries_from_name = (string @name) => + { + vectors.av_color_primaries_from_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_primaries_from_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_primaries_from_name(@name); + }; + + vectors.av_color_primaries_name = (AVColorPrimaries @primaries) => + { + vectors.av_color_primaries_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_primaries_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_primaries_name(@primaries); + }; + + vectors.av_color_range_from_name = (string @name) => + { + vectors.av_color_range_from_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_range_from_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_range_from_name(@name); + }; + + vectors.av_color_range_name = (AVColorRange @range) => + { + vectors.av_color_range_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_range_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_range_name(@range); + }; + + vectors.av_color_space_from_name = (string @name) => + { + vectors.av_color_space_from_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_space_from_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_color_space_from_name(@name); + }; + + vectors.av_color_space_name = (AVColorSpace @space) => + { + vectors.av_color_space_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_space_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_space_name(@space); + }; + + vectors.av_color_transfer_from_name = (string @name) => + { + vectors.av_color_transfer_from_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_transfer_from_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_transfer_from_name(@name); + }; + + vectors.av_color_transfer_name = (AVColorTransferCharacteristic @transfer) => + { + vectors.av_color_transfer_name = FunctionResolver.GetFunctionDelegate("avutil", "av_color_transfer_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_color_transfer_name(@transfer); + }; + + vectors.av_compare_mod = (ulong @a, ulong @b, ulong @mod) => + { + vectors.av_compare_mod = FunctionResolver.GetFunctionDelegate("avutil", "av_compare_mod", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_compare_mod(@a, @b, @mod); + }; + + vectors.av_compare_ts = (long @ts_a, AVRational @tb_a, long @ts_b, AVRational @tb_b) => + { + vectors.av_compare_ts = FunctionResolver.GetFunctionDelegate("avutil", "av_compare_ts", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_compare_ts(@ts_a, @tb_a, @ts_b, @tb_b); + }; + + vectors.av_content_light_metadata_alloc = (ulong* @size) => + { + vectors.av_content_light_metadata_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_content_light_metadata_alloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_content_light_metadata_alloc(@size); + }; + + vectors.av_content_light_metadata_create_side_data = (AVFrame* @frame) => + { + vectors.av_content_light_metadata_create_side_data = FunctionResolver.GetFunctionDelegate("avutil", "av_content_light_metadata_create_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_content_light_metadata_create_side_data(@frame); + }; + + vectors.av_cpb_properties_alloc = (ulong* @size) => + { + vectors.av_cpb_properties_alloc = FunctionResolver.GetFunctionDelegate("avcodec", "av_cpb_properties_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_cpb_properties_alloc(@size); + }; + + vectors.av_cpu_count = () => + { + vectors.av_cpu_count = FunctionResolver.GetFunctionDelegate("avutil", "av_cpu_count", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_cpu_count(); + }; + + vectors.av_cpu_force_count = (int @count) => + { + vectors.av_cpu_force_count = FunctionResolver.GetFunctionDelegate("avutil", "av_cpu_force_count", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_cpu_force_count(@count); + }; + + vectors.av_cpu_max_align = () => + { + vectors.av_cpu_max_align = FunctionResolver.GetFunctionDelegate("avutil", "av_cpu_max_align", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_cpu_max_align(); + }; + + vectors.av_d2q = (double @d, int @max) => + { + vectors.av_d2q = FunctionResolver.GetFunctionDelegate("avutil", "av_d2q", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_d2q(@d, @max); + }; + + vectors.av_d3d11va_alloc_context = () => + { + vectors.av_d3d11va_alloc_context = FunctionResolver.GetFunctionDelegate("avcodec", "av_d3d11va_alloc_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_d3d11va_alloc_context(); + }; + + vectors.av_default_get_category = (void* @ptr) => + { + vectors.av_default_get_category = FunctionResolver.GetFunctionDelegate("avutil", "av_default_get_category", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_default_get_category(@ptr); + }; + + vectors.av_default_item_name = (void* @ctx) => + { + vectors.av_default_item_name = FunctionResolver.GetFunctionDelegate("avutil", "av_default_item_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_default_item_name(@ctx); + }; + + vectors.av_demuxer_iterate = (void** @opaque) => + { + vectors.av_demuxer_iterate = FunctionResolver.GetFunctionDelegate("avformat", "av_demuxer_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_demuxer_iterate(@opaque); + }; + + vectors.av_dict_copy = (AVDictionary** @dst, AVDictionary* @src, int @flags) => + { + vectors.av_dict_copy = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_copy", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dict_copy(@dst, @src, @flags); + }; + + vectors.av_dict_count = (AVDictionary* @m) => + { + vectors.av_dict_count = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_count", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_dict_count(@m); + }; + + vectors.av_dict_free = (AVDictionary** @m) => + { + vectors.av_dict_free = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_dict_free(@m); + }; + + vectors.av_dict_get = (AVDictionary* @m, string @key, AVDictionaryEntry* @prev, int @flags) => + { + vectors.av_dict_get = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_get", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dict_get(@m, @key, @prev, @flags); + }; + + vectors.av_dict_get_string = (AVDictionary* @m, byte** @buffer, byte @key_val_sep, byte @pairs_sep) => + { + vectors.av_dict_get_string = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_get_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dict_get_string(@m, @buffer, @key_val_sep, @pairs_sep); + }; + + vectors.av_dict_parse_string = (AVDictionary** @pm, string @str, string @key_val_sep, string @pairs_sep, int @flags) => + { + vectors.av_dict_parse_string = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_parse_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dict_parse_string(@pm, @str, @key_val_sep, @pairs_sep, @flags); + }; + + vectors.av_dict_set = (AVDictionary** @pm, string @key, string @value, int @flags) => + { + vectors.av_dict_set = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_set", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_dict_set(@pm, @key, @value, @flags); + }; + + vectors.av_dict_set_int = (AVDictionary** @pm, string @key, long @value, int @flags) => + { + vectors.av_dict_set_int = FunctionResolver.GetFunctionDelegate("avutil", "av_dict_set_int", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dict_set_int(@pm, @key, @value, @flags); + }; + + vectors.av_disposition_from_string = (string @disp) => + { + vectors.av_disposition_from_string = FunctionResolver.GetFunctionDelegate("avformat", "av_disposition_from_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_disposition_from_string(@disp); + }; + + vectors.av_disposition_to_string = (int @disposition) => + { + vectors.av_disposition_to_string = FunctionResolver.GetFunctionDelegate("avformat", "av_disposition_to_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_disposition_to_string(@disposition); + }; + + vectors.av_div_q = (AVRational @b, AVRational @c) => + { + vectors.av_div_q = FunctionResolver.GetFunctionDelegate("avutil", "av_div_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_div_q(@b, @c); + }; + + vectors.av_dump_format = (AVFormatContext* @ic, int @index, string @url, int @is_output) => + { + vectors.av_dump_format = FunctionResolver.GetFunctionDelegate("avformat", "av_dump_format", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_dump_format(@ic, @index, @url, @is_output); + }; + + vectors.av_dynamic_hdr_plus_alloc = (ulong* @size) => + { + vectors.av_dynamic_hdr_plus_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_dynamic_hdr_plus_alloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_dynamic_hdr_plus_alloc(@size); + }; + + vectors.av_dynamic_hdr_plus_create_side_data = (AVFrame* @frame) => + { + vectors.av_dynamic_hdr_plus_create_side_data = FunctionResolver.GetFunctionDelegate("avutil", "av_dynamic_hdr_plus_create_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dynamic_hdr_plus_create_side_data(@frame); + }; + + vectors.av_dynarray_add = (void* @tab_ptr, int* @nb_ptr, void* @elem) => + { + vectors.av_dynarray_add = FunctionResolver.GetFunctionDelegate("avutil", "av_dynarray_add", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_dynarray_add(@tab_ptr, @nb_ptr, @elem); + }; + + vectors.av_dynarray_add_nofree = (void* @tab_ptr, int* @nb_ptr, void* @elem) => + { + vectors.av_dynarray_add_nofree = FunctionResolver.GetFunctionDelegate("avutil", "av_dynarray_add_nofree", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dynarray_add_nofree(@tab_ptr, @nb_ptr, @elem); + }; + + vectors.av_dynarray2_add = (void** @tab_ptr, int* @nb_ptr, ulong @elem_size, byte* @elem_data) => + { + vectors.av_dynarray2_add = FunctionResolver.GetFunctionDelegate("avutil", "av_dynarray2_add", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_dynarray2_add(@tab_ptr, @nb_ptr, @elem_size, @elem_data); + }; + + vectors.av_fast_malloc = (void* @ptr, uint* @size, ulong @min_size) => + { + vectors.av_fast_malloc = FunctionResolver.GetFunctionDelegate("avutil", "av_fast_malloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_fast_malloc(@ptr, @size, @min_size); + }; + + vectors.av_fast_mallocz = (void* @ptr, uint* @size, ulong @min_size) => + { + vectors.av_fast_mallocz = FunctionResolver.GetFunctionDelegate("avutil", "av_fast_mallocz", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_fast_mallocz(@ptr, @size, @min_size); + }; + + vectors.av_fast_padded_malloc = (void* @ptr, uint* @size, ulong @min_size) => + { + vectors.av_fast_padded_malloc = FunctionResolver.GetFunctionDelegate("avcodec", "av_fast_padded_malloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_fast_padded_malloc(@ptr, @size, @min_size); + }; + + vectors.av_fast_padded_mallocz = (void* @ptr, uint* @size, ulong @min_size) => + { + vectors.av_fast_padded_mallocz = FunctionResolver.GetFunctionDelegate("avcodec", "av_fast_padded_mallocz", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_fast_padded_mallocz(@ptr, @size, @min_size); + }; + + vectors.av_fast_realloc = (void* @ptr, uint* @size, ulong @min_size) => + { + vectors.av_fast_realloc = FunctionResolver.GetFunctionDelegate("avutil", "av_fast_realloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_fast_realloc(@ptr, @size, @min_size); + }; + + vectors.av_file_map = (string @filename, byte** @bufptr, ulong* @size, int @log_offset, void* @log_ctx) => + { + vectors.av_file_map = FunctionResolver.GetFunctionDelegate("avutil", "av_file_map", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_file_map(@filename, @bufptr, @size, @log_offset, @log_ctx); + }; + + vectors.av_file_unmap = (byte* @bufptr, ulong @size) => + { + vectors.av_file_unmap = FunctionResolver.GetFunctionDelegate("avutil", "av_file_unmap", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_file_unmap(@bufptr, @size); + }; + + vectors.av_filename_number_test = (string @filename) => + { + vectors.av_filename_number_test = FunctionResolver.GetFunctionDelegate("avformat", "av_filename_number_test", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_filename_number_test(@filename); + }; + + vectors.av_filter_iterate = (void** @opaque) => + { + vectors.av_filter_iterate = FunctionResolver.GetFunctionDelegate("avfilter", "av_filter_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_filter_iterate(@opaque); + }; + + vectors.av_find_best_pix_fmt_of_2 = (AVPixelFormat @dst_pix_fmt1, AVPixelFormat @dst_pix_fmt2, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr) => + { + vectors.av_find_best_pix_fmt_of_2 = FunctionResolver.GetFunctionDelegate("avutil", "av_find_best_pix_fmt_of_2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_find_best_pix_fmt_of_2(@dst_pix_fmt1, @dst_pix_fmt2, @src_pix_fmt, @has_alpha, @loss_ptr); + }; + + vectors.av_find_best_stream = (AVFormatContext* @ic, AVMediaType @type, int @wanted_stream_nb, int @related_stream, AVCodec** @decoder_ret, int @flags) => + { + vectors.av_find_best_stream = FunctionResolver.GetFunctionDelegate("avformat", "av_find_best_stream", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_find_best_stream(@ic, @type, @wanted_stream_nb, @related_stream, @decoder_ret, @flags); + }; + + vectors.av_find_default_stream_index = (AVFormatContext* @s) => + { + vectors.av_find_default_stream_index = FunctionResolver.GetFunctionDelegate("avformat", "av_find_default_stream_index", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_find_default_stream_index(@s); + }; + + vectors.av_find_input_format = (string @short_name) => + { + vectors.av_find_input_format = FunctionResolver.GetFunctionDelegate("avformat", "av_find_input_format", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_find_input_format(@short_name); + }; + + vectors.av_find_nearest_q_idx = (AVRational @q, AVRational* @q_list) => + { + vectors.av_find_nearest_q_idx = FunctionResolver.GetFunctionDelegate("avutil", "av_find_nearest_q_idx", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_find_nearest_q_idx(@q, @q_list); + }; + + vectors.av_find_program_from_stream = (AVFormatContext* @ic, AVProgram* @last, int @s) => + { + vectors.av_find_program_from_stream = FunctionResolver.GetFunctionDelegate("avformat", "av_find_program_from_stream", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_find_program_from_stream(@ic, @last, @s); + }; + + vectors.av_fmt_ctx_get_duration_estimation_method = (AVFormatContext* @ctx) => + { + vectors.av_fmt_ctx_get_duration_estimation_method = FunctionResolver.GetFunctionDelegate("avformat", "av_fmt_ctx_get_duration_estimation_method", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_fmt_ctx_get_duration_estimation_method(@ctx); + }; + + vectors.av_fopen_utf8 = (string @path, string @mode) => + { + vectors.av_fopen_utf8 = FunctionResolver.GetFunctionDelegate("avutil", "av_fopen_utf8", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_fopen_utf8(@path, @mode); + }; + + vectors.av_force_cpu_flags = (int @flags) => + { + vectors.av_force_cpu_flags = FunctionResolver.GetFunctionDelegate("avutil", "av_force_cpu_flags", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_force_cpu_flags(@flags); + }; + + vectors.av_format_inject_global_side_data = (AVFormatContext* @s) => + { + vectors.av_format_inject_global_side_data = FunctionResolver.GetFunctionDelegate("avformat", "av_format_inject_global_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_format_inject_global_side_data(@s); + }; + + vectors.av_fourcc_make_string = (byte* @buf, uint @fourcc) => + { + vectors.av_fourcc_make_string = FunctionResolver.GetFunctionDelegate("avutil", "av_fourcc_make_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_fourcc_make_string(@buf, @fourcc); + }; + + vectors.av_frame_alloc = () => + { + vectors.av_frame_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_alloc(); + }; + + vectors.av_frame_apply_cropping = (AVFrame* @frame, int @flags) => + { + vectors.av_frame_apply_cropping = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_apply_cropping", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_apply_cropping(@frame, @flags); + }; + + vectors.av_frame_clone = (AVFrame* @src) => + { + vectors.av_frame_clone = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_clone", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_clone(@src); + }; + + vectors.av_frame_copy = (AVFrame* @dst, AVFrame* @src) => + { + vectors.av_frame_copy = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_copy", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_frame_copy(@dst, @src); + }; + + vectors.av_frame_copy_props = (AVFrame* @dst, AVFrame* @src) => + { + vectors.av_frame_copy_props = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_copy_props", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_copy_props(@dst, @src); + }; + + vectors.av_frame_free = (AVFrame** @frame) => + { + vectors.av_frame_free = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_frame_free(@frame); + }; + + vectors.av_frame_get_buffer = (AVFrame* @frame, int @align) => + { + vectors.av_frame_get_buffer = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_get_buffer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_get_buffer(@frame, @align); + }; + + vectors.av_frame_get_plane_buffer = (AVFrame* @frame, int @plane) => + { + vectors.av_frame_get_plane_buffer = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_get_plane_buffer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_get_plane_buffer(@frame, @plane); + }; + + vectors.av_frame_get_side_data = (AVFrame* @frame, AVFrameSideDataType @type) => + { + vectors.av_frame_get_side_data = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_get_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_get_side_data(@frame, @type); + }; + + vectors.av_frame_is_writable = (AVFrame* @frame) => + { + vectors.av_frame_is_writable = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_is_writable", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_frame_is_writable(@frame); + }; + + vectors.av_frame_make_writable = (AVFrame* @frame) => + { + vectors.av_frame_make_writable = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_make_writable", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_make_writable(@frame); + }; + + vectors.av_frame_move_ref = (AVFrame* @dst, AVFrame* @src) => + { + vectors.av_frame_move_ref = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_move_ref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_frame_move_ref(@dst, @src); + }; + + vectors.av_frame_new_side_data = (AVFrame* @frame, AVFrameSideDataType @type, ulong @size) => + { + vectors.av_frame_new_side_data = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_new_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_new_side_data(@frame, @type, @size); + }; + + vectors.av_frame_new_side_data_from_buf = (AVFrame* @frame, AVFrameSideDataType @type, AVBufferRef* @buf) => + { + vectors.av_frame_new_side_data_from_buf = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_new_side_data_from_buf", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_new_side_data_from_buf(@frame, @type, @buf); + }; + + vectors.av_frame_ref = (AVFrame* @dst, AVFrame* @src) => + { + vectors.av_frame_ref = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_ref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_ref(@dst, @src); + }; + + vectors.av_frame_remove_side_data = (AVFrame* @frame, AVFrameSideDataType @type) => + { + vectors.av_frame_remove_side_data = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_remove_side_data", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_frame_remove_side_data(@frame, @type); + }; + + vectors.av_frame_side_data_name = (AVFrameSideDataType @type) => + { + vectors.av_frame_side_data_name = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_side_data_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_frame_side_data_name(@type); + }; + + vectors.av_frame_unref = (AVFrame* @frame) => + { + vectors.av_frame_unref = FunctionResolver.GetFunctionDelegate("avutil", "av_frame_unref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_frame_unref(@frame); + }; + + vectors.av_free = (void* @ptr) => + { + vectors.av_free = FunctionResolver.GetFunctionDelegate("avutil", "av_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_free(@ptr); + }; + + vectors.av_freep = (void* @ptr) => + { + vectors.av_freep = FunctionResolver.GetFunctionDelegate("avutil", "av_freep", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_freep(@ptr); + }; + + vectors.av_gcd = (long @a, long @b) => + { + vectors.av_gcd = FunctionResolver.GetFunctionDelegate("avutil", "av_gcd", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_gcd(@a, @b); + }; + + vectors.av_gcd_q = (AVRational @a, AVRational @b, int @max_den, AVRational @def) => + { + vectors.av_gcd_q = FunctionResolver.GetFunctionDelegate("avutil", "av_gcd_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_gcd_q(@a, @b, @max_den, @def); + }; + + vectors.av_get_alt_sample_fmt = (AVSampleFormat @sample_fmt, int @planar) => + { + vectors.av_get_alt_sample_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_get_alt_sample_fmt", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_get_alt_sample_fmt(@sample_fmt, @planar); + }; + + vectors.av_get_audio_frame_duration = (AVCodecContext* @avctx, int @frame_bytes) => + { + vectors.av_get_audio_frame_duration = FunctionResolver.GetFunctionDelegate("avcodec", "av_get_audio_frame_duration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_audio_frame_duration(@avctx, @frame_bytes); + }; + + vectors.av_get_audio_frame_duration2 = (AVCodecParameters* @par, int @frame_bytes) => + { + vectors.av_get_audio_frame_duration2 = FunctionResolver.GetFunctionDelegate("avcodec", "av_get_audio_frame_duration2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_audio_frame_duration2(@par, @frame_bytes); + }; + + vectors.av_get_bits_per_pixel = (AVPixFmtDescriptor* @pixdesc) => + { + vectors.av_get_bits_per_pixel = FunctionResolver.GetFunctionDelegate("avutil", "av_get_bits_per_pixel", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_bits_per_pixel(@pixdesc); + }; + + vectors.av_get_bits_per_sample = (AVCodecID @codec_id) => + { + vectors.av_get_bits_per_sample = FunctionResolver.GetFunctionDelegate("avcodec", "av_get_bits_per_sample", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_bits_per_sample(@codec_id); + }; + + vectors.av_get_bytes_per_sample = (AVSampleFormat @sample_fmt) => + { + vectors.av_get_bytes_per_sample = FunctionResolver.GetFunctionDelegate("avutil", "av_get_bytes_per_sample", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_get_bytes_per_sample(@sample_fmt); + }; + + vectors.av_get_channel_description = (ulong @channel) => + { + vectors.av_get_channel_description = FunctionResolver.GetFunctionDelegate("avutil", "av_get_channel_description", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_channel_description(@channel); + }; + + vectors.av_get_channel_layout = (string @name) => + { + vectors.av_get_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_get_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_channel_layout(@name); + }; + + vectors.av_get_channel_layout_channel_index = (ulong @channel_layout, ulong @channel) => + { + vectors.av_get_channel_layout_channel_index = FunctionResolver.GetFunctionDelegate("avutil", "av_get_channel_layout_channel_index", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_channel_layout_channel_index(@channel_layout, @channel); + }; + + vectors.av_get_channel_layout_nb_channels = (ulong @channel_layout) => + { + vectors.av_get_channel_layout_nb_channels = FunctionResolver.GetFunctionDelegate("avutil", "av_get_channel_layout_nb_channels", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_channel_layout_nb_channels(@channel_layout); + }; + + vectors.av_get_channel_layout_string = (byte* @buf, int @buf_size, int @nb_channels, ulong @channel_layout) => + { + vectors.av_get_channel_layout_string = FunctionResolver.GetFunctionDelegate("avutil", "av_get_channel_layout_string", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_get_channel_layout_string(@buf, @buf_size, @nb_channels, @channel_layout); + }; + + vectors.av_get_channel_name = (ulong @channel) => + { + vectors.av_get_channel_name = FunctionResolver.GetFunctionDelegate("avutil", "av_get_channel_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_channel_name(@channel); + }; + + vectors.av_get_colorspace_name = (AVColorSpace @val) => + { + vectors.av_get_colorspace_name = FunctionResolver.GetFunctionDelegate("avutil", "av_get_colorspace_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_colorspace_name(@val); + }; + + vectors.av_get_cpu_flags = () => + { + vectors.av_get_cpu_flags = FunctionResolver.GetFunctionDelegate("avutil", "av_get_cpu_flags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_cpu_flags(); + }; + + vectors.av_get_default_channel_layout = (int @nb_channels) => + { + vectors.av_get_default_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_get_default_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_default_channel_layout(@nb_channels); + }; + + vectors.av_get_exact_bits_per_sample = (AVCodecID @codec_id) => + { + vectors.av_get_exact_bits_per_sample = FunctionResolver.GetFunctionDelegate("avcodec", "av_get_exact_bits_per_sample", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_exact_bits_per_sample(@codec_id); + }; + + vectors.av_get_extended_channel_layout = (string @name, ulong* @channel_layout, int* @nb_channels) => + { + vectors.av_get_extended_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_get_extended_channel_layout", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_get_extended_channel_layout(@name, @channel_layout, @nb_channels); + }; + + vectors.av_get_frame_filename = (byte* @buf, int @buf_size, string @path, int @number) => + { + vectors.av_get_frame_filename = FunctionResolver.GetFunctionDelegate("avformat", "av_get_frame_filename", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_frame_filename(@buf, @buf_size, @path, @number); + }; + + vectors.av_get_frame_filename2 = (byte* @buf, int @buf_size, string @path, int @number, int @flags) => + { + vectors.av_get_frame_filename2 = FunctionResolver.GetFunctionDelegate("avformat", "av_get_frame_filename2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_frame_filename2(@buf, @buf_size, @path, @number, @flags); + }; + + vectors.av_get_media_type_string = (AVMediaType @media_type) => + { + vectors.av_get_media_type_string = FunctionResolver.GetFunctionDelegate("avutil", "av_get_media_type_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_media_type_string(@media_type); + }; + + vectors.av_get_output_timestamp = (AVFormatContext* @s, int @stream, long* @dts, long* @wall) => + { + vectors.av_get_output_timestamp = FunctionResolver.GetFunctionDelegate("avformat", "av_get_output_timestamp", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_output_timestamp(@s, @stream, @dts, @wall); + }; + + vectors.av_get_packed_sample_fmt = (AVSampleFormat @sample_fmt) => + { + vectors.av_get_packed_sample_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_get_packed_sample_fmt", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_get_packed_sample_fmt(@sample_fmt); + }; + + vectors.av_get_packet = (AVIOContext* @s, AVPacket* @pkt, int @size) => + { + vectors.av_get_packet = FunctionResolver.GetFunctionDelegate("avformat", "av_get_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_packet(@s, @pkt, @size); + }; + + vectors.av_get_padded_bits_per_pixel = (AVPixFmtDescriptor* @pixdesc) => + { + vectors.av_get_padded_bits_per_pixel = FunctionResolver.GetFunctionDelegate("avutil", "av_get_padded_bits_per_pixel", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_padded_bits_per_pixel(@pixdesc); + }; + + vectors.av_get_pcm_codec = (AVSampleFormat @fmt, int @be) => + { + vectors.av_get_pcm_codec = FunctionResolver.GetFunctionDelegate("avcodec", "av_get_pcm_codec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_pcm_codec(@fmt, @be); + }; + + vectors.av_get_picture_type_char = (AVPictureType @pict_type) => + { + vectors.av_get_picture_type_char = FunctionResolver.GetFunctionDelegate("avutil", "av_get_picture_type_char", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_picture_type_char(@pict_type); + }; + + vectors.av_get_pix_fmt = (string @name) => + { + vectors.av_get_pix_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_get_pix_fmt", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_pix_fmt(@name); + }; + + vectors.av_get_pix_fmt_loss = (AVPixelFormat @dst_pix_fmt, AVPixelFormat @src_pix_fmt, int @has_alpha) => + { + vectors.av_get_pix_fmt_loss = FunctionResolver.GetFunctionDelegate("avutil", "av_get_pix_fmt_loss", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_get_pix_fmt_loss(@dst_pix_fmt, @src_pix_fmt, @has_alpha); + }; + + vectors.av_get_pix_fmt_name = (AVPixelFormat @pix_fmt) => + { + vectors.av_get_pix_fmt_name = FunctionResolver.GetFunctionDelegate("avutil", "av_get_pix_fmt_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_pix_fmt_name(@pix_fmt); + }; + + vectors.av_get_pix_fmt_string = (byte* @buf, int @buf_size, AVPixelFormat @pix_fmt) => + { + vectors.av_get_pix_fmt_string = FunctionResolver.GetFunctionDelegate("avutil", "av_get_pix_fmt_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_pix_fmt_string(@buf, @buf_size, @pix_fmt); + }; + + vectors.av_get_planar_sample_fmt = (AVSampleFormat @sample_fmt) => + { + vectors.av_get_planar_sample_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_get_planar_sample_fmt", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_planar_sample_fmt(@sample_fmt); + }; + + vectors.av_get_profile_name = (AVCodec* @codec, int @profile) => + { + vectors.av_get_profile_name = FunctionResolver.GetFunctionDelegate("avcodec", "av_get_profile_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_profile_name(@codec, @profile); + }; + + vectors.av_get_sample_fmt = (string @name) => + { + vectors.av_get_sample_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_get_sample_fmt", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_sample_fmt(@name); + }; + + vectors.av_get_sample_fmt_name = (AVSampleFormat @sample_fmt) => + { + vectors.av_get_sample_fmt_name = FunctionResolver.GetFunctionDelegate("avutil", "av_get_sample_fmt_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_get_sample_fmt_name(@sample_fmt); + }; + + vectors.av_get_sample_fmt_string = (byte* @buf, int @buf_size, AVSampleFormat @sample_fmt) => + { + vectors.av_get_sample_fmt_string = FunctionResolver.GetFunctionDelegate("avutil", "av_get_sample_fmt_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_sample_fmt_string(@buf, @buf_size, @sample_fmt); + }; + + vectors.av_get_standard_channel_layout = (uint @index, ulong* @layout, byte** @name) => + { + vectors.av_get_standard_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_get_standard_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_standard_channel_layout(@index, @layout, @name); + }; + + vectors.av_get_time_base_q = () => + { + vectors.av_get_time_base_q = FunctionResolver.GetFunctionDelegate("avutil", "av_get_time_base_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_get_time_base_q(); + }; + + vectors.av_gettime = () => + { + vectors.av_gettime = FunctionResolver.GetFunctionDelegate("avutil", "av_gettime", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_gettime(); + }; + + vectors.av_gettime_relative = () => + { + vectors.av_gettime_relative = FunctionResolver.GetFunctionDelegate("avutil", "av_gettime_relative", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_gettime_relative(); + }; + + vectors.av_gettime_relative_is_monotonic = () => + { + vectors.av_gettime_relative_is_monotonic = FunctionResolver.GetFunctionDelegate("avutil", "av_gettime_relative_is_monotonic", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_gettime_relative_is_monotonic(); + }; + + vectors.av_grow_packet = (AVPacket* @pkt, int @grow_by) => + { + vectors.av_grow_packet = FunctionResolver.GetFunctionDelegate("avcodec", "av_grow_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_grow_packet(@pkt, @grow_by); + }; + + vectors.av_guess_codec = (AVOutputFormat* @fmt, string @short_name, string @filename, string @mime_type, AVMediaType @type) => + { + vectors.av_guess_codec = FunctionResolver.GetFunctionDelegate("avformat", "av_guess_codec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_guess_codec(@fmt, @short_name, @filename, @mime_type, @type); + }; + + vectors.av_guess_format = (string @short_name, string @filename, string @mime_type) => + { + vectors.av_guess_format = FunctionResolver.GetFunctionDelegate("avformat", "av_guess_format", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_guess_format(@short_name, @filename, @mime_type); + }; + + vectors.av_guess_frame_rate = (AVFormatContext* @ctx, AVStream* @stream, AVFrame* @frame) => + { + vectors.av_guess_frame_rate = FunctionResolver.GetFunctionDelegate("avformat", "av_guess_frame_rate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_guess_frame_rate(@ctx, @stream, @frame); + }; + + vectors.av_guess_sample_aspect_ratio = (AVFormatContext* @format, AVStream* @stream, AVFrame* @frame) => + { + vectors.av_guess_sample_aspect_ratio = FunctionResolver.GetFunctionDelegate("avformat", "av_guess_sample_aspect_ratio", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_guess_sample_aspect_ratio(@format, @stream, @frame); + }; + + vectors.av_hex_dump = (_iobuf* @f, byte* @buf, int @size) => + { + vectors.av_hex_dump = FunctionResolver.GetFunctionDelegate("avformat", "av_hex_dump", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_hex_dump(@f, @buf, @size); + }; + + vectors.av_hex_dump_log = (void* @avcl, int @level, byte* @buf, int @size) => + { + vectors.av_hex_dump_log = FunctionResolver.GetFunctionDelegate("avformat", "av_hex_dump_log", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_hex_dump_log(@avcl, @level, @buf, @size); + }; + + vectors.av_hwdevice_ctx_alloc = (AVHWDeviceType @type) => + { + vectors.av_hwdevice_ctx_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_ctx_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_ctx_alloc(@type); + }; + + vectors.av_hwdevice_ctx_create = (AVBufferRef** @device_ctx, AVHWDeviceType @type, string @device, AVDictionary* @opts, int @flags) => + { + vectors.av_hwdevice_ctx_create = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_ctx_create", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_ctx_create(@device_ctx, @type, @device, @opts, @flags); + }; + + vectors.av_hwdevice_ctx_create_derived = (AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, int @flags) => + { + vectors.av_hwdevice_ctx_create_derived = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_ctx_create_derived", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_ctx_create_derived(@dst_ctx, @type, @src_ctx, @flags); + }; + + vectors.av_hwdevice_ctx_create_derived_opts = (AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, AVDictionary* @options, int @flags) => + { + vectors.av_hwdevice_ctx_create_derived_opts = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_ctx_create_derived_opts", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_ctx_create_derived_opts(@dst_ctx, @type, @src_ctx, @options, @flags); + }; + + vectors.av_hwdevice_ctx_init = (AVBufferRef* @ref) => + { + vectors.av_hwdevice_ctx_init = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_ctx_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_ctx_init(@ref); + }; + + vectors.av_hwdevice_find_type_by_name = (string @name) => + { + vectors.av_hwdevice_find_type_by_name = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_find_type_by_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_find_type_by_name(@name); + }; + + vectors.av_hwdevice_get_hwframe_constraints = (AVBufferRef* @ref, void* @hwconfig) => + { + vectors.av_hwdevice_get_hwframe_constraints = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_get_hwframe_constraints", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_get_hwframe_constraints(@ref, @hwconfig); + }; + + vectors.av_hwdevice_get_type_name = (AVHWDeviceType @type) => + { + vectors.av_hwdevice_get_type_name = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_get_type_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_get_type_name(@type); + }; + + vectors.av_hwdevice_hwconfig_alloc = (AVBufferRef* @device_ctx) => + { + vectors.av_hwdevice_hwconfig_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_hwconfig_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_hwconfig_alloc(@device_ctx); + }; + + vectors.av_hwdevice_iterate_types = (AVHWDeviceType @prev) => + { + vectors.av_hwdevice_iterate_types = FunctionResolver.GetFunctionDelegate("avutil", "av_hwdevice_iterate_types", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwdevice_iterate_types(@prev); + }; + + vectors.av_hwframe_constraints_free = (AVHWFramesConstraints** @constraints) => + { + vectors.av_hwframe_constraints_free = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_constraints_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_hwframe_constraints_free(@constraints); + }; + + vectors.av_hwframe_ctx_alloc = (AVBufferRef* @device_ctx) => + { + vectors.av_hwframe_ctx_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_ctx_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_ctx_alloc(@device_ctx); + }; + + vectors.av_hwframe_ctx_create_derived = (AVBufferRef** @derived_frame_ctx, AVPixelFormat @format, AVBufferRef* @derived_device_ctx, AVBufferRef* @source_frame_ctx, int @flags) => + { + vectors.av_hwframe_ctx_create_derived = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_ctx_create_derived", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_ctx_create_derived(@derived_frame_ctx, @format, @derived_device_ctx, @source_frame_ctx, @flags); + }; + + vectors.av_hwframe_ctx_init = (AVBufferRef* @ref) => + { + vectors.av_hwframe_ctx_init = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_ctx_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_ctx_init(@ref); + }; + + vectors.av_hwframe_get_buffer = (AVBufferRef* @hwframe_ctx, AVFrame* @frame, int @flags) => + { + vectors.av_hwframe_get_buffer = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_get_buffer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_get_buffer(@hwframe_ctx, @frame, @flags); + }; + + vectors.av_hwframe_map = (AVFrame* @dst, AVFrame* @src, int @flags) => + { + vectors.av_hwframe_map = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_map", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_map(@dst, @src, @flags); + }; + + vectors.av_hwframe_transfer_data = (AVFrame* @dst, AVFrame* @src, int @flags) => + { + vectors.av_hwframe_transfer_data = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_transfer_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_transfer_data(@dst, @src, @flags); + }; + + vectors.av_hwframe_transfer_get_formats = (AVBufferRef* @hwframe_ctx, AVHWFrameTransferDirection @dir, AVPixelFormat** @formats, int @flags) => + { + vectors.av_hwframe_transfer_get_formats = FunctionResolver.GetFunctionDelegate("avutil", "av_hwframe_transfer_get_formats", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_hwframe_transfer_get_formats(@hwframe_ctx, @dir, @formats, @flags); + }; + + vectors.av_image_alloc = (ref byte_ptrArray4 @pointers, ref int_array4 @linesizes, int @w, int @h, AVPixelFormat @pix_fmt, int @align) => + { + vectors.av_image_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_image_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_alloc(ref @pointers, ref @linesizes, @w, @h, @pix_fmt, @align); + }; + + vectors.av_image_check_sar = (uint @w, uint @h, AVRational @sar) => + { + vectors.av_image_check_sar = FunctionResolver.GetFunctionDelegate("avutil", "av_image_check_sar", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_check_sar(@w, @h, @sar); + }; + + vectors.av_image_check_size = (uint @w, uint @h, int @log_offset, void* @log_ctx) => + { + vectors.av_image_check_size = FunctionResolver.GetFunctionDelegate("avutil", "av_image_check_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_check_size(@w, @h, @log_offset, @log_ctx); + }; + + vectors.av_image_check_size2 = (uint @w, uint @h, long @max_pixels, AVPixelFormat @pix_fmt, int @log_offset, void* @log_ctx) => + { + vectors.av_image_check_size2 = FunctionResolver.GetFunctionDelegate("avutil", "av_image_check_size2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_check_size2(@w, @h, @max_pixels, @pix_fmt, @log_offset, @log_ctx); + }; + + vectors.av_image_copy = (ref byte_ptrArray4 @dst_data, ref int_array4 @dst_linesizes, in byte_ptrArray4 @src_data, in int_array4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height) => + { + vectors.av_image_copy = FunctionResolver.GetFunctionDelegate("avutil", "av_image_copy", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_image_copy(ref @dst_data, ref @dst_linesizes, @src_data, @src_linesizes, @pix_fmt, @width, @height); + }; + + vectors.av_image_copy_plane = (byte* @dst, int @dst_linesize, byte* @src, int @src_linesize, int @bytewidth, int @height) => + { + vectors.av_image_copy_plane = FunctionResolver.GetFunctionDelegate("avutil", "av_image_copy_plane", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_image_copy_plane(@dst, @dst_linesize, @src, @src_linesize, @bytewidth, @height); + }; + + vectors.av_image_copy_plane_uc_from = (byte* @dst, long @dst_linesize, byte* @src, long @src_linesize, long @bytewidth, int @height) => + { + vectors.av_image_copy_plane_uc_from = FunctionResolver.GetFunctionDelegate("avutil", "av_image_copy_plane_uc_from", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_image_copy_plane_uc_from(@dst, @dst_linesize, @src, @src_linesize, @bytewidth, @height); + }; + + vectors.av_image_copy_to_buffer = (byte* @dst, int @dst_size, in byte_ptrArray4 @src_data, in int_array4 @src_linesize, AVPixelFormat @pix_fmt, int @width, int @height, int @align) => + { + vectors.av_image_copy_to_buffer = FunctionResolver.GetFunctionDelegate("avutil", "av_image_copy_to_buffer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_copy_to_buffer(@dst, @dst_size, @src_data, @src_linesize, @pix_fmt, @width, @height, @align); + }; + + vectors.av_image_copy_uc_from = (ref byte_ptrArray4 @dst_data, in long_array4 @dst_linesizes, in byte_ptrArray4 @src_data, in long_array4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height) => + { + vectors.av_image_copy_uc_from = FunctionResolver.GetFunctionDelegate("avutil", "av_image_copy_uc_from", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_image_copy_uc_from(ref @dst_data, @dst_linesizes, @src_data, @src_linesizes, @pix_fmt, @width, @height); + }; + + vectors.av_image_fill_arrays = (ref byte_ptrArray4 @dst_data, ref int_array4 @dst_linesize, byte* @src, AVPixelFormat @pix_fmt, int @width, int @height, int @align) => + { + vectors.av_image_fill_arrays = FunctionResolver.GetFunctionDelegate("avutil", "av_image_fill_arrays", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_fill_arrays(ref @dst_data, ref @dst_linesize, @src, @pix_fmt, @width, @height, @align); + }; + + vectors.av_image_fill_black = (ref byte_ptrArray4 @dst_data, in long_array4 @dst_linesize, AVPixelFormat @pix_fmt, AVColorRange @range, int @width, int @height) => + { + vectors.av_image_fill_black = FunctionResolver.GetFunctionDelegate("avutil", "av_image_fill_black", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_fill_black(ref @dst_data, @dst_linesize, @pix_fmt, @range, @width, @height); + }; + + vectors.av_image_fill_linesizes = (ref int_array4 @linesizes, AVPixelFormat @pix_fmt, int @width) => + { + vectors.av_image_fill_linesizes = FunctionResolver.GetFunctionDelegate("avutil", "av_image_fill_linesizes", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_fill_linesizes(ref @linesizes, @pix_fmt, @width); + }; + + vectors.av_image_fill_max_pixsteps = (ref int_array4 @max_pixsteps, ref int_array4 @max_pixstep_comps, AVPixFmtDescriptor* @pixdesc) => + { + vectors.av_image_fill_max_pixsteps = FunctionResolver.GetFunctionDelegate("avutil", "av_image_fill_max_pixsteps", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_image_fill_max_pixsteps(ref @max_pixsteps, ref @max_pixstep_comps, @pixdesc); + }; + + vectors.av_image_fill_plane_sizes = (ref ulong_array4 @size, AVPixelFormat @pix_fmt, int @height, in long_array4 @linesizes) => + { + vectors.av_image_fill_plane_sizes = FunctionResolver.GetFunctionDelegate("avutil", "av_image_fill_plane_sizes", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_fill_plane_sizes(ref @size, @pix_fmt, @height, @linesizes); + }; + + vectors.av_image_fill_pointers = (ref byte_ptrArray4 @data, AVPixelFormat @pix_fmt, int @height, byte* @ptr, in int_array4 @linesizes) => + { + vectors.av_image_fill_pointers = FunctionResolver.GetFunctionDelegate("avutil", "av_image_fill_pointers", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_fill_pointers(ref @data, @pix_fmt, @height, @ptr, @linesizes); + }; + + vectors.av_image_get_buffer_size = (AVPixelFormat @pix_fmt, int @width, int @height, int @align) => + { + vectors.av_image_get_buffer_size = FunctionResolver.GetFunctionDelegate("avutil", "av_image_get_buffer_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_get_buffer_size(@pix_fmt, @width, @height, @align); + }; + + vectors.av_image_get_linesize = (AVPixelFormat @pix_fmt, int @width, int @plane) => + { + vectors.av_image_get_linesize = FunctionResolver.GetFunctionDelegate("avutil", "av_image_get_linesize", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_image_get_linesize(@pix_fmt, @width, @plane); + }; + + vectors.av_index_search_timestamp = (AVStream* @st, long @timestamp, int @flags) => + { + vectors.av_index_search_timestamp = FunctionResolver.GetFunctionDelegate("avformat", "av_index_search_timestamp", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_index_search_timestamp(@st, @timestamp, @flags); + }; + + vectors.av_init_packet = (AVPacket* @pkt) => + { + vectors.av_init_packet = FunctionResolver.GetFunctionDelegate("avcodec", "av_init_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_init_packet(@pkt); + }; + + vectors.av_input_audio_device_next = (AVInputFormat* @d) => + { + vectors.av_input_audio_device_next = FunctionResolver.GetFunctionDelegate("avdevice", "av_input_audio_device_next", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_input_audio_device_next(@d); + }; + + vectors.av_input_video_device_next = (AVInputFormat* @d) => + { + vectors.av_input_video_device_next = FunctionResolver.GetFunctionDelegate("avdevice", "av_input_video_device_next", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_input_video_device_next(@d); + }; + + vectors.av_int_list_length_for_size = (uint @elsize, void* @list, ulong @term) => + { + vectors.av_int_list_length_for_size = FunctionResolver.GetFunctionDelegate("avutil", "av_int_list_length_for_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_int_list_length_for_size(@elsize, @list, @term); + }; + + vectors.av_interleaved_write_frame = (AVFormatContext* @s, AVPacket* @pkt) => + { + vectors.av_interleaved_write_frame = FunctionResolver.GetFunctionDelegate("avformat", "av_interleaved_write_frame", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_interleaved_write_frame(@s, @pkt); + }; + + vectors.av_interleaved_write_uncoded_frame = (AVFormatContext* @s, int @stream_index, AVFrame* @frame) => + { + vectors.av_interleaved_write_uncoded_frame = FunctionResolver.GetFunctionDelegate("avformat", "av_interleaved_write_uncoded_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_interleaved_write_uncoded_frame(@s, @stream_index, @frame); + }; + + vectors.av_log = (void* @avcl, int @level, string @fmt) => + { + vectors.av_log = FunctionResolver.GetFunctionDelegate("avutil", "av_log", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_log(@avcl, @level, @fmt); + }; + + vectors.av_log_default_callback = (void* @avcl, int @level, string @fmt, byte* @vl) => + { + vectors.av_log_default_callback = FunctionResolver.GetFunctionDelegate("avutil", "av_log_default_callback", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_log_default_callback(@avcl, @level, @fmt, @vl); + }; + + vectors.av_log_format_line = (void* @ptr, int @level, string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix) => + { + vectors.av_log_format_line = FunctionResolver.GetFunctionDelegate("avutil", "av_log_format_line", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_log_format_line(@ptr, @level, @fmt, @vl, @line, @line_size, @print_prefix); + }; + + vectors.av_log_format_line2 = (void* @ptr, int @level, string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix) => + { + vectors.av_log_format_line2 = FunctionResolver.GetFunctionDelegate("avutil", "av_log_format_line2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_log_format_line2(@ptr, @level, @fmt, @vl, @line, @line_size, @print_prefix); + }; + + vectors.av_log_get_flags = () => + { + vectors.av_log_get_flags = FunctionResolver.GetFunctionDelegate("avutil", "av_log_get_flags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_log_get_flags(); + }; + + vectors.av_log_get_level = () => + { + vectors.av_log_get_level = FunctionResolver.GetFunctionDelegate("avutil", "av_log_get_level", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_log_get_level(); + }; + + vectors.av_log_once = (void* @avcl, int @initial_level, int @subsequent_level, int* @state, string @fmt) => + { + vectors.av_log_once = FunctionResolver.GetFunctionDelegate("avutil", "av_log_once", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_log_once(@avcl, @initial_level, @subsequent_level, @state, @fmt); + }; + + vectors.av_log_set_callback = (av_log_set_callback_callback_func @callback) => + { + vectors.av_log_set_callback = FunctionResolver.GetFunctionDelegate("avutil", "av_log_set_callback", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_log_set_callback(@callback); + }; + + vectors.av_log_set_flags = (int @arg) => + { + vectors.av_log_set_flags = FunctionResolver.GetFunctionDelegate("avutil", "av_log_set_flags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_log_set_flags(@arg); + }; + + vectors.av_log_set_level = (int @level) => + { + vectors.av_log_set_level = FunctionResolver.GetFunctionDelegate("avutil", "av_log_set_level", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_log_set_level(@level); + }; + + vectors.av_log2 = (uint @v) => + { + vectors.av_log2 = FunctionResolver.GetFunctionDelegate("avutil", "av_log2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_log2(@v); + }; + + vectors.av_log2_16bit = (uint @v) => + { + vectors.av_log2_16bit = FunctionResolver.GetFunctionDelegate("avutil", "av_log2_16bit", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_log2_16bit(@v); + }; + + vectors.av_malloc = (ulong @size) => + { + vectors.av_malloc = FunctionResolver.GetFunctionDelegate("avutil", "av_malloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_malloc(@size); + }; + + vectors.av_malloc_array = (ulong @nmemb, ulong @size) => + { + vectors.av_malloc_array = FunctionResolver.GetFunctionDelegate("avutil", "av_malloc_array", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_malloc_array(@nmemb, @size); + }; + + vectors.av_mallocz = (ulong @size) => + { + vectors.av_mallocz = FunctionResolver.GetFunctionDelegate("avutil", "av_mallocz", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_mallocz(@size); + }; + + vectors.av_mallocz_array = (ulong @nmemb, ulong @size) => + { + vectors.av_mallocz_array = FunctionResolver.GetFunctionDelegate("avutil", "av_mallocz_array", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_mallocz_array(@nmemb, @size); + }; + + vectors.av_mastering_display_metadata_alloc = () => + { + vectors.av_mastering_display_metadata_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_mastering_display_metadata_alloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_mastering_display_metadata_alloc(); + }; + + vectors.av_mastering_display_metadata_create_side_data = (AVFrame* @frame) => + { + vectors.av_mastering_display_metadata_create_side_data = FunctionResolver.GetFunctionDelegate("avutil", "av_mastering_display_metadata_create_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_mastering_display_metadata_create_side_data(@frame); + }; + + vectors.av_match_ext = (string @filename, string @extensions) => + { + vectors.av_match_ext = FunctionResolver.GetFunctionDelegate("avformat", "av_match_ext", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_match_ext(@filename, @extensions); + }; + + vectors.av_max_alloc = (ulong @max) => + { + vectors.av_max_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_max_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_max_alloc(@max); + }; + + vectors.av_memcpy_backptr = (byte* @dst, int @back, int @cnt) => + { + vectors.av_memcpy_backptr = FunctionResolver.GetFunctionDelegate("avutil", "av_memcpy_backptr", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_memcpy_backptr(@dst, @back, @cnt); + }; + + vectors.av_memdup = (void* @p, ulong @size) => + { + vectors.av_memdup = FunctionResolver.GetFunctionDelegate("avutil", "av_memdup", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_memdup(@p, @size); + }; + + vectors.av_mul_q = (AVRational @b, AVRational @c) => + { + vectors.av_mul_q = FunctionResolver.GetFunctionDelegate("avutil", "av_mul_q", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_mul_q(@b, @c); + }; + + vectors.av_muxer_iterate = (void** @opaque) => + { + vectors.av_muxer_iterate = FunctionResolver.GetFunctionDelegate("avformat", "av_muxer_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_muxer_iterate(@opaque); + }; + + vectors.av_nearer_q = (AVRational @q, AVRational @q1, AVRational @q2) => + { + vectors.av_nearer_q = FunctionResolver.GetFunctionDelegate("avutil", "av_nearer_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_nearer_q(@q, @q1, @q2); + }; + + vectors.av_new_packet = (AVPacket* @pkt, int @size) => + { + vectors.av_new_packet = FunctionResolver.GetFunctionDelegate("avcodec", "av_new_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_new_packet(@pkt, @size); + }; + + vectors.av_new_program = (AVFormatContext* @s, int @id) => + { + vectors.av_new_program = FunctionResolver.GetFunctionDelegate("avformat", "av_new_program", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_new_program(@s, @id); + }; + + vectors.av_opt_child_class_iterate = (AVClass* @parent, void** @iter) => + { + vectors.av_opt_child_class_iterate = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_child_class_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_child_class_iterate(@parent, @iter); + }; + + vectors.av_opt_child_next = (void* @obj, void* @prev) => + { + vectors.av_opt_child_next = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_child_next", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_child_next(@obj, @prev); + }; + + vectors.av_opt_copy = (void* @dest, void* @src) => + { + vectors.av_opt_copy = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_copy", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_copy(@dest, @src); + }; + + vectors.av_opt_eval_double = (void* @obj, AVOption* @o, string @val, double* @double_out) => + { + vectors.av_opt_eval_double = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_eval_double", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_eval_double(@obj, @o, @val, @double_out); + }; + + vectors.av_opt_eval_flags = (void* @obj, AVOption* @o, string @val, int* @flags_out) => + { + vectors.av_opt_eval_flags = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_eval_flags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_eval_flags(@obj, @o, @val, @flags_out); + }; + + vectors.av_opt_eval_float = (void* @obj, AVOption* @o, string @val, float* @float_out) => + { + vectors.av_opt_eval_float = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_eval_float", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_eval_float(@obj, @o, @val, @float_out); + }; + + vectors.av_opt_eval_int = (void* @obj, AVOption* @o, string @val, int* @int_out) => + { + vectors.av_opt_eval_int = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_eval_int", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_eval_int(@obj, @o, @val, @int_out); + }; + + vectors.av_opt_eval_int64 = (void* @obj, AVOption* @o, string @val, long* @int64_out) => + { + vectors.av_opt_eval_int64 = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_eval_int64", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_eval_int64(@obj, @o, @val, @int64_out); + }; + + vectors.av_opt_eval_q = (void* @obj, AVOption* @o, string @val, AVRational* @q_out) => + { + vectors.av_opt_eval_q = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_eval_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_eval_q(@obj, @o, @val, @q_out); + }; + + vectors.av_opt_find = (void* @obj, string @name, string @unit, int @opt_flags, int @search_flags) => + { + vectors.av_opt_find = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_find", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_find(@obj, @name, @unit, @opt_flags, @search_flags); + }; + + vectors.av_opt_find2 = (void* @obj, string @name, string @unit, int @opt_flags, int @search_flags, void** @target_obj) => + { + vectors.av_opt_find2 = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_find2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_find2(@obj, @name, @unit, @opt_flags, @search_flags, @target_obj); + }; + + vectors.av_opt_flag_is_set = (void* @obj, string @field_name, string @flag_name) => + { + vectors.av_opt_flag_is_set = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_flag_is_set", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_flag_is_set(@obj, @field_name, @flag_name); + }; + + vectors.av_opt_free = (void* @obj) => + { + vectors.av_opt_free = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_opt_free(@obj); + }; + + vectors.av_opt_freep_ranges = (AVOptionRanges** @ranges) => + { + vectors.av_opt_freep_ranges = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_freep_ranges", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_opt_freep_ranges(@ranges); + }; + + vectors.av_opt_get = (void* @obj, string @name, int @search_flags, byte** @out_val) => + { + vectors.av_opt_get = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get(@obj, @name, @search_flags, @out_val); + }; + + vectors.av_opt_get_channel_layout = (void* @obj, string @name, int @search_flags, long* @ch_layout) => + { + vectors.av_opt_get_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_channel_layout(@obj, @name, @search_flags, @ch_layout); + }; + + vectors.av_opt_get_chlayout = (void* @obj, string @name, int @search_flags, AVChannelLayout* @layout) => + { + vectors.av_opt_get_chlayout = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_chlayout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_chlayout(@obj, @name, @search_flags, @layout); + }; + + vectors.av_opt_get_dict_val = (void* @obj, string @name, int @search_flags, AVDictionary** @out_val) => + { + vectors.av_opt_get_dict_val = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_dict_val", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_dict_val(@obj, @name, @search_flags, @out_val); + }; + + vectors.av_opt_get_double = (void* @obj, string @name, int @search_flags, double* @out_val) => + { + vectors.av_opt_get_double = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_double", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_double(@obj, @name, @search_flags, @out_val); + }; + + vectors.av_opt_get_image_size = (void* @obj, string @name, int @search_flags, int* @w_out, int* @h_out) => + { + vectors.av_opt_get_image_size = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_image_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_image_size(@obj, @name, @search_flags, @w_out, @h_out); + }; + + vectors.av_opt_get_int = (void* @obj, string @name, int @search_flags, long* @out_val) => + { + vectors.av_opt_get_int = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_int", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_int(@obj, @name, @search_flags, @out_val); + }; + + vectors.av_opt_get_key_value = (byte** @ropts, string @key_val_sep, string @pairs_sep, uint @flags, byte** @rkey, byte** @rval) => + { + vectors.av_opt_get_key_value = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_key_value", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_key_value(@ropts, @key_val_sep, @pairs_sep, @flags, @rkey, @rval); + }; + + vectors.av_opt_get_pixel_fmt = (void* @obj, string @name, int @search_flags, AVPixelFormat* @out_fmt) => + { + vectors.av_opt_get_pixel_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_pixel_fmt", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_pixel_fmt(@obj, @name, @search_flags, @out_fmt); + }; + + vectors.av_opt_get_q = (void* @obj, string @name, int @search_flags, AVRational* @out_val) => + { + vectors.av_opt_get_q = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_q", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_q(@obj, @name, @search_flags, @out_val); + }; + + vectors.av_opt_get_sample_fmt = (void* @obj, string @name, int @search_flags, AVSampleFormat* @out_fmt) => + { + vectors.av_opt_get_sample_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_sample_fmt", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_sample_fmt(@obj, @name, @search_flags, @out_fmt); + }; + + vectors.av_opt_get_video_rate = (void* @obj, string @name, int @search_flags, AVRational* @out_val) => + { + vectors.av_opt_get_video_rate = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_get_video_rate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_get_video_rate(@obj, @name, @search_flags, @out_val); + }; + + vectors.av_opt_is_set_to_default = (void* @obj, AVOption* @o) => + { + vectors.av_opt_is_set_to_default = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_is_set_to_default", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_is_set_to_default(@obj, @o); + }; + + vectors.av_opt_is_set_to_default_by_name = (void* @obj, string @name, int @search_flags) => + { + vectors.av_opt_is_set_to_default_by_name = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_is_set_to_default_by_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_is_set_to_default_by_name(@obj, @name, @search_flags); + }; + + vectors.av_opt_next = (void* @obj, AVOption* @prev) => + { + vectors.av_opt_next = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_next", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_next(@obj, @prev); + }; + + vectors.av_opt_ptr = (AVClass* @avclass, void* @obj, string @name) => + { + vectors.av_opt_ptr = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_ptr", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_ptr(@avclass, @obj, @name); + }; + + vectors.av_opt_query_ranges = (AVOptionRanges** @p0, void* @obj, string @key, int @flags) => + { + vectors.av_opt_query_ranges = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_query_ranges", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_query_ranges(@p0, @obj, @key, @flags); + }; + + vectors.av_opt_query_ranges_default = (AVOptionRanges** @p0, void* @obj, string @key, int @flags) => + { + vectors.av_opt_query_ranges_default = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_query_ranges_default", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_query_ranges_default(@p0, @obj, @key, @flags); + }; + + vectors.av_opt_serialize = (void* @obj, int @opt_flags, int @flags, byte** @buffer, byte @key_val_sep, byte @pairs_sep) => + { + vectors.av_opt_serialize = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_serialize", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_serialize(@obj, @opt_flags, @flags, @buffer, @key_val_sep, @pairs_sep); + }; + + vectors.av_opt_set = (void* @obj, string @name, string @val, int @search_flags) => + { + vectors.av_opt_set = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set(@obj, @name, @val, @search_flags); + }; + + vectors.av_opt_set_bin = (void* @obj, string @name, byte* @val, int @size, int @search_flags) => + { + vectors.av_opt_set_bin = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_bin", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_bin(@obj, @name, @val, @size, @search_flags); + }; + + vectors.av_opt_set_channel_layout = (void* @obj, string @name, long @ch_layout, int @search_flags) => + { + vectors.av_opt_set_channel_layout = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_channel_layout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_channel_layout(@obj, @name, @ch_layout, @search_flags); + }; + + vectors.av_opt_set_chlayout = (void* @obj, string @name, AVChannelLayout* @layout, int @search_flags) => + { + vectors.av_opt_set_chlayout = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_chlayout", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_chlayout(@obj, @name, @layout, @search_flags); + }; + + vectors.av_opt_set_defaults = (void* @s) => + { + vectors.av_opt_set_defaults = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_defaults", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_opt_set_defaults(@s); + }; + + vectors.av_opt_set_defaults2 = (void* @s, int @mask, int @flags) => + { + vectors.av_opt_set_defaults2 = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_defaults2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_opt_set_defaults2(@s, @mask, @flags); + }; + + vectors.av_opt_set_dict = (void* @obj, AVDictionary** @options) => + { + vectors.av_opt_set_dict = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_dict", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_dict(@obj, @options); + }; + + vectors.av_opt_set_dict_val = (void* @obj, string @name, AVDictionary* @val, int @search_flags) => + { + vectors.av_opt_set_dict_val = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_dict_val", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_dict_val(@obj, @name, @val, @search_flags); + }; + + vectors.av_opt_set_dict2 = (void* @obj, AVDictionary** @options, int @search_flags) => + { + vectors.av_opt_set_dict2 = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_dict2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_dict2(@obj, @options, @search_flags); + }; + + vectors.av_opt_set_double = (void* @obj, string @name, double @val, int @search_flags) => + { + vectors.av_opt_set_double = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_double", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_double(@obj, @name, @val, @search_flags); + }; + + vectors.av_opt_set_from_string = (void* @ctx, string @opts, byte** @shorthand, string @key_val_sep, string @pairs_sep) => + { + vectors.av_opt_set_from_string = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_from_string", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_from_string(@ctx, @opts, @shorthand, @key_val_sep, @pairs_sep); + }; + + vectors.av_opt_set_image_size = (void* @obj, string @name, int @w, int @h, int @search_flags) => + { + vectors.av_opt_set_image_size = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_image_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_image_size(@obj, @name, @w, @h, @search_flags); + }; + + vectors.av_opt_set_int = (void* @obj, string @name, long @val, int @search_flags) => + { + vectors.av_opt_set_int = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_int", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_int(@obj, @name, @val, @search_flags); + }; + + vectors.av_opt_set_pixel_fmt = (void* @obj, string @name, AVPixelFormat @fmt, int @search_flags) => + { + vectors.av_opt_set_pixel_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_pixel_fmt", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_pixel_fmt(@obj, @name, @fmt, @search_flags); + }; + + vectors.av_opt_set_q = (void* @obj, string @name, AVRational @val, int @search_flags) => + { + vectors.av_opt_set_q = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_q(@obj, @name, @val, @search_flags); + }; + + vectors.av_opt_set_sample_fmt = (void* @obj, string @name, AVSampleFormat @fmt, int @search_flags) => + { + vectors.av_opt_set_sample_fmt = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_sample_fmt", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_sample_fmt(@obj, @name, @fmt, @search_flags); + }; + + vectors.av_opt_set_video_rate = (void* @obj, string @name, AVRational @val, int @search_flags) => + { + vectors.av_opt_set_video_rate = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_set_video_rate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_set_video_rate(@obj, @name, @val, @search_flags); + }; + + vectors.av_opt_show2 = (void* @obj, void* @av_log_obj, int @req_flags, int @rej_flags) => + { + vectors.av_opt_show2 = FunctionResolver.GetFunctionDelegate("avutil", "av_opt_show2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_opt_show2(@obj, @av_log_obj, @req_flags, @rej_flags); + }; + + vectors.av_output_audio_device_next = (AVOutputFormat* @d) => + { + vectors.av_output_audio_device_next = FunctionResolver.GetFunctionDelegate("avdevice", "av_output_audio_device_next", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_output_audio_device_next(@d); + }; + + vectors.av_output_video_device_next = (AVOutputFormat* @d) => + { + vectors.av_output_video_device_next = FunctionResolver.GetFunctionDelegate("avdevice", "av_output_video_device_next", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_output_video_device_next(@d); + }; + + vectors.av_packet_add_side_data = (AVPacket* @pkt, AVPacketSideDataType @type, byte* @data, ulong @size) => + { + vectors.av_packet_add_side_data = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_add_side_data", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_packet_add_side_data(@pkt, @type, @data, @size); + }; + + vectors.av_packet_alloc = () => + { + vectors.av_packet_alloc = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_alloc(); + }; + + vectors.av_packet_clone = (AVPacket* @src) => + { + vectors.av_packet_clone = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_clone", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_clone(@src); + }; + + vectors.av_packet_copy_props = (AVPacket* @dst, AVPacket* @src) => + { + vectors.av_packet_copy_props = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_copy_props", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_copy_props(@dst, @src); + }; + + vectors.av_packet_free = (AVPacket** @pkt) => + { + vectors.av_packet_free = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_packet_free(@pkt); + }; + + vectors.av_packet_free_side_data = (AVPacket* @pkt) => + { + vectors.av_packet_free_side_data = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_free_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_packet_free_side_data(@pkt); + }; + + vectors.av_packet_from_data = (AVPacket* @pkt, byte* @data, int @size) => + { + vectors.av_packet_from_data = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_from_data", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_packet_from_data(@pkt, @data, @size); + }; + + vectors.av_packet_get_side_data = (AVPacket* @pkt, AVPacketSideDataType @type, ulong* @size) => + { + vectors.av_packet_get_side_data = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_get_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_get_side_data(@pkt, @type, @size); + }; + + vectors.av_packet_make_refcounted = (AVPacket* @pkt) => + { + vectors.av_packet_make_refcounted = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_make_refcounted", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_make_refcounted(@pkt); + }; + + vectors.av_packet_make_writable = (AVPacket* @pkt) => + { + vectors.av_packet_make_writable = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_make_writable", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_make_writable(@pkt); + }; + + vectors.av_packet_move_ref = (AVPacket* @dst, AVPacket* @src) => + { + vectors.av_packet_move_ref = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_move_ref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_packet_move_ref(@dst, @src); + }; + + vectors.av_packet_new_side_data = (AVPacket* @pkt, AVPacketSideDataType @type, ulong @size) => + { + vectors.av_packet_new_side_data = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_new_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_new_side_data(@pkt, @type, @size); + }; + + vectors.av_packet_pack_dictionary = (AVDictionary* @dict, ulong* @size) => + { + vectors.av_packet_pack_dictionary = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_pack_dictionary", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_packet_pack_dictionary(@dict, @size); + }; + + vectors.av_packet_ref = (AVPacket* @dst, AVPacket* @src) => + { + vectors.av_packet_ref = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_ref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_ref(@dst, @src); + }; + + vectors.av_packet_rescale_ts = (AVPacket* @pkt, AVRational @tb_src, AVRational @tb_dst) => + { + vectors.av_packet_rescale_ts = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_rescale_ts", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_packet_rescale_ts(@pkt, @tb_src, @tb_dst); + }; + + vectors.av_packet_shrink_side_data = (AVPacket* @pkt, AVPacketSideDataType @type, ulong @size) => + { + vectors.av_packet_shrink_side_data = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_shrink_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_shrink_side_data(@pkt, @type, @size); + }; + + vectors.av_packet_side_data_name = (AVPacketSideDataType @type) => + { + vectors.av_packet_side_data_name = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_side_data_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_packet_side_data_name(@type); + }; + + vectors.av_packet_unpack_dictionary = (byte* @data, ulong @size, AVDictionary** @dict) => + { + vectors.av_packet_unpack_dictionary = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_unpack_dictionary", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_packet_unpack_dictionary(@data, @size, @dict); + }; + + vectors.av_packet_unref = (AVPacket* @pkt) => + { + vectors.av_packet_unref = FunctionResolver.GetFunctionDelegate("avcodec", "av_packet_unref", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_packet_unref(@pkt); + }; + + vectors.av_parse_cpu_caps = (uint* @flags, string @s) => + { + vectors.av_parse_cpu_caps = FunctionResolver.GetFunctionDelegate("avutil", "av_parse_cpu_caps", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_parse_cpu_caps(@flags, @s); + }; + + vectors.av_parser_close = (AVCodecParserContext* @s) => + { + vectors.av_parser_close = FunctionResolver.GetFunctionDelegate("avcodec", "av_parser_close", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_parser_close(@s); + }; + + vectors.av_parser_init = (int @codec_id) => + { + vectors.av_parser_init = FunctionResolver.GetFunctionDelegate("avcodec", "av_parser_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_parser_init(@codec_id); + }; + + vectors.av_parser_iterate = (void** @opaque) => + { + vectors.av_parser_iterate = FunctionResolver.GetFunctionDelegate("avcodec", "av_parser_iterate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_parser_iterate(@opaque); + }; + + vectors.av_parser_parse2 = (AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size, long @pts, long @dts, long @pos) => + { + vectors.av_parser_parse2 = FunctionResolver.GetFunctionDelegate("avcodec", "av_parser_parse2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_parser_parse2(@s, @avctx, @poutbuf, @poutbuf_size, @buf, @buf_size, @pts, @dts, @pos); + }; + + vectors.av_pix_fmt_count_planes = (AVPixelFormat @pix_fmt) => + { + vectors.av_pix_fmt_count_planes = FunctionResolver.GetFunctionDelegate("avutil", "av_pix_fmt_count_planes", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_pix_fmt_count_planes(@pix_fmt); + }; + + vectors.av_pix_fmt_desc_get = (AVPixelFormat @pix_fmt) => + { + vectors.av_pix_fmt_desc_get = FunctionResolver.GetFunctionDelegate("avutil", "av_pix_fmt_desc_get", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_pix_fmt_desc_get(@pix_fmt); + }; + + vectors.av_pix_fmt_desc_get_id = (AVPixFmtDescriptor* @desc) => + { + vectors.av_pix_fmt_desc_get_id = FunctionResolver.GetFunctionDelegate("avutil", "av_pix_fmt_desc_get_id", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_pix_fmt_desc_get_id(@desc); + }; + + vectors.av_pix_fmt_desc_next = (AVPixFmtDescriptor* @prev) => + { + vectors.av_pix_fmt_desc_next = FunctionResolver.GetFunctionDelegate("avutil", "av_pix_fmt_desc_next", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_pix_fmt_desc_next(@prev); + }; + + vectors.av_pix_fmt_get_chroma_sub_sample = (AVPixelFormat @pix_fmt, int* @h_shift, int* @v_shift) => + { + vectors.av_pix_fmt_get_chroma_sub_sample = FunctionResolver.GetFunctionDelegate("avutil", "av_pix_fmt_get_chroma_sub_sample", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_pix_fmt_get_chroma_sub_sample(@pix_fmt, @h_shift, @v_shift); + }; + + vectors.av_pix_fmt_swap_endianness = (AVPixelFormat @pix_fmt) => + { + vectors.av_pix_fmt_swap_endianness = FunctionResolver.GetFunctionDelegate("avutil", "av_pix_fmt_swap_endianness", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_pix_fmt_swap_endianness(@pix_fmt); + }; + + vectors.av_pkt_dump_log2 = (void* @avcl, int @level, AVPacket* @pkt, int @dump_payload, AVStream* @st) => + { + vectors.av_pkt_dump_log2 = FunctionResolver.GetFunctionDelegate("avformat", "av_pkt_dump_log2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_pkt_dump_log2(@avcl, @level, @pkt, @dump_payload, @st); + }; + + vectors.av_pkt_dump2 = (_iobuf* @f, AVPacket* @pkt, int @dump_payload, AVStream* @st) => + { + vectors.av_pkt_dump2 = FunctionResolver.GetFunctionDelegate("avformat", "av_pkt_dump2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_pkt_dump2(@f, @pkt, @dump_payload, @st); + }; + + vectors.av_probe_input_buffer = (AVIOContext* @pb, AVInputFormat** @fmt, string @url, void* @logctx, uint @offset, uint @max_probe_size) => + { + vectors.av_probe_input_buffer = FunctionResolver.GetFunctionDelegate("avformat", "av_probe_input_buffer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_probe_input_buffer(@pb, @fmt, @url, @logctx, @offset, @max_probe_size); + }; + + vectors.av_probe_input_buffer2 = (AVIOContext* @pb, AVInputFormat** @fmt, string @url, void* @logctx, uint @offset, uint @max_probe_size) => + { + vectors.av_probe_input_buffer2 = FunctionResolver.GetFunctionDelegate("avformat", "av_probe_input_buffer2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_probe_input_buffer2(@pb, @fmt, @url, @logctx, @offset, @max_probe_size); + }; + + vectors.av_probe_input_format = (AVProbeData* @pd, int @is_opened) => + { + vectors.av_probe_input_format = FunctionResolver.GetFunctionDelegate("avformat", "av_probe_input_format", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_probe_input_format(@pd, @is_opened); + }; + + vectors.av_probe_input_format2 = (AVProbeData* @pd, int @is_opened, int* @score_max) => + { + vectors.av_probe_input_format2 = FunctionResolver.GetFunctionDelegate("avformat", "av_probe_input_format2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_probe_input_format2(@pd, @is_opened, @score_max); + }; + + vectors.av_probe_input_format3 = (AVProbeData* @pd, int @is_opened, int* @score_ret) => + { + vectors.av_probe_input_format3 = FunctionResolver.GetFunctionDelegate("avformat", "av_probe_input_format3", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_probe_input_format3(@pd, @is_opened, @score_ret); + }; + + vectors.av_program_add_stream_index = (AVFormatContext* @ac, int @progid, uint @idx) => + { + vectors.av_program_add_stream_index = FunctionResolver.GetFunctionDelegate("avformat", "av_program_add_stream_index", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_program_add_stream_index(@ac, @progid, @idx); + }; + + vectors.av_q2intfloat = (AVRational @q) => + { + vectors.av_q2intfloat = FunctionResolver.GetFunctionDelegate("avutil", "av_q2intfloat", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_q2intfloat(@q); + }; + + vectors.av_read_frame = (AVFormatContext* @s, AVPacket* @pkt) => + { + vectors.av_read_frame = FunctionResolver.GetFunctionDelegate("avformat", "av_read_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_read_frame(@s, @pkt); + }; + + vectors.av_read_image_line = (ushort* @dst, in byte_ptrArray4 @data, in int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component) => + { + vectors.av_read_image_line = FunctionResolver.GetFunctionDelegate("avutil", "av_read_image_line", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_read_image_line(@dst, @data, @linesize, @desc, @x, @y, @c, @w, @read_pal_component); + }; + + vectors.av_read_image_line2 = (void* @dst, in byte_ptrArray4 @data, in int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component, int @dst_element_size) => + { + vectors.av_read_image_line2 = FunctionResolver.GetFunctionDelegate("avutil", "av_read_image_line2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_read_image_line2(@dst, @data, @linesize, @desc, @x, @y, @c, @w, @read_pal_component, @dst_element_size); + }; + + vectors.av_read_pause = (AVFormatContext* @s) => + { + vectors.av_read_pause = FunctionResolver.GetFunctionDelegate("avformat", "av_read_pause", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_read_pause(@s); + }; + + vectors.av_read_play = (AVFormatContext* @s) => + { + vectors.av_read_play = FunctionResolver.GetFunctionDelegate("avformat", "av_read_play", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_read_play(@s); + }; + + vectors.av_realloc = (void* @ptr, ulong @size) => + { + vectors.av_realloc = FunctionResolver.GetFunctionDelegate("avutil", "av_realloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_realloc(@ptr, @size); + }; + + vectors.av_realloc_array = (void* @ptr, ulong @nmemb, ulong @size) => + { + vectors.av_realloc_array = FunctionResolver.GetFunctionDelegate("avutil", "av_realloc_array", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_realloc_array(@ptr, @nmemb, @size); + }; + + vectors.av_realloc_f = (void* @ptr, ulong @nelem, ulong @elsize) => + { + vectors.av_realloc_f = FunctionResolver.GetFunctionDelegate("avutil", "av_realloc_f", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_realloc_f(@ptr, @nelem, @elsize); + }; + + vectors.av_reallocp = (void* @ptr, ulong @size) => + { + vectors.av_reallocp = FunctionResolver.GetFunctionDelegate("avutil", "av_reallocp", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_reallocp(@ptr, @size); + }; + + vectors.av_reallocp_array = (void* @ptr, ulong @nmemb, ulong @size) => + { + vectors.av_reallocp_array = FunctionResolver.GetFunctionDelegate("avutil", "av_reallocp_array", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_reallocp_array(@ptr, @nmemb, @size); + }; + + vectors.av_reduce = (int* @dst_num, int* @dst_den, long @num, long @den, long @max) => + { + vectors.av_reduce = FunctionResolver.GetFunctionDelegate("avutil", "av_reduce", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_reduce(@dst_num, @dst_den, @num, @den, @max); + }; + + vectors.av_rescale = (long @a, long @b, long @c) => + { + vectors.av_rescale = FunctionResolver.GetFunctionDelegate("avutil", "av_rescale", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_rescale(@a, @b, @c); + }; + + vectors.av_rescale_delta = (AVRational @in_tb, long @in_ts, AVRational @fs_tb, int @duration, long* @last, AVRational @out_tb) => + { + vectors.av_rescale_delta = FunctionResolver.GetFunctionDelegate("avutil", "av_rescale_delta", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_rescale_delta(@in_tb, @in_ts, @fs_tb, @duration, @last, @out_tb); + }; + + vectors.av_rescale_q = (long @a, AVRational @bq, AVRational @cq) => + { + vectors.av_rescale_q = FunctionResolver.GetFunctionDelegate("avutil", "av_rescale_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_rescale_q(@a, @bq, @cq); + }; + + vectors.av_rescale_q_rnd = (long @a, AVRational @bq, AVRational @cq, AVRounding @rnd) => + { + vectors.av_rescale_q_rnd = FunctionResolver.GetFunctionDelegate("avutil", "av_rescale_q_rnd", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_rescale_q_rnd(@a, @bq, @cq, @rnd); + }; + + vectors.av_rescale_rnd = (long @a, long @b, long @c, AVRounding @rnd) => + { + vectors.av_rescale_rnd = FunctionResolver.GetFunctionDelegate("avutil", "av_rescale_rnd", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_rescale_rnd(@a, @b, @c, @rnd); + }; + + vectors.av_sample_fmt_is_planar = (AVSampleFormat @sample_fmt) => + { + vectors.av_sample_fmt_is_planar = FunctionResolver.GetFunctionDelegate("avutil", "av_sample_fmt_is_planar", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_sample_fmt_is_planar(@sample_fmt); + }; + + vectors.av_samples_alloc = (byte** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => + { + vectors.av_samples_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_samples_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_samples_alloc(@audio_data, @linesize, @nb_channels, @nb_samples, @sample_fmt, @align); + }; + + vectors.av_samples_alloc_array_and_samples = (byte*** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => + { + vectors.av_samples_alloc_array_and_samples = FunctionResolver.GetFunctionDelegate("avutil", "av_samples_alloc_array_and_samples", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_samples_alloc_array_and_samples(@audio_data, @linesize, @nb_channels, @nb_samples, @sample_fmt, @align); + }; + + vectors.av_samples_copy = (byte** @dst, byte** @src, int @dst_offset, int @src_offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt) => + { + vectors.av_samples_copy = FunctionResolver.GetFunctionDelegate("avutil", "av_samples_copy", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_samples_copy(@dst, @src, @dst_offset, @src_offset, @nb_samples, @nb_channels, @sample_fmt); + }; + + vectors.av_samples_fill_arrays = (byte** @audio_data, int* @linesize, byte* @buf, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => + { + vectors.av_samples_fill_arrays = FunctionResolver.GetFunctionDelegate("avutil", "av_samples_fill_arrays", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_samples_fill_arrays(@audio_data, @linesize, @buf, @nb_channels, @nb_samples, @sample_fmt, @align); + }; + + vectors.av_samples_get_buffer_size = (int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => + { + vectors.av_samples_get_buffer_size = FunctionResolver.GetFunctionDelegate("avutil", "av_samples_get_buffer_size", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_samples_get_buffer_size(@linesize, @nb_channels, @nb_samples, @sample_fmt, @align); + }; + + vectors.av_samples_set_silence = (byte** @audio_data, int @offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt) => + { + vectors.av_samples_set_silence = FunctionResolver.GetFunctionDelegate("avutil", "av_samples_set_silence", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_samples_set_silence(@audio_data, @offset, @nb_samples, @nb_channels, @sample_fmt); + }; + + vectors.av_sdp_create = (AVFormatContext** @ac, int @n_files, byte* @buf, int @size) => + { + vectors.av_sdp_create = FunctionResolver.GetFunctionDelegate("avformat", "av_sdp_create", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_sdp_create(@ac, @n_files, @buf, @size); + }; + + vectors.av_seek_frame = (AVFormatContext* @s, int @stream_index, long @timestamp, int @flags) => + { + vectors.av_seek_frame = FunctionResolver.GetFunctionDelegate("avformat", "av_seek_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_seek_frame(@s, @stream_index, @timestamp, @flags); + }; + + vectors.av_set_options_string = (void* @ctx, string @opts, string @key_val_sep, string @pairs_sep) => + { + vectors.av_set_options_string = FunctionResolver.GetFunctionDelegate("avutil", "av_set_options_string", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_set_options_string(@ctx, @opts, @key_val_sep, @pairs_sep); + }; + + vectors.av_shrink_packet = (AVPacket* @pkt, int @size) => + { + vectors.av_shrink_packet = FunctionResolver.GetFunctionDelegate("avcodec", "av_shrink_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_shrink_packet(@pkt, @size); + }; + + vectors.av_size_mult = (ulong @a, ulong @b, ulong* @r) => + { + vectors.av_size_mult = FunctionResolver.GetFunctionDelegate("avutil", "av_size_mult", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_size_mult(@a, @b, @r); + }; + + vectors.av_strdup = (string @s) => + { + vectors.av_strdup = FunctionResolver.GetFunctionDelegate("avutil", "av_strdup", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_strdup(@s); + }; + + vectors.av_stream_add_side_data = (AVStream* @st, AVPacketSideDataType @type, byte* @data, ulong @size) => + { + vectors.av_stream_add_side_data = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_add_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_stream_add_side_data(@st, @type, @data, @size); + }; + + vectors.av_stream_get_class = () => + { + vectors.av_stream_get_class = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_stream_get_class(); + }; + + vectors.av_stream_get_codec_timebase = (AVStream* @st) => + { + vectors.av_stream_get_codec_timebase = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_get_codec_timebase", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_stream_get_codec_timebase(@st); + }; + + vectors.av_stream_get_end_pts = (AVStream* @st) => + { + vectors.av_stream_get_end_pts = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_get_end_pts", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_stream_get_end_pts(@st); + }; + + vectors.av_stream_get_parser = (AVStream* @s) => + { + vectors.av_stream_get_parser = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_get_parser", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_stream_get_parser(@s); + }; + + vectors.av_stream_get_side_data = (AVStream* @stream, AVPacketSideDataType @type, ulong* @size) => + { + vectors.av_stream_get_side_data = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_get_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_stream_get_side_data(@stream, @type, @size); + }; + + vectors.av_stream_new_side_data = (AVStream* @stream, AVPacketSideDataType @type, ulong @size) => + { + vectors.av_stream_new_side_data = FunctionResolver.GetFunctionDelegate("avformat", "av_stream_new_side_data", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_stream_new_side_data(@stream, @type, @size); + }; + + vectors.av_strerror = (int @errnum, byte* @errbuf, ulong @errbuf_size) => + { + vectors.av_strerror = FunctionResolver.GetFunctionDelegate("avutil", "av_strerror", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_strerror(@errnum, @errbuf, @errbuf_size); + }; + + vectors.av_strndup = (string @s, ulong @len) => + { + vectors.av_strndup = FunctionResolver.GetFunctionDelegate("avutil", "av_strndup", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_strndup(@s, @len); + }; + + vectors.av_sub_q = (AVRational @b, AVRational @c) => + { + vectors.av_sub_q = FunctionResolver.GetFunctionDelegate("avutil", "av_sub_q", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_sub_q(@b, @c); + }; + + vectors.av_tempfile = (string @prefix, byte** @filename, int @log_offset, void* @log_ctx) => + { + vectors.av_tempfile = FunctionResolver.GetFunctionDelegate("avutil", "av_tempfile", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_tempfile(@prefix, @filename, @log_offset, @log_ctx); + }; + + vectors.av_timecode_adjust_ntsc_framenum2 = (int @framenum, int @fps) => + { + vectors.av_timecode_adjust_ntsc_framenum2 = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_adjust_ntsc_framenum2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_adjust_ntsc_framenum2(@framenum, @fps); + }; + + vectors.av_timecode_check_frame_rate = (AVRational @rate) => + { + vectors.av_timecode_check_frame_rate = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_check_frame_rate", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_check_frame_rate(@rate); + }; + + vectors.av_timecode_get_smpte = (AVRational @rate, int @drop, int @hh, int @mm, int @ss, int @ff) => + { + vectors.av_timecode_get_smpte = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_get_smpte", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_get_smpte(@rate, @drop, @hh, @mm, @ss, @ff); + }; + + vectors.av_timecode_get_smpte_from_framenum = (AVTimecode* @tc, int @framenum) => + { + vectors.av_timecode_get_smpte_from_framenum = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_get_smpte_from_framenum", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_get_smpte_from_framenum(@tc, @framenum); + }; + + vectors.av_timecode_init = (AVTimecode* @tc, AVRational @rate, int @flags, int @frame_start, void* @log_ctx) => + { + vectors.av_timecode_init = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_init(@tc, @rate, @flags, @frame_start, @log_ctx); + }; + + vectors.av_timecode_init_from_components = (AVTimecode* @tc, AVRational @rate, int @flags, int @hh, int @mm, int @ss, int @ff, void* @log_ctx) => + { + vectors.av_timecode_init_from_components = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_init_from_components", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_init_from_components(@tc, @rate, @flags, @hh, @mm, @ss, @ff, @log_ctx); + }; + + vectors.av_timecode_init_from_string = (AVTimecode* @tc, AVRational @rate, string @str, void* @log_ctx) => + { + vectors.av_timecode_init_from_string = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_init_from_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_init_from_string(@tc, @rate, @str, @log_ctx); + }; + + vectors.av_timecode_make_mpeg_tc_string = (byte* @buf, uint @tc25bit) => + { + vectors.av_timecode_make_mpeg_tc_string = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_make_mpeg_tc_string", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_make_mpeg_tc_string(@buf, @tc25bit); + }; + + vectors.av_timecode_make_smpte_tc_string = (byte* @buf, uint @tcsmpte, int @prevent_df) => + { + vectors.av_timecode_make_smpte_tc_string = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_make_smpte_tc_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_make_smpte_tc_string(@buf, @tcsmpte, @prevent_df); + }; + + vectors.av_timecode_make_smpte_tc_string2 = (byte* @buf, AVRational @rate, uint @tcsmpte, int @prevent_df, int @skip_field) => + { + vectors.av_timecode_make_smpte_tc_string2 = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_make_smpte_tc_string2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_make_smpte_tc_string2(@buf, @rate, @tcsmpte, @prevent_df, @skip_field); + }; + + vectors.av_timecode_make_string = (AVTimecode* @tc, byte* @buf, int @framenum) => + { + vectors.av_timecode_make_string = FunctionResolver.GetFunctionDelegate("avutil", "av_timecode_make_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_timecode_make_string(@tc, @buf, @framenum); + }; + + vectors.av_tree_destroy = (AVTreeNode* @t) => + { + vectors.av_tree_destroy = FunctionResolver.GetFunctionDelegate("avutil", "av_tree_destroy", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_tree_destroy(@t); + }; + + vectors.av_tree_enumerate = (AVTreeNode* @t, void* @opaque, av_tree_enumerate_cmp_func @cmp, av_tree_enumerate_enu_func @enu) => + { + vectors.av_tree_enumerate = FunctionResolver.GetFunctionDelegate("avutil", "av_tree_enumerate", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_tree_enumerate(@t, @opaque, @cmp, @enu); + }; + + vectors.av_tree_find = (AVTreeNode* @root, void* @key, av_tree_find_cmp_func @cmp, ref void_ptrArray2 @next) => + { + vectors.av_tree_find = FunctionResolver.GetFunctionDelegate("avutil", "av_tree_find", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_tree_find(@root, @key, @cmp, ref @next); + }; + + vectors.av_tree_insert = (AVTreeNode** @rootp, void* @key, av_tree_insert_cmp_func @cmp, AVTreeNode** @next) => + { + vectors.av_tree_insert = FunctionResolver.GetFunctionDelegate("avutil", "av_tree_insert", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_tree_insert(@rootp, @key, @cmp, @next); + }; + + vectors.av_tree_node_alloc = () => + { + vectors.av_tree_node_alloc = FunctionResolver.GetFunctionDelegate("avutil", "av_tree_node_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_tree_node_alloc(); + }; + + vectors.av_url_split = (byte* @proto, int @proto_size, byte* @authorization, int @authorization_size, byte* @hostname, int @hostname_size, int* @port_ptr, byte* @path, int @path_size, string @url) => + { + vectors.av_url_split = FunctionResolver.GetFunctionDelegate("avformat", "av_url_split", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_url_split(@proto, @proto_size, @authorization, @authorization_size, @hostname, @hostname_size, @port_ptr, @path, @path_size, @url); + }; + + vectors.av_usleep = (uint @usec) => + { + vectors.av_usleep = FunctionResolver.GetFunctionDelegate("avutil", "av_usleep", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.av_usleep(@usec); + }; + + vectors.av_version_info = () => + { + vectors.av_version_info = FunctionResolver.GetFunctionDelegate("avutil", "av_version_info", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_version_info(); + }; + + vectors.av_vlog = (void* @avcl, int @level, string @fmt, byte* @vl) => + { + vectors.av_vlog = FunctionResolver.GetFunctionDelegate("avutil", "av_vlog", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_vlog(@avcl, @level, @fmt, @vl); + }; + + vectors.av_write_frame = (AVFormatContext* @s, AVPacket* @pkt) => + { + vectors.av_write_frame = FunctionResolver.GetFunctionDelegate("avformat", "av_write_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_write_frame(@s, @pkt); + }; + + vectors.av_write_image_line = (ushort* @src, ref byte_ptrArray4 @data, in int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w) => + { + vectors.av_write_image_line = FunctionResolver.GetFunctionDelegate("avutil", "av_write_image_line", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.av_write_image_line(@src, ref @data, @linesize, @desc, @x, @y, @c, @w); + }; + + vectors.av_write_image_line2 = (void* @src, ref byte_ptrArray4 @data, in int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @src_element_size) => + { + vectors.av_write_image_line2 = FunctionResolver.GetFunctionDelegate("avutil", "av_write_image_line2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.av_write_image_line2(@src, ref @data, @linesize, @desc, @x, @y, @c, @w, @src_element_size); + }; + + vectors.av_write_trailer = (AVFormatContext* @s) => + { + vectors.av_write_trailer = FunctionResolver.GetFunctionDelegate("avformat", "av_write_trailer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_write_trailer(@s); + }; + + vectors.av_write_uncoded_frame = (AVFormatContext* @s, int @stream_index, AVFrame* @frame) => + { + vectors.av_write_uncoded_frame = FunctionResolver.GetFunctionDelegate("avformat", "av_write_uncoded_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_write_uncoded_frame(@s, @stream_index, @frame); + }; + + vectors.av_write_uncoded_frame_query = (AVFormatContext* @s, int @stream_index) => + { + vectors.av_write_uncoded_frame_query = FunctionResolver.GetFunctionDelegate("avformat", "av_write_uncoded_frame_query", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_write_uncoded_frame_query(@s, @stream_index); + }; + + vectors.av_xiphlacing = (byte* @s, uint @v) => + { + vectors.av_xiphlacing = FunctionResolver.GetFunctionDelegate("avcodec", "av_xiphlacing", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.av_xiphlacing(@s, @v); + }; + + vectors.avcodec_align_dimensions = (AVCodecContext* @s, int* @width, int* @height) => + { + vectors.avcodec_align_dimensions = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_align_dimensions", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avcodec_align_dimensions(@s, @width, @height); + }; + + vectors.avcodec_align_dimensions2 = (AVCodecContext* @s, int* @width, int* @height, ref int_array8 @linesize_align) => + { + vectors.avcodec_align_dimensions2 = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_align_dimensions2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avcodec_align_dimensions2(@s, @width, @height, ref @linesize_align); + }; + + vectors.avcodec_alloc_context3 = (AVCodec* @codec) => + { + vectors.avcodec_alloc_context3 = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_alloc_context3", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_alloc_context3(@codec); + }; + + vectors.avcodec_chroma_pos_to_enum = (int @xpos, int @ypos) => + { + vectors.avcodec_chroma_pos_to_enum = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_chroma_pos_to_enum", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_chroma_pos_to_enum(@xpos, @ypos); + }; + + vectors.avcodec_close = (AVCodecContext* @avctx) => + { + vectors.avcodec_close = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_close", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_close(@avctx); + }; + + vectors.avcodec_configuration = () => + { + vectors.avcodec_configuration = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_configuration(); + }; + + vectors.avcodec_decode_subtitle2 = (AVCodecContext* @avctx, AVSubtitle* @sub, int* @got_sub_ptr, AVPacket* @avpkt) => + { + vectors.avcodec_decode_subtitle2 = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_decode_subtitle2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_decode_subtitle2(@avctx, @sub, @got_sub_ptr, @avpkt); + }; + + vectors.avcodec_default_execute = (AVCodecContext* @c, avcodec_default_execute_func_func @func, void* @arg, int* @ret, int @count, int @size) => + { + vectors.avcodec_default_execute = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_default_execute", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_default_execute(@c, @func, @arg, @ret, @count, @size); + }; + + vectors.avcodec_default_execute2 = (AVCodecContext* @c, avcodec_default_execute2_func_func @func, void* @arg, int* @ret, int @count) => + { + vectors.avcodec_default_execute2 = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_default_execute2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_default_execute2(@c, @func, @arg, @ret, @count); + }; + + vectors.avcodec_default_get_buffer2 = (AVCodecContext* @s, AVFrame* @frame, int @flags) => + { + vectors.avcodec_default_get_buffer2 = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_default_get_buffer2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_default_get_buffer2(@s, @frame, @flags); + }; + + vectors.avcodec_default_get_encode_buffer = (AVCodecContext* @s, AVPacket* @pkt, int @flags) => + { + vectors.avcodec_default_get_encode_buffer = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_default_get_encode_buffer", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_default_get_encode_buffer(@s, @pkt, @flags); + }; + + vectors.avcodec_default_get_format = (AVCodecContext* @s, AVPixelFormat* @fmt) => + { + vectors.avcodec_default_get_format = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_default_get_format", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_default_get_format(@s, @fmt); + }; + + vectors.avcodec_descriptor_get = (AVCodecID @id) => + { + vectors.avcodec_descriptor_get = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_descriptor_get", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_descriptor_get(@id); + }; + + vectors.avcodec_descriptor_get_by_name = (string @name) => + { + vectors.avcodec_descriptor_get_by_name = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_descriptor_get_by_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_descriptor_get_by_name(@name); + }; + + vectors.avcodec_descriptor_next = (AVCodecDescriptor* @prev) => + { + vectors.avcodec_descriptor_next = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_descriptor_next", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_descriptor_next(@prev); + }; + + vectors.avcodec_encode_subtitle = (AVCodecContext* @avctx, byte* @buf, int @buf_size, AVSubtitle* @sub) => + { + vectors.avcodec_encode_subtitle = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_encode_subtitle", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_encode_subtitle(@avctx, @buf, @buf_size, @sub); + }; + + vectors.avcodec_enum_to_chroma_pos = (int* @xpos, int* @ypos, AVChromaLocation @pos) => + { + vectors.avcodec_enum_to_chroma_pos = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_enum_to_chroma_pos", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_enum_to_chroma_pos(@xpos, @ypos, @pos); + }; + + vectors.avcodec_fill_audio_frame = (AVFrame* @frame, int @nb_channels, AVSampleFormat @sample_fmt, byte* @buf, int @buf_size, int @align) => + { + vectors.avcodec_fill_audio_frame = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_fill_audio_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_fill_audio_frame(@frame, @nb_channels, @sample_fmt, @buf, @buf_size, @align); + }; + + vectors.avcodec_find_best_pix_fmt_of_list = (AVPixelFormat* @pix_fmt_list, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr) => + { + vectors.avcodec_find_best_pix_fmt_of_list = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_find_best_pix_fmt_of_list", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_find_best_pix_fmt_of_list(@pix_fmt_list, @src_pix_fmt, @has_alpha, @loss_ptr); + }; + + vectors.avcodec_find_decoder = (AVCodecID @id) => + { + vectors.avcodec_find_decoder = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_find_decoder", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_find_decoder(@id); + }; + + vectors.avcodec_find_decoder_by_name = (string @name) => + { + vectors.avcodec_find_decoder_by_name = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_find_decoder_by_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_find_decoder_by_name(@name); + }; + + vectors.avcodec_find_encoder = (AVCodecID @id) => + { + vectors.avcodec_find_encoder = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_find_encoder", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_find_encoder(@id); + }; + + vectors.avcodec_find_encoder_by_name = (string @name) => + { + vectors.avcodec_find_encoder_by_name = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_find_encoder_by_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_find_encoder_by_name(@name); + }; + + vectors.avcodec_flush_buffers = (AVCodecContext* @avctx) => + { + vectors.avcodec_flush_buffers = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_flush_buffers", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avcodec_flush_buffers(@avctx); + }; + + vectors.avcodec_free_context = (AVCodecContext** @avctx) => + { + vectors.avcodec_free_context = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_free_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avcodec_free_context(@avctx); + }; + + vectors.avcodec_get_class = () => + { + vectors.avcodec_get_class = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_class(); + }; + + vectors.avcodec_get_frame_class = () => + { + vectors.avcodec_get_frame_class = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_frame_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_frame_class(); + }; + + vectors.avcodec_get_hw_config = (AVCodec* @codec, int @index) => + { + vectors.avcodec_get_hw_config = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_hw_config", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_hw_config(@codec, @index); + }; + + vectors.avcodec_get_hw_frames_parameters = (AVCodecContext* @avctx, AVBufferRef* @device_ref, AVPixelFormat @hw_pix_fmt, AVBufferRef** @out_frames_ref) => + { + vectors.avcodec_get_hw_frames_parameters = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_hw_frames_parameters", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_hw_frames_parameters(@avctx, @device_ref, @hw_pix_fmt, @out_frames_ref); + }; + + vectors.avcodec_get_name = (AVCodecID @id) => + { + vectors.avcodec_get_name = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_name(@id); + }; + + vectors.avcodec_get_subtitle_rect_class = () => + { + vectors.avcodec_get_subtitle_rect_class = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_subtitle_rect_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_subtitle_rect_class(); + }; + + vectors.avcodec_get_type = (AVCodecID @codec_id) => + { + vectors.avcodec_get_type = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_get_type", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_get_type(@codec_id); + }; + + vectors.avcodec_is_open = (AVCodecContext* @s) => + { + vectors.avcodec_is_open = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_is_open", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_is_open(@s); + }; + + vectors.avcodec_license = () => + { + vectors.avcodec_license = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_license", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_license(); + }; + + vectors.avcodec_open2 = (AVCodecContext* @avctx, AVCodec* @codec, AVDictionary** @options) => + { + vectors.avcodec_open2 = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_open2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_open2(@avctx, @codec, @options); + }; + + vectors.avcodec_parameters_alloc = () => + { + vectors.avcodec_parameters_alloc = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_parameters_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_parameters_alloc(); + }; + + vectors.avcodec_parameters_copy = (AVCodecParameters* @dst, AVCodecParameters* @src) => + { + vectors.avcodec_parameters_copy = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_parameters_copy", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_parameters_copy(@dst, @src); + }; + + vectors.avcodec_parameters_free = (AVCodecParameters** @par) => + { + vectors.avcodec_parameters_free = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_parameters_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avcodec_parameters_free(@par); + }; + + vectors.avcodec_parameters_from_context = (AVCodecParameters* @par, AVCodecContext* @codec) => + { + vectors.avcodec_parameters_from_context = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_parameters_from_context", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_parameters_from_context(@par, @codec); + }; + + vectors.avcodec_parameters_to_context = (AVCodecContext* @codec, AVCodecParameters* @par) => + { + vectors.avcodec_parameters_to_context = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_parameters_to_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_parameters_to_context(@codec, @par); + }; + + vectors.avcodec_pix_fmt_to_codec_tag = (AVPixelFormat @pix_fmt) => + { + vectors.avcodec_pix_fmt_to_codec_tag = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_pix_fmt_to_codec_tag", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_pix_fmt_to_codec_tag(@pix_fmt); + }; + + vectors.avcodec_profile_name = (AVCodecID @codec_id, int @profile) => + { + vectors.avcodec_profile_name = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_profile_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_profile_name(@codec_id, @profile); + }; + + vectors.avcodec_receive_frame = (AVCodecContext* @avctx, AVFrame* @frame) => + { + vectors.avcodec_receive_frame = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_receive_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_receive_frame(@avctx, @frame); + }; + + vectors.avcodec_receive_packet = (AVCodecContext* @avctx, AVPacket* @avpkt) => + { + vectors.avcodec_receive_packet = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_receive_packet", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avcodec_receive_packet(@avctx, @avpkt); + }; + + vectors.avcodec_send_frame = (AVCodecContext* @avctx, AVFrame* @frame) => + { + vectors.avcodec_send_frame = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_send_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_send_frame(@avctx, @frame); + }; + + vectors.avcodec_send_packet = (AVCodecContext* @avctx, AVPacket* @avpkt) => + { + vectors.avcodec_send_packet = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_send_packet", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_send_packet(@avctx, @avpkt); + }; + + vectors.avcodec_string = (byte* @buf, int @buf_size, AVCodecContext* @enc, int @encode) => + { + vectors.avcodec_string = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_string", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avcodec_string(@buf, @buf_size, @enc, @encode); + }; + + vectors.avcodec_version = () => + { + vectors.avcodec_version = FunctionResolver.GetFunctionDelegate("avcodec", "avcodec_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avcodec_version(); + }; + + vectors.avdevice_app_to_dev_control_message = (AVFormatContext* @s, AVAppToDevMessageType @type, void* @data, ulong @data_size) => + { + vectors.avdevice_app_to_dev_control_message = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_app_to_dev_control_message", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avdevice_app_to_dev_control_message(@s, @type, @data, @data_size); + }; + + vectors.avdevice_capabilities_create = (AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s, AVDictionary** @device_options) => + { + vectors.avdevice_capabilities_create = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_capabilities_create", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_capabilities_create(@caps, @s, @device_options); + }; + + vectors.avdevice_capabilities_free = (AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s) => + { + vectors.avdevice_capabilities_free = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_capabilities_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avdevice_capabilities_free(@caps, @s); + }; + + vectors.avdevice_configuration = () => + { + vectors.avdevice_configuration = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_configuration(); + }; + + vectors.avdevice_dev_to_app_control_message = (AVFormatContext* @s, AVDevToAppMessageType @type, void* @data, ulong @data_size) => + { + vectors.avdevice_dev_to_app_control_message = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_dev_to_app_control_message", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_dev_to_app_control_message(@s, @type, @data, @data_size); + }; + + vectors.avdevice_free_list_devices = (AVDeviceInfoList** @device_list) => + { + vectors.avdevice_free_list_devices = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_free_list_devices", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avdevice_free_list_devices(@device_list); + }; + + vectors.avdevice_license = () => + { + vectors.avdevice_license = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_license(); + }; + + vectors.avdevice_list_devices = (AVFormatContext* @s, AVDeviceInfoList** @device_list) => + { + vectors.avdevice_list_devices = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_list_devices", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_list_devices(@s, @device_list); + }; + + vectors.avdevice_list_input_sources = (AVInputFormat* @device, string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list) => + { + vectors.avdevice_list_input_sources = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_list_input_sources", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_list_input_sources(@device, @device_name, @device_options, @device_list); + }; + + vectors.avdevice_list_output_sinks = (AVOutputFormat* @device, string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list) => + { + vectors.avdevice_list_output_sinks = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_list_output_sinks", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_list_output_sinks(@device, @device_name, @device_options, @device_list); + }; + + vectors.avdevice_register_all = () => + { + vectors.avdevice_register_all = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_register_all", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avdevice_register_all(); + }; + + vectors.avdevice_version = () => + { + vectors.avdevice_version = FunctionResolver.GetFunctionDelegate("avdevice", "avdevice_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avdevice_version(); + }; + + vectors.avfilter_config_links = (AVFilterContext* @filter) => + { + vectors.avfilter_config_links = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_config_links", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_config_links(@filter); + }; + + vectors.avfilter_configuration = () => + { + vectors.avfilter_configuration = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_configuration(); + }; + + vectors.avfilter_filter_pad_count = (AVFilter* @filter, int @is_output) => + { + vectors.avfilter_filter_pad_count = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_filter_pad_count", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_filter_pad_count(@filter, @is_output); + }; + + vectors.avfilter_free = (AVFilterContext* @filter) => + { + vectors.avfilter_free = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avfilter_free(@filter); + }; + + vectors.avfilter_get_by_name = (string @name) => + { + vectors.avfilter_get_by_name = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_get_by_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avfilter_get_by_name(@name); + }; + + vectors.avfilter_get_class = () => + { + vectors.avfilter_get_class = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_get_class(); + }; + + vectors.avfilter_graph_alloc = () => + { + vectors.avfilter_graph_alloc = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_alloc(); + }; + + vectors.avfilter_graph_alloc_filter = (AVFilterGraph* @graph, AVFilter* @filter, string @name) => + { + vectors.avfilter_graph_alloc_filter = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_alloc_filter", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_alloc_filter(@graph, @filter, @name); + }; + + vectors.avfilter_graph_config = (AVFilterGraph* @graphctx, void* @log_ctx) => + { + vectors.avfilter_graph_config = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_config", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_config(@graphctx, @log_ctx); + }; + + vectors.avfilter_graph_create_filter = (AVFilterContext** @filt_ctx, AVFilter* @filt, string @name, string @args, void* @opaque, AVFilterGraph* @graph_ctx) => + { + vectors.avfilter_graph_create_filter = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_create_filter", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_create_filter(@filt_ctx, @filt, @name, @args, @opaque, @graph_ctx); + }; + + vectors.avfilter_graph_dump = (AVFilterGraph* @graph, string @options) => + { + vectors.avfilter_graph_dump = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_dump", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_dump(@graph, @options); + }; + + vectors.avfilter_graph_free = (AVFilterGraph** @graph) => + { + vectors.avfilter_graph_free = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avfilter_graph_free(@graph); + }; + + vectors.avfilter_graph_get_filter = (AVFilterGraph* @graph, string @name) => + { + vectors.avfilter_graph_get_filter = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_get_filter", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_get_filter(@graph, @name); + }; + + vectors.avfilter_graph_parse = (AVFilterGraph* @graph, string @filters, AVFilterInOut* @inputs, AVFilterInOut* @outputs, void* @log_ctx) => + { + vectors.avfilter_graph_parse = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_parse", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_parse(@graph, @filters, @inputs, @outputs, @log_ctx); + }; + + vectors.avfilter_graph_parse_ptr = (AVFilterGraph* @graph, string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs, void* @log_ctx) => + { + vectors.avfilter_graph_parse_ptr = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_parse_ptr", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_parse_ptr(@graph, @filters, @inputs, @outputs, @log_ctx); + }; + + vectors.avfilter_graph_parse2 = (AVFilterGraph* @graph, string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs) => + { + vectors.avfilter_graph_parse2 = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_parse2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_parse2(@graph, @filters, @inputs, @outputs); + }; + + vectors.avfilter_graph_queue_command = (AVFilterGraph* @graph, string @target, string @cmd, string @arg, int @flags, double @ts) => + { + vectors.avfilter_graph_queue_command = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_queue_command", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_queue_command(@graph, @target, @cmd, @arg, @flags, @ts); + }; + + vectors.avfilter_graph_request_oldest = (AVFilterGraph* @graph) => + { + vectors.avfilter_graph_request_oldest = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_request_oldest", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_request_oldest(@graph); + }; + + vectors.avfilter_graph_send_command = (AVFilterGraph* @graph, string @target, string @cmd, string @arg, byte* @res, int @res_len, int @flags) => + { + vectors.avfilter_graph_send_command = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_send_command", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avfilter_graph_send_command(@graph, @target, @cmd, @arg, @res, @res_len, @flags); + }; + + vectors.avfilter_graph_set_auto_convert = (AVFilterGraph* @graph, uint @flags) => + { + vectors.avfilter_graph_set_auto_convert = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_graph_set_auto_convert", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avfilter_graph_set_auto_convert(@graph, @flags); + }; + + vectors.avfilter_init_dict = (AVFilterContext* @ctx, AVDictionary** @options) => + { + vectors.avfilter_init_dict = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_init_dict", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_init_dict(@ctx, @options); + }; + + vectors.avfilter_init_str = (AVFilterContext* @ctx, string @args) => + { + vectors.avfilter_init_str = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_init_str", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_init_str(@ctx, @args); + }; + + vectors.avfilter_inout_alloc = () => + { + vectors.avfilter_inout_alloc = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_inout_alloc", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_inout_alloc(); + }; + + vectors.avfilter_inout_free = (AVFilterInOut** @inout) => + { + vectors.avfilter_inout_free = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_inout_free", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avfilter_inout_free(@inout); + }; + + vectors.avfilter_insert_filter = (AVFilterLink* @link, AVFilterContext* @filt, uint @filt_srcpad_idx, uint @filt_dstpad_idx) => + { + vectors.avfilter_insert_filter = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_insert_filter", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_insert_filter(@link, @filt, @filt_srcpad_idx, @filt_dstpad_idx); + }; + + vectors.avfilter_license = () => + { + vectors.avfilter_license = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_license(); + }; + + vectors.avfilter_link = (AVFilterContext* @src, uint @srcpad, AVFilterContext* @dst, uint @dstpad) => + { + vectors.avfilter_link = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_link", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_link(@src, @srcpad, @dst, @dstpad); + }; + + vectors.avfilter_link_free = (AVFilterLink** @link) => + { + vectors.avfilter_link_free = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_link_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avfilter_link_free(@link); + }; + + vectors.avfilter_pad_count = (AVFilterPad* @pads) => + { + vectors.avfilter_pad_count = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_pad_count", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_pad_count(@pads); + }; + + vectors.avfilter_pad_get_name = (AVFilterPad* @pads, int @pad_idx) => + { + vectors.avfilter_pad_get_name = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_pad_get_name", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avfilter_pad_get_name(@pads, @pad_idx); + }; + + vectors.avfilter_pad_get_type = (AVFilterPad* @pads, int @pad_idx) => + { + vectors.avfilter_pad_get_type = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_pad_get_type", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_pad_get_type(@pads, @pad_idx); + }; + + vectors.avfilter_process_command = (AVFilterContext* @filter, string @cmd, string @arg, byte* @res, int @res_len, int @flags) => + { + vectors.avfilter_process_command = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_process_command", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_process_command(@filter, @cmd, @arg, @res, @res_len, @flags); + }; + + vectors.avfilter_version = () => + { + vectors.avfilter_version = FunctionResolver.GetFunctionDelegate("avfilter", "avfilter_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avfilter_version(); + }; + + vectors.avformat_alloc_context = () => + { + vectors.avformat_alloc_context = FunctionResolver.GetFunctionDelegate("avformat", "avformat_alloc_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_alloc_context(); + }; + + vectors.avformat_alloc_output_context2 = (AVFormatContext** @ctx, AVOutputFormat* @oformat, string @format_name, string @filename) => + { + vectors.avformat_alloc_output_context2 = FunctionResolver.GetFunctionDelegate("avformat", "avformat_alloc_output_context2", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avformat_alloc_output_context2(@ctx, @oformat, @format_name, @filename); + }; + + vectors.avformat_close_input = (AVFormatContext** @s) => + { + vectors.avformat_close_input = FunctionResolver.GetFunctionDelegate("avformat", "avformat_close_input", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avformat_close_input(@s); + }; + + vectors.avformat_configuration = () => + { + vectors.avformat_configuration = FunctionResolver.GetFunctionDelegate("avformat", "avformat_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_configuration(); + }; + + vectors.avformat_find_stream_info = (AVFormatContext* @ic, AVDictionary** @options) => + { + vectors.avformat_find_stream_info = FunctionResolver.GetFunctionDelegate("avformat", "avformat_find_stream_info", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_find_stream_info(@ic, @options); + }; + + vectors.avformat_flush = (AVFormatContext* @s) => + { + vectors.avformat_flush = FunctionResolver.GetFunctionDelegate("avformat", "avformat_flush", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_flush(@s); + }; + + vectors.avformat_free_context = (AVFormatContext* @s) => + { + vectors.avformat_free_context = FunctionResolver.GetFunctionDelegate("avformat", "avformat_free_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avformat_free_context(@s); + }; + + vectors.avformat_get_class = () => + { + vectors.avformat_get_class = FunctionResolver.GetFunctionDelegate("avformat", "avformat_get_class", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avformat_get_class(); + }; + + vectors.avformat_get_mov_audio_tags = () => + { + vectors.avformat_get_mov_audio_tags = FunctionResolver.GetFunctionDelegate("avformat", "avformat_get_mov_audio_tags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_get_mov_audio_tags(); + }; + + vectors.avformat_get_mov_video_tags = () => + { + vectors.avformat_get_mov_video_tags = FunctionResolver.GetFunctionDelegate("avformat", "avformat_get_mov_video_tags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_get_mov_video_tags(); + }; + + vectors.avformat_get_riff_audio_tags = () => + { + vectors.avformat_get_riff_audio_tags = FunctionResolver.GetFunctionDelegate("avformat", "avformat_get_riff_audio_tags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_get_riff_audio_tags(); + }; + + vectors.avformat_get_riff_video_tags = () => + { + vectors.avformat_get_riff_video_tags = FunctionResolver.GetFunctionDelegate("avformat", "avformat_get_riff_video_tags", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_get_riff_video_tags(); + }; + + vectors.avformat_index_get_entries_count = (AVStream* @st) => + { + vectors.avformat_index_get_entries_count = FunctionResolver.GetFunctionDelegate("avformat", "avformat_index_get_entries_count", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_index_get_entries_count(@st); + }; + + vectors.avformat_index_get_entry = (AVStream* @st, int @idx) => + { + vectors.avformat_index_get_entry = FunctionResolver.GetFunctionDelegate("avformat", "avformat_index_get_entry", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avformat_index_get_entry(@st, @idx); + }; + + vectors.avformat_index_get_entry_from_timestamp = (AVStream* @st, long @wanted_timestamp, int @flags) => + { + vectors.avformat_index_get_entry_from_timestamp = FunctionResolver.GetFunctionDelegate("avformat", "avformat_index_get_entry_from_timestamp", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_index_get_entry_from_timestamp(@st, @wanted_timestamp, @flags); + }; + + vectors.avformat_init_output = (AVFormatContext* @s, AVDictionary** @options) => + { + vectors.avformat_init_output = FunctionResolver.GetFunctionDelegate("avformat", "avformat_init_output", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_init_output(@s, @options); + }; + + vectors.avformat_license = () => + { + vectors.avformat_license = FunctionResolver.GetFunctionDelegate("avformat", "avformat_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_license(); + }; + + vectors.avformat_match_stream_specifier = (AVFormatContext* @s, AVStream* @st, string @spec) => + { + vectors.avformat_match_stream_specifier = FunctionResolver.GetFunctionDelegate("avformat", "avformat_match_stream_specifier", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_match_stream_specifier(@s, @st, @spec); + }; + + vectors.avformat_network_deinit = () => + { + vectors.avformat_network_deinit = FunctionResolver.GetFunctionDelegate("avformat", "avformat_network_deinit", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avformat_network_deinit(); + }; + + vectors.avformat_network_init = () => + { + vectors.avformat_network_init = FunctionResolver.GetFunctionDelegate("avformat", "avformat_network_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_network_init(); + }; + + vectors.avformat_new_stream = (AVFormatContext* @s, AVCodec* @c) => + { + vectors.avformat_new_stream = FunctionResolver.GetFunctionDelegate("avformat", "avformat_new_stream", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_new_stream(@s, @c); + }; + + vectors.avformat_open_input = (AVFormatContext** @ps, string @url, AVInputFormat* @fmt, AVDictionary** @options) => + { + vectors.avformat_open_input = FunctionResolver.GetFunctionDelegate("avformat", "avformat_open_input", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_open_input(@ps, @url, @fmt, @options); + }; + + vectors.avformat_query_codec = (AVOutputFormat* @ofmt, AVCodecID @codec_id, int @std_compliance) => + { + vectors.avformat_query_codec = FunctionResolver.GetFunctionDelegate("avformat", "avformat_query_codec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_query_codec(@ofmt, @codec_id, @std_compliance); + }; + + vectors.avformat_queue_attached_pictures = (AVFormatContext* @s) => + { + vectors.avformat_queue_attached_pictures = FunctionResolver.GetFunctionDelegate("avformat", "avformat_queue_attached_pictures", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avformat_queue_attached_pictures(@s); + }; + + vectors.avformat_seek_file = (AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags) => + { + vectors.avformat_seek_file = FunctionResolver.GetFunctionDelegate("avformat", "avformat_seek_file", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_seek_file(@s, @stream_index, @min_ts, @ts, @max_ts, @flags); + }; + + vectors.avformat_transfer_internal_stream_timing_info = (AVOutputFormat* @ofmt, AVStream* @ost, AVStream* @ist, AVTimebaseSource @copy_tb) => + { + vectors.avformat_transfer_internal_stream_timing_info = FunctionResolver.GetFunctionDelegate("avformat", "avformat_transfer_internal_stream_timing_info", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_transfer_internal_stream_timing_info(@ofmt, @ost, @ist, @copy_tb); + }; + + vectors.avformat_version = () => + { + vectors.avformat_version = FunctionResolver.GetFunctionDelegate("avformat", "avformat_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_version(); + }; + + vectors.avformat_write_header = (AVFormatContext* @s, AVDictionary** @options) => + { + vectors.avformat_write_header = FunctionResolver.GetFunctionDelegate("avformat", "avformat_write_header", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avformat_write_header(@s, @options); + }; + + vectors.avio_accept = (AVIOContext* @s, AVIOContext** @c) => + { + vectors.avio_accept = FunctionResolver.GetFunctionDelegate("avformat", "avio_accept", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_accept(@s, @c); + }; + + vectors.avio_alloc_context = (byte* @buffer, int @buffer_size, int @write_flag, void* @opaque, avio_alloc_context_read_packet_func @read_packet, avio_alloc_context_write_packet_func @write_packet, avio_alloc_context_seek_func @seek) => + { + vectors.avio_alloc_context = FunctionResolver.GetFunctionDelegate("avformat", "avio_alloc_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_alloc_context(@buffer, @buffer_size, @write_flag, @opaque, @read_packet, @write_packet, @seek); + }; + + vectors.avio_check = (string @url, int @flags) => + { + vectors.avio_check = FunctionResolver.GetFunctionDelegate("avformat", "avio_check", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_check(@url, @flags); + }; + + vectors.avio_close = (AVIOContext* @s) => + { + vectors.avio_close = FunctionResolver.GetFunctionDelegate("avformat", "avio_close", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_close(@s); + }; + + vectors.avio_close_dir = (AVIODirContext** @s) => + { + vectors.avio_close_dir = FunctionResolver.GetFunctionDelegate("avformat", "avio_close_dir", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_close_dir(@s); + }; + + vectors.avio_close_dyn_buf = (AVIOContext* @s, byte** @pbuffer) => + { + vectors.avio_close_dyn_buf = FunctionResolver.GetFunctionDelegate("avformat", "avio_close_dyn_buf", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_close_dyn_buf(@s, @pbuffer); + }; + + vectors.avio_closep = (AVIOContext** @s) => + { + vectors.avio_closep = FunctionResolver.GetFunctionDelegate("avformat", "avio_closep", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_closep(@s); + }; + + vectors.avio_context_free = (AVIOContext** @s) => + { + vectors.avio_context_free = FunctionResolver.GetFunctionDelegate("avformat", "avio_context_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_context_free(@s); + }; + + vectors.avio_enum_protocols = (void** @opaque, int @output) => + { + vectors.avio_enum_protocols = FunctionResolver.GetFunctionDelegate("avformat", "avio_enum_protocols", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_enum_protocols(@opaque, @output); + }; + + vectors.avio_feof = (AVIOContext* @s) => + { + vectors.avio_feof = FunctionResolver.GetFunctionDelegate("avformat", "avio_feof", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_feof(@s); + }; + + vectors.avio_find_protocol_name = (string @url) => + { + vectors.avio_find_protocol_name = FunctionResolver.GetFunctionDelegate("avformat", "avio_find_protocol_name", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_find_protocol_name(@url); + }; + + vectors.avio_flush = (AVIOContext* @s) => + { + vectors.avio_flush = FunctionResolver.GetFunctionDelegate("avformat", "avio_flush", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_flush(@s); + }; + + vectors.avio_free_directory_entry = (AVIODirEntry** @entry) => + { + vectors.avio_free_directory_entry = FunctionResolver.GetFunctionDelegate("avformat", "avio_free_directory_entry", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avio_free_directory_entry(@entry); + }; + + vectors.avio_get_dyn_buf = (AVIOContext* @s, byte** @pbuffer) => + { + vectors.avio_get_dyn_buf = FunctionResolver.GetFunctionDelegate("avformat", "avio_get_dyn_buf", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_get_dyn_buf(@s, @pbuffer); + }; + + vectors.avio_get_str = (AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => + { + vectors.avio_get_str = FunctionResolver.GetFunctionDelegate("avformat", "avio_get_str", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_get_str(@pb, @maxlen, @buf, @buflen); + }; + + vectors.avio_get_str16be = (AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => + { + vectors.avio_get_str16be = FunctionResolver.GetFunctionDelegate("avformat", "avio_get_str16be", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_get_str16be(@pb, @maxlen, @buf, @buflen); + }; + + vectors.avio_get_str16le = (AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => + { + vectors.avio_get_str16le = FunctionResolver.GetFunctionDelegate("avformat", "avio_get_str16le", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_get_str16le(@pb, @maxlen, @buf, @buflen); + }; + + vectors.avio_handshake = (AVIOContext* @c) => + { + vectors.avio_handshake = FunctionResolver.GetFunctionDelegate("avformat", "avio_handshake", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_handshake(@c); + }; + + vectors.avio_open = (AVIOContext** @s, string @url, int @flags) => + { + vectors.avio_open = FunctionResolver.GetFunctionDelegate("avformat", "avio_open", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_open(@s, @url, @flags); + }; + + vectors.avio_open_dir = (AVIODirContext** @s, string @url, AVDictionary** @options) => + { + vectors.avio_open_dir = FunctionResolver.GetFunctionDelegate("avformat", "avio_open_dir", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_open_dir(@s, @url, @options); + }; + + vectors.avio_open_dyn_buf = (AVIOContext** @s) => + { + vectors.avio_open_dyn_buf = FunctionResolver.GetFunctionDelegate("avformat", "avio_open_dyn_buf", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_open_dyn_buf(@s); + }; + + vectors.avio_open2 = (AVIOContext** @s, string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options) => + { + vectors.avio_open2 = FunctionResolver.GetFunctionDelegate("avformat", "avio_open2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_open2(@s, @url, @flags, @int_cb, @options); + }; + + vectors.avio_pause = (AVIOContext* @h, int @pause) => + { + vectors.avio_pause = FunctionResolver.GetFunctionDelegate("avformat", "avio_pause", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_pause(@h, @pause); + }; + + vectors.avio_print_string_array = (AVIOContext* @s, byte*[] @strings) => + { + vectors.avio_print_string_array = FunctionResolver.GetFunctionDelegate("avformat", "avio_print_string_array", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_print_string_array(@s, @strings); + }; + + vectors.avio_printf = (AVIOContext* @s, string @fmt) => + { + vectors.avio_printf = FunctionResolver.GetFunctionDelegate("avformat", "avio_printf", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_printf(@s, @fmt); + }; + + vectors.avio_protocol_get_class = (string @name) => + { + vectors.avio_protocol_get_class = FunctionResolver.GetFunctionDelegate("avformat", "avio_protocol_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_protocol_get_class(@name); + }; + + vectors.avio_put_str = (AVIOContext* @s, string @str) => + { + vectors.avio_put_str = FunctionResolver.GetFunctionDelegate("avformat", "avio_put_str", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_put_str(@s, @str); + }; + + vectors.avio_put_str16be = (AVIOContext* @s, string @str) => + { + vectors.avio_put_str16be = FunctionResolver.GetFunctionDelegate("avformat", "avio_put_str16be", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_put_str16be(@s, @str); + }; + + vectors.avio_put_str16le = (AVIOContext* @s, string @str) => + { + vectors.avio_put_str16le = FunctionResolver.GetFunctionDelegate("avformat", "avio_put_str16le", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_put_str16le(@s, @str); + }; + + vectors.avio_r8 = (AVIOContext* @s) => + { + vectors.avio_r8 = FunctionResolver.GetFunctionDelegate("avformat", "avio_r8", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_r8(@s); + }; + + vectors.avio_rb16 = (AVIOContext* @s) => + { + vectors.avio_rb16 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rb16", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rb16(@s); + }; + + vectors.avio_rb24 = (AVIOContext* @s) => + { + vectors.avio_rb24 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rb24", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_rb24(@s); + }; + + vectors.avio_rb32 = (AVIOContext* @s) => + { + vectors.avio_rb32 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rb32", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rb32(@s); + }; + + vectors.avio_rb64 = (AVIOContext* @s) => + { + vectors.avio_rb64 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rb64", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rb64(@s); + }; + + vectors.avio_read = (AVIOContext* @s, byte* @buf, int @size) => + { + vectors.avio_read = FunctionResolver.GetFunctionDelegate("avformat", "avio_read", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_read(@s, @buf, @size); + }; + + vectors.avio_read_dir = (AVIODirContext* @s, AVIODirEntry** @next) => + { + vectors.avio_read_dir = FunctionResolver.GetFunctionDelegate("avformat", "avio_read_dir", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_read_dir(@s, @next); + }; + + vectors.avio_read_partial = (AVIOContext* @s, byte* @buf, int @size) => + { + vectors.avio_read_partial = FunctionResolver.GetFunctionDelegate("avformat", "avio_read_partial", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_read_partial(@s, @buf, @size); + }; + + vectors.avio_read_to_bprint = (AVIOContext* @h, AVBPrint* @pb, ulong @max_size) => + { + vectors.avio_read_to_bprint = FunctionResolver.GetFunctionDelegate("avformat", "avio_read_to_bprint", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_read_to_bprint(@h, @pb, @max_size); + }; + + vectors.avio_rl16 = (AVIOContext* @s) => + { + vectors.avio_rl16 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rl16", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rl16(@s); + }; + + vectors.avio_rl24 = (AVIOContext* @s) => + { + vectors.avio_rl24 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rl24", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rl24(@s); + }; + + vectors.avio_rl32 = (AVIOContext* @s) => + { + vectors.avio_rl32 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rl32", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rl32(@s); + }; + + vectors.avio_rl64 = (AVIOContext* @s) => + { + vectors.avio_rl64 = FunctionResolver.GetFunctionDelegate("avformat", "avio_rl64", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_rl64(@s); + }; + + vectors.avio_seek = (AVIOContext* @s, long @offset, int @whence) => + { + vectors.avio_seek = FunctionResolver.GetFunctionDelegate("avformat", "avio_seek", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_seek(@s, @offset, @whence); + }; + + vectors.avio_seek_time = (AVIOContext* @h, int @stream_index, long @timestamp, int @flags) => + { + vectors.avio_seek_time = FunctionResolver.GetFunctionDelegate("avformat", "avio_seek_time", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_seek_time(@h, @stream_index, @timestamp, @flags); + }; + + vectors.avio_size = (AVIOContext* @s) => + { + vectors.avio_size = FunctionResolver.GetFunctionDelegate("avformat", "avio_size", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.avio_size(@s); + }; + + vectors.avio_skip = (AVIOContext* @s, long @offset) => + { + vectors.avio_skip = FunctionResolver.GetFunctionDelegate("avformat", "avio_skip", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_skip(@s, @offset); + }; + + vectors.avio_vprintf = (AVIOContext* @s, string @fmt, byte* @ap) => + { + vectors.avio_vprintf = FunctionResolver.GetFunctionDelegate("avformat", "avio_vprintf", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avio_vprintf(@s, @fmt, @ap); + }; + + vectors.avio_w8 = (AVIOContext* @s, int @b) => + { + vectors.avio_w8 = FunctionResolver.GetFunctionDelegate("avformat", "avio_w8", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_w8(@s, @b); + }; + + vectors.avio_wb16 = (AVIOContext* @s, uint @val) => + { + vectors.avio_wb16 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wb16", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wb16(@s, @val); + }; + + vectors.avio_wb24 = (AVIOContext* @s, uint @val) => + { + vectors.avio_wb24 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wb24", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wb24(@s, @val); + }; + + vectors.avio_wb32 = (AVIOContext* @s, uint @val) => + { + vectors.avio_wb32 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wb32", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wb32(@s, @val); + }; + + vectors.avio_wb64 = (AVIOContext* @s, ulong @val) => + { + vectors.avio_wb64 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wb64", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avio_wb64(@s, @val); + }; + + vectors.avio_wl16 = (AVIOContext* @s, uint @val) => + { + vectors.avio_wl16 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wl16", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wl16(@s, @val); + }; + + vectors.avio_wl24 = (AVIOContext* @s, uint @val) => + { + vectors.avio_wl24 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wl24", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wl24(@s, @val); + }; + + vectors.avio_wl32 = (AVIOContext* @s, uint @val) => + { + vectors.avio_wl32 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wl32", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wl32(@s, @val); + }; + + vectors.avio_wl64 = (AVIOContext* @s, ulong @val) => + { + vectors.avio_wl64 = FunctionResolver.GetFunctionDelegate("avformat", "avio_wl64", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_wl64(@s, @val); + }; + + vectors.avio_write = (AVIOContext* @s, byte* @buf, int @size) => + { + vectors.avio_write = FunctionResolver.GetFunctionDelegate("avformat", "avio_write", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_write(@s, @buf, @size); + }; + + vectors.avio_write_marker = (AVIOContext* @s, long @time, AVIODataMarkerType @type) => + { + vectors.avio_write_marker = FunctionResolver.GetFunctionDelegate("avformat", "avio_write_marker", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.avio_write_marker(@s, @time, @type); + }; + + vectors.avsubtitle_free = (AVSubtitle* @sub) => + { + vectors.avsubtitle_free = FunctionResolver.GetFunctionDelegate("avcodec", "avsubtitle_free", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.avsubtitle_free(@sub); + }; + + vectors.avutil_configuration = () => + { + vectors.avutil_configuration = FunctionResolver.GetFunctionDelegate("avutil", "avutil_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avutil_configuration(); + }; + + vectors.avutil_license = () => + { + vectors.avutil_license = FunctionResolver.GetFunctionDelegate("avutil", "avutil_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avutil_license(); + }; + + vectors.avutil_version = () => + { + vectors.avutil_version = FunctionResolver.GetFunctionDelegate("avutil", "avutil_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.avutil_version(); + }; + + vectors.postproc_configuration = () => + { + vectors.postproc_configuration = FunctionResolver.GetFunctionDelegate("postproc", "postproc_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.postproc_configuration(); + }; + + vectors.postproc_license = () => + { + vectors.postproc_license = FunctionResolver.GetFunctionDelegate("postproc", "postproc_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.postproc_license(); + }; + + vectors.postproc_version = () => + { + vectors.postproc_version = FunctionResolver.GetFunctionDelegate("postproc", "postproc_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.postproc_version(); + }; + + vectors.pp_free_context = (void* @ppContext) => + { + vectors.pp_free_context = FunctionResolver.GetFunctionDelegate("postproc", "pp_free_context", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.pp_free_context(@ppContext); + }; + + vectors.pp_free_mode = (void* @mode) => + { + vectors.pp_free_mode = FunctionResolver.GetFunctionDelegate("postproc", "pp_free_mode", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.pp_free_mode(@mode); + }; + + vectors.pp_get_context = (int @width, int @height, int @flags) => + { + vectors.pp_get_context = FunctionResolver.GetFunctionDelegate("postproc", "pp_get_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.pp_get_context(@width, @height, @flags); + }; + + vectors.pp_get_mode_by_name_and_quality = (string @name, int @quality) => + { + vectors.pp_get_mode_by_name_and_quality = FunctionResolver.GetFunctionDelegate("postproc", "pp_get_mode_by_name_and_quality", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.pp_get_mode_by_name_and_quality(@name, @quality); + }; + + vectors.pp_postprocess = (in byte_ptrArray3 @src, in int_array3 @srcStride, ref byte_ptrArray3 @dst, in int_array3 @dstStride, int @horizontalSize, int @verticalSize, sbyte* @QP_store, int @QP_stride, void* @mode, void* @ppContext, int @pict_type) => + { + vectors.pp_postprocess = FunctionResolver.GetFunctionDelegate("postproc", "pp_postprocess", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.pp_postprocess(@src, @srcStride, ref @dst, @dstStride, @horizontalSize, @verticalSize, @QP_store, @QP_stride, @mode, @ppContext, @pict_type); + }; + + vectors.swr_alloc = () => + { + vectors.swr_alloc = FunctionResolver.GetFunctionDelegate("swresample", "swr_alloc", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.swr_alloc(); + }; + + vectors.swr_alloc_set_opts = (SwrContext* @s, long @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, long @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx) => + { + vectors.swr_alloc_set_opts = FunctionResolver.GetFunctionDelegate("swresample", "swr_alloc_set_opts", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_alloc_set_opts(@s, @out_ch_layout, @out_sample_fmt, @out_sample_rate, @in_ch_layout, @in_sample_fmt, @in_sample_rate, @log_offset, @log_ctx); + }; + + vectors.swr_alloc_set_opts2 = (SwrContext** @ps, AVChannelLayout* @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, AVChannelLayout* @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx) => + { + vectors.swr_alloc_set_opts2 = FunctionResolver.GetFunctionDelegate("swresample", "swr_alloc_set_opts2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_alloc_set_opts2(@ps, @out_ch_layout, @out_sample_fmt, @out_sample_rate, @in_ch_layout, @in_sample_fmt, @in_sample_rate, @log_offset, @log_ctx); + }; + + vectors.swr_build_matrix = (ulong @in_layout, ulong @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @rematrix_maxval, double @rematrix_volume, double* @matrix, int @stride, AVMatrixEncoding @matrix_encoding, void* @log_ctx) => + { + vectors.swr_build_matrix = FunctionResolver.GetFunctionDelegate("swresample", "swr_build_matrix", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.swr_build_matrix(@in_layout, @out_layout, @center_mix_level, @surround_mix_level, @lfe_mix_level, @rematrix_maxval, @rematrix_volume, @matrix, @stride, @matrix_encoding, @log_ctx); + }; + + vectors.swr_build_matrix2 = (AVChannelLayout* @in_layout, AVChannelLayout* @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @maxval, double @rematrix_volume, double* @matrix, long @stride, AVMatrixEncoding @matrix_encoding, void* @log_context) => + { + vectors.swr_build_matrix2 = FunctionResolver.GetFunctionDelegate("swresample", "swr_build_matrix2", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_build_matrix2(@in_layout, @out_layout, @center_mix_level, @surround_mix_level, @lfe_mix_level, @maxval, @rematrix_volume, @matrix, @stride, @matrix_encoding, @log_context); + }; + + vectors.swr_close = (SwrContext* @s) => + { + vectors.swr_close = FunctionResolver.GetFunctionDelegate("swresample", "swr_close", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.swr_close(@s); + }; + + vectors.swr_config_frame = (SwrContext* @swr, AVFrame* @out, AVFrame* @in) => + { + vectors.swr_config_frame = FunctionResolver.GetFunctionDelegate("swresample", "swr_config_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_config_frame(@swr, @out, @in); + }; + + vectors.swr_convert = (SwrContext* @s, byte** @out, int @out_count, byte** @in, int @in_count) => + { + vectors.swr_convert = FunctionResolver.GetFunctionDelegate("swresample", "swr_convert", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.swr_convert(@s, @out, @out_count, @in, @in_count); + }; + + vectors.swr_convert_frame = (SwrContext* @swr, AVFrame* @output, AVFrame* @input) => + { + vectors.swr_convert_frame = FunctionResolver.GetFunctionDelegate("swresample", "swr_convert_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_convert_frame(@swr, @output, @input); + }; + + vectors.swr_drop_output = (SwrContext* @s, int @count) => + { + vectors.swr_drop_output = FunctionResolver.GetFunctionDelegate("swresample", "swr_drop_output", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_drop_output(@s, @count); + }; + + vectors.swr_free = (SwrContext** @s) => + { + vectors.swr_free = FunctionResolver.GetFunctionDelegate("swresample", "swr_free", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.swr_free(@s); + }; + + vectors.swr_get_class = () => + { + vectors.swr_get_class = FunctionResolver.GetFunctionDelegate("swresample", "swr_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_get_class(); + }; + + vectors.swr_get_delay = (SwrContext* @s, long @base) => + { + vectors.swr_get_delay = FunctionResolver.GetFunctionDelegate("swresample", "swr_get_delay", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_get_delay(@s, @base); + }; + + vectors.swr_get_out_samples = (SwrContext* @s, int @in_samples) => + { + vectors.swr_get_out_samples = FunctionResolver.GetFunctionDelegate("swresample", "swr_get_out_samples", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.swr_get_out_samples(@s, @in_samples); + }; + + vectors.swr_init = (SwrContext* @s) => + { + vectors.swr_init = FunctionResolver.GetFunctionDelegate("swresample", "swr_init", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_init(@s); + }; + + vectors.swr_inject_silence = (SwrContext* @s, int @count) => + { + vectors.swr_inject_silence = FunctionResolver.GetFunctionDelegate("swresample", "swr_inject_silence", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_inject_silence(@s, @count); + }; + + vectors.swr_is_initialized = (SwrContext* @s) => + { + vectors.swr_is_initialized = FunctionResolver.GetFunctionDelegate("swresample", "swr_is_initialized", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_is_initialized(@s); + }; + + vectors.swr_next_pts = (SwrContext* @s, long @pts) => + { + vectors.swr_next_pts = FunctionResolver.GetFunctionDelegate("swresample", "swr_next_pts", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_next_pts(@s, @pts); + }; + + vectors.swr_set_channel_mapping = (SwrContext* @s, int* @channel_map) => + { + vectors.swr_set_channel_mapping = FunctionResolver.GetFunctionDelegate("swresample", "swr_set_channel_mapping", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_set_channel_mapping(@s, @channel_map); + }; + + vectors.swr_set_compensation = (SwrContext* @s, int @sample_delta, int @compensation_distance) => + { + vectors.swr_set_compensation = FunctionResolver.GetFunctionDelegate("swresample", "swr_set_compensation", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.swr_set_compensation(@s, @sample_delta, @compensation_distance); + }; + + vectors.swr_set_matrix = (SwrContext* @s, double* @matrix, int @stride) => + { + vectors.swr_set_matrix = FunctionResolver.GetFunctionDelegate("swresample", "swr_set_matrix", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swr_set_matrix(@s, @matrix, @stride); + }; + + vectors.swresample_configuration = () => + { + vectors.swresample_configuration = FunctionResolver.GetFunctionDelegate("swresample", "swresample_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swresample_configuration(); + }; + + vectors.swresample_license = () => + { + vectors.swresample_license = FunctionResolver.GetFunctionDelegate("swresample", "swresample_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swresample_license(); + }; + + vectors.swresample_version = () => + { + vectors.swresample_version = FunctionResolver.GetFunctionDelegate("swresample", "swresample_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swresample_version(); + }; + + vectors.sws_alloc_context = () => + { + vectors.sws_alloc_context = FunctionResolver.GetFunctionDelegate("swscale", "sws_alloc_context", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_alloc_context(); + }; + + vectors.sws_allocVec = (int @length) => + { + vectors.sws_allocVec = FunctionResolver.GetFunctionDelegate("swscale", "sws_allocVec", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.sws_allocVec(@length); + }; + + vectors.sws_convertPalette8ToPacked24 = (byte* @src, byte* @dst, int @num_pixels, byte* @palette) => + { + vectors.sws_convertPalette8ToPacked24 = FunctionResolver.GetFunctionDelegate("swscale", "sws_convertPalette8ToPacked24", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_convertPalette8ToPacked24(@src, @dst, @num_pixels, @palette); + }; + + vectors.sws_convertPalette8ToPacked32 = (byte* @src, byte* @dst, int @num_pixels, byte* @palette) => + { + vectors.sws_convertPalette8ToPacked32 = FunctionResolver.GetFunctionDelegate("swscale", "sws_convertPalette8ToPacked32", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_convertPalette8ToPacked32(@src, @dst, @num_pixels, @palette); + }; + + vectors.sws_frame_end = (SwsContext* @c) => + { + vectors.sws_frame_end = FunctionResolver.GetFunctionDelegate("swscale", "sws_frame_end", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_frame_end(@c); + }; + + vectors.sws_frame_start = (SwsContext* @c, AVFrame* @dst, AVFrame* @src) => + { + vectors.sws_frame_start = FunctionResolver.GetFunctionDelegate("swscale", "sws_frame_start", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_frame_start(@c, @dst, @src); + }; + + vectors.sws_freeContext = (SwsContext* @swsContext) => + { + vectors.sws_freeContext = FunctionResolver.GetFunctionDelegate("swscale", "sws_freeContext", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_freeContext(@swsContext); + }; + + vectors.sws_freeFilter = (SwsFilter* @filter) => + { + vectors.sws_freeFilter = FunctionResolver.GetFunctionDelegate("swscale", "sws_freeFilter", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + vectors.sws_freeFilter(@filter); + }; + + vectors.sws_freeVec = (SwsVector* @a) => + { + vectors.sws_freeVec = FunctionResolver.GetFunctionDelegate("swscale", "sws_freeVec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_freeVec(@a); + }; + + vectors.sws_get_class = () => + { + vectors.sws_get_class = FunctionResolver.GetFunctionDelegate("swscale", "sws_get_class", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_get_class(); + }; + + vectors.sws_getCachedContext = (SwsContext* @context, int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param) => + { + vectors.sws_getCachedContext = FunctionResolver.GetFunctionDelegate("swscale", "sws_getCachedContext", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_getCachedContext(@context, @srcW, @srcH, @srcFormat, @dstW, @dstH, @dstFormat, @flags, @srcFilter, @dstFilter, @param); + }; + + vectors.sws_getCoefficients = (int @colorspace) => + { + vectors.sws_getCoefficients = FunctionResolver.GetFunctionDelegate("swscale", "sws_getCoefficients", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_getCoefficients(@colorspace); + }; + + vectors.sws_getColorspaceDetails = (SwsContext* @c, int** @inv_table, int* @srcRange, int** @table, int* @dstRange, int* @brightness, int* @contrast, int* @saturation) => + { + vectors.sws_getColorspaceDetails = FunctionResolver.GetFunctionDelegate("swscale", "sws_getColorspaceDetails", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.sws_getColorspaceDetails(@c, @inv_table, @srcRange, @table, @dstRange, @brightness, @contrast, @saturation); + }; + + vectors.sws_getContext = (int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param) => + { + vectors.sws_getContext = FunctionResolver.GetFunctionDelegate("swscale", "sws_getContext", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_getContext(@srcW, @srcH, @srcFormat, @dstW, @dstH, @dstFormat, @flags, @srcFilter, @dstFilter, @param); + }; + + vectors.sws_getDefaultFilter = (float @lumaGBlur, float @chromaGBlur, float @lumaSharpen, float @chromaSharpen, float @chromaHShift, float @chromaVShift, int @verbose) => + { + vectors.sws_getDefaultFilter = FunctionResolver.GetFunctionDelegate("swscale", "sws_getDefaultFilter", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_getDefaultFilter(@lumaGBlur, @chromaGBlur, @lumaSharpen, @chromaSharpen, @chromaHShift, @chromaVShift, @verbose); + }; + + vectors.sws_getGaussianVec = (double @variance, double @quality) => + { + vectors.sws_getGaussianVec = FunctionResolver.GetFunctionDelegate("swscale", "sws_getGaussianVec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_getGaussianVec(@variance, @quality); + }; + + vectors.sws_init_context = (SwsContext* @sws_context, SwsFilter* @srcFilter, SwsFilter* @dstFilter) => + { + vectors.sws_init_context = FunctionResolver.GetFunctionDelegate("swscale", "sws_init_context", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.sws_init_context(@sws_context, @srcFilter, @dstFilter); + }; + + vectors.sws_isSupportedEndiannessConversion = (AVPixelFormat @pix_fmt) => + { + vectors.sws_isSupportedEndiannessConversion = FunctionResolver.GetFunctionDelegate("swscale", "sws_isSupportedEndiannessConversion", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_isSupportedEndiannessConversion(@pix_fmt); + }; + + vectors.sws_isSupportedInput = (AVPixelFormat @pix_fmt) => + { + vectors.sws_isSupportedInput = FunctionResolver.GetFunctionDelegate("swscale", "sws_isSupportedInput", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_isSupportedInput(@pix_fmt); + }; + + vectors.sws_isSupportedOutput = (AVPixelFormat @pix_fmt) => + { + vectors.sws_isSupportedOutput = FunctionResolver.GetFunctionDelegate("swscale", "sws_isSupportedOutput", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_isSupportedOutput(@pix_fmt); + }; + + vectors.sws_normalizeVec = (SwsVector* @a, double @height) => + { + vectors.sws_normalizeVec = FunctionResolver.GetFunctionDelegate("swscale", "sws_normalizeVec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_normalizeVec(@a, @height); + }; + + vectors.sws_receive_slice = (SwsContext* @c, uint @slice_start, uint @slice_height) => + { + vectors.sws_receive_slice = FunctionResolver.GetFunctionDelegate("swscale", "sws_receive_slice", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_receive_slice(@c, @slice_start, @slice_height); + }; + + vectors.sws_receive_slice_alignment = (SwsContext* @c) => + { + vectors.sws_receive_slice_alignment = FunctionResolver.GetFunctionDelegate("swscale", "sws_receive_slice_alignment", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.sws_receive_slice_alignment(@c); + }; + + vectors.sws_scale = (SwsContext* @c, byte*[] @srcSlice, int[] @srcStride, int @srcSliceY, int @srcSliceH, byte*[] @dst, int[] @dstStride) => + { + vectors.sws_scale = FunctionResolver.GetFunctionDelegate("swscale", "sws_scale", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_scale(@c, @srcSlice, @srcStride, @srcSliceY, @srcSliceH, @dst, @dstStride); + }; + + vectors.sws_scale_frame = (SwsContext* @c, AVFrame* @dst, AVFrame* @src) => + { + vectors.sws_scale_frame = FunctionResolver.GetFunctionDelegate("swscale", "sws_scale_frame", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_scale_frame(@c, @dst, @src); + }; + + vectors.sws_scaleVec = (SwsVector* @a, double @scalar) => + { + vectors.sws_scaleVec = FunctionResolver.GetFunctionDelegate("swscale", "sws_scaleVec", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + vectors.sws_scaleVec(@a, @scalar); + }; + + vectors.sws_send_slice = (SwsContext* @c, uint @slice_start, uint @slice_height) => + { + vectors.sws_send_slice = FunctionResolver.GetFunctionDelegate("swscale", "sws_send_slice", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.sws_send_slice(@c, @slice_start, @slice_height); + }; + + vectors.sws_setColorspaceDetails = (SwsContext* @c, in int_array4 @inv_table, int @srcRange, in int_array4 @table, int @dstRange, int @brightness, int @contrast, int @saturation) => + { + vectors.sws_setColorspaceDetails = FunctionResolver.GetFunctionDelegate("swscale", "sws_setColorspaceDetails", ThrowErrorIfFunctionNotFound) ?? 
delegate { throw new NotSupportedException(); }; + return vectors.sws_setColorspaceDetails(@c, @inv_table, @srcRange, @table, @dstRange, @brightness, @contrast, @saturation); + }; + + vectors.swscale_configuration = () => + { + vectors.swscale_configuration = FunctionResolver.GetFunctionDelegate("swscale", "swscale_configuration", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swscale_configuration(); + }; + + vectors.swscale_license = () => + { + vectors.swscale_license = FunctionResolver.GetFunctionDelegate("swscale", "swscale_license", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swscale_license(); + }; + + vectors.swscale_version = () => + { + vectors.swscale_version = FunctionResolver.GetFunctionDelegate("swscale", "swscale_version", ThrowErrorIfFunctionNotFound) ?? delegate { throw new NotSupportedException(); }; + return vectors.swscale_version(); + }; + + } +} diff --git a/FFmpeg.AutoGen/generated/Enums.g.cs b/FFmpeg.AutoGen/generated/Enums.g.cs new file mode 100644 index 00000000..ca5913c3 --- /dev/null +++ b/FFmpeg.AutoGen/generated/Enums.g.cs @@ -0,0 +1,1736 @@ +namespace FFmpeg.AutoGen; + +public enum AVActiveFormatDescription : int +{ + @AV_AFD_SAME = 8, + @AV_AFD_4_3 = 9, + @AV_AFD_16_9 = 10, + @AV_AFD_14_9 = 11, + @AV_AFD_4_3_SP_14_9 = 13, + @AV_AFD_16_9_SP_14_9 = 14, + @AV_AFD_SP_4_3 = 15, +} + +/// Message types used by avdevice_app_to_dev_control_message(). +public enum AVAppToDevMessageType : int +{ + /// Dummy message. + @AV_APP_TO_DEV_NONE = 1313820229, + /// Window size change message. + @AV_APP_TO_DEV_WINDOW_SIZE = 1195724621, + /// Repaint request message. + @AV_APP_TO_DEV_WINDOW_REPAINT = 1380274241, + /// Request pause/play. + @AV_APP_TO_DEV_PAUSE = 1346458912, + /// Request pause/play. + @AV_APP_TO_DEV_PLAY = 1347174745, + /// Request pause/play. + @AV_APP_TO_DEV_TOGGLE_PAUSE = 1346458964, + /// Volume control message. 
+ @AV_APP_TO_DEV_SET_VOLUME = 1398165324, + /// Mute control messages. + @AV_APP_TO_DEV_MUTE = 541939028, + /// Mute control messages. + @AV_APP_TO_DEV_UNMUTE = 1431131476, + /// Mute control messages. + @AV_APP_TO_DEV_TOGGLE_MUTE = 1414354260, + /// Get volume/mute messages. + @AV_APP_TO_DEV_GET_VOLUME = 1196838732, + /// Get volume/mute messages. + @AV_APP_TO_DEV_GET_MUTE = 1196250452, +} + +public enum AVAudioServiceType : int +{ + @AV_AUDIO_SERVICE_TYPE_MAIN = 0, + @AV_AUDIO_SERVICE_TYPE_EFFECTS = 1, + @AV_AUDIO_SERVICE_TYPE_VISUALLY_IMPAIRED = 2, + @AV_AUDIO_SERVICE_TYPE_HEARING_IMPAIRED = 3, + @AV_AUDIO_SERVICE_TYPE_DIALOGUE = 4, + @AV_AUDIO_SERVICE_TYPE_COMMENTARY = 5, + @AV_AUDIO_SERVICE_TYPE_EMERGENCY = 6, + @AV_AUDIO_SERVICE_TYPE_VOICE_OVER = 7, + @AV_AUDIO_SERVICE_TYPE_KARAOKE = 8, + /// Not part of ABI + @AV_AUDIO_SERVICE_TYPE_NB = 9, +} + +/// @{ +public enum AVChannel : int +{ + @AV_CHAN_NONE = -1, + @AV_CHAN_FRONT_LEFT = 0, + @AV_CHAN_FRONT_RIGHT = 1, + @AV_CHAN_FRONT_CENTER = 2, + @AV_CHAN_LOW_FREQUENCY = 3, + @AV_CHAN_BACK_LEFT = 4, + @AV_CHAN_BACK_RIGHT = 5, + @AV_CHAN_FRONT_LEFT_OF_CENTER = 6, + @AV_CHAN_FRONT_RIGHT_OF_CENTER = 7, + @AV_CHAN_BACK_CENTER = 8, + @AV_CHAN_SIDE_LEFT = 9, + @AV_CHAN_SIDE_RIGHT = 10, + @AV_CHAN_TOP_CENTER = 11, + @AV_CHAN_TOP_FRONT_LEFT = 12, + @AV_CHAN_TOP_FRONT_CENTER = 13, + @AV_CHAN_TOP_FRONT_RIGHT = 14, + @AV_CHAN_TOP_BACK_LEFT = 15, + @AV_CHAN_TOP_BACK_CENTER = 16, + @AV_CHAN_TOP_BACK_RIGHT = 17, + /// Stereo downmix. + @AV_CHAN_STEREO_LEFT = 29, + /// See above. + @AV_CHAN_STEREO_RIGHT = 30, + /// See above. + @AV_CHAN_WIDE_LEFT = 31, + /// See above. + @AV_CHAN_WIDE_RIGHT = 32, + /// See above. + @AV_CHAN_SURROUND_DIRECT_LEFT = 33, + /// See above. + @AV_CHAN_SURROUND_DIRECT_RIGHT = 34, + /// See above. + @AV_CHAN_LOW_FREQUENCY_2 = 35, + /// See above. + @AV_CHAN_TOP_SIDE_LEFT = 36, + /// See above. + @AV_CHAN_TOP_SIDE_RIGHT = 37, + /// See above. + @AV_CHAN_BOTTOM_FRONT_CENTER = 38, + /// See above. 
+ @AV_CHAN_BOTTOM_FRONT_LEFT = 39, + /// See above. + @AV_CHAN_BOTTOM_FRONT_RIGHT = 40, + /// Channel is empty can be safely skipped. + @AV_CHAN_UNUSED = 512, + /// Channel contains data, but its position is unknown. + @AV_CHAN_UNKNOWN = 768, + /// Range of channels between AV_CHAN_AMBISONIC_BASE and AV_CHAN_AMBISONIC_END represent Ambisonic components using the ACN system. + @AV_CHAN_AMBISONIC_BASE = 1024, + /// Range of channels between AV_CHAN_AMBISONIC_BASE and AV_CHAN_AMBISONIC_END represent Ambisonic components using the ACN system. + @AV_CHAN_AMBISONIC_END = 2047, +} + +public enum AVChannelOrder : int +{ + /// Only the channel count is specified, without any further information about the channel order. + @AV_CHANNEL_ORDER_UNSPEC = 0, + /// The native channel order, i.e. the channels are in the same order in which they are defined in the AVChannel enum. This supports up to 63 different channels. + @AV_CHANNEL_ORDER_NATIVE = 1, + /// The channel order does not correspond to any other predefined order and is stored as an explicit map. For example, this could be used to support layouts with 64 or more channels, or with empty/skipped (AV_CHAN_SILENCE) channels at arbitrary positions. + @AV_CHANNEL_ORDER_CUSTOM = 2, + /// The audio is represented as the decomposition of the sound field into spherical harmonics. Each channel corresponds to a single expansion component. Channels are ordered according to ACN (Ambisonic Channel Number). + @AV_CHANNEL_ORDER_AMBISONIC = 3, +} + +/// Location of chroma samples. 
+public enum AVChromaLocation : int +{ + @AVCHROMA_LOC_UNSPECIFIED = 0, + /// MPEG-2/4 4:2:0, H.264 default for 4:2:0 + @AVCHROMA_LOC_LEFT = 1, + /// MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0 + @AVCHROMA_LOC_CENTER = 2, + /// ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2 + @AVCHROMA_LOC_TOPLEFT = 3, + @AVCHROMA_LOC_TOP = 4, + @AVCHROMA_LOC_BOTTOMLEFT = 5, + @AVCHROMA_LOC_BOTTOM = 6, + /// Not part of ABI + @AVCHROMA_LOC_NB = 7, +} + +public enum AVClassCategory : int +{ + @AV_CLASS_CATEGORY_NA = 0, + @AV_CLASS_CATEGORY_INPUT = 1, + @AV_CLASS_CATEGORY_OUTPUT = 2, + @AV_CLASS_CATEGORY_MUXER = 3, + @AV_CLASS_CATEGORY_DEMUXER = 4, + @AV_CLASS_CATEGORY_ENCODER = 5, + @AV_CLASS_CATEGORY_DECODER = 6, + @AV_CLASS_CATEGORY_FILTER = 7, + @AV_CLASS_CATEGORY_BITSTREAM_FILTER = 8, + @AV_CLASS_CATEGORY_SWSCALER = 9, + @AV_CLASS_CATEGORY_SWRESAMPLER = 10, + @AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT = 40, + @AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT = 41, + @AV_CLASS_CATEGORY_DEVICE_AUDIO_OUTPUT = 42, + @AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT = 43, + @AV_CLASS_CATEGORY_DEVICE_OUTPUT = 44, + @AV_CLASS_CATEGORY_DEVICE_INPUT = 45, + /// not part of ABI/API + @AV_CLASS_CATEGORY_NB = 46, +} + +/// Identify the syntax and semantics of the bitstream. The principle is roughly: Two decoders with the same ID can decode the same streams. Two encoders with the same ID can encode compatible streams. There may be slight deviations from the principle due to implementation details. 
+public enum AVCodecID : int +{ + @AV_CODEC_ID_NONE = 0, + @AV_CODEC_ID_MPEG1VIDEO = 1, + /// preferred ID for MPEG-1/2 video decoding + @AV_CODEC_ID_MPEG2VIDEO = 2, + @AV_CODEC_ID_H261 = 3, + @AV_CODEC_ID_H263 = 4, + @AV_CODEC_ID_RV10 = 5, + @AV_CODEC_ID_RV20 = 6, + @AV_CODEC_ID_MJPEG = 7, + @AV_CODEC_ID_MJPEGB = 8, + @AV_CODEC_ID_LJPEG = 9, + @AV_CODEC_ID_SP5X = 10, + @AV_CODEC_ID_JPEGLS = 11, + @AV_CODEC_ID_MPEG4 = 12, + @AV_CODEC_ID_RAWVIDEO = 13, + @AV_CODEC_ID_MSMPEG4V1 = 14, + @AV_CODEC_ID_MSMPEG4V2 = 15, + @AV_CODEC_ID_MSMPEG4V3 = 16, + @AV_CODEC_ID_WMV1 = 17, + @AV_CODEC_ID_WMV2 = 18, + @AV_CODEC_ID_H263P = 19, + @AV_CODEC_ID_H263I = 20, + @AV_CODEC_ID_FLV1 = 21, + @AV_CODEC_ID_SVQ1 = 22, + @AV_CODEC_ID_SVQ3 = 23, + @AV_CODEC_ID_DVVIDEO = 24, + @AV_CODEC_ID_HUFFYUV = 25, + @AV_CODEC_ID_CYUV = 26, + @AV_CODEC_ID_H264 = 27, + @AV_CODEC_ID_INDEO3 = 28, + @AV_CODEC_ID_VP3 = 29, + @AV_CODEC_ID_THEORA = 30, + @AV_CODEC_ID_ASV1 = 31, + @AV_CODEC_ID_ASV2 = 32, + @AV_CODEC_ID_FFV1 = 33, + @AV_CODEC_ID_4XM = 34, + @AV_CODEC_ID_VCR1 = 35, + @AV_CODEC_ID_CLJR = 36, + @AV_CODEC_ID_MDEC = 37, + @AV_CODEC_ID_ROQ = 38, + @AV_CODEC_ID_INTERPLAY_VIDEO = 39, + @AV_CODEC_ID_XAN_WC3 = 40, + @AV_CODEC_ID_XAN_WC4 = 41, + @AV_CODEC_ID_RPZA = 42, + @AV_CODEC_ID_CINEPAK = 43, + @AV_CODEC_ID_WS_VQA = 44, + @AV_CODEC_ID_MSRLE = 45, + @AV_CODEC_ID_MSVIDEO1 = 46, + @AV_CODEC_ID_IDCIN = 47, + @AV_CODEC_ID_8BPS = 48, + @AV_CODEC_ID_SMC = 49, + @AV_CODEC_ID_FLIC = 50, + @AV_CODEC_ID_TRUEMOTION1 = 51, + @AV_CODEC_ID_VMDVIDEO = 52, + @AV_CODEC_ID_MSZH = 53, + @AV_CODEC_ID_ZLIB = 54, + @AV_CODEC_ID_QTRLE = 55, + @AV_CODEC_ID_TSCC = 56, + @AV_CODEC_ID_ULTI = 57, + @AV_CODEC_ID_QDRAW = 58, + @AV_CODEC_ID_VIXL = 59, + @AV_CODEC_ID_QPEG = 60, + @AV_CODEC_ID_PNG = 61, + @AV_CODEC_ID_PPM = 62, + @AV_CODEC_ID_PBM = 63, + @AV_CODEC_ID_PGM = 64, + @AV_CODEC_ID_PGMYUV = 65, + @AV_CODEC_ID_PAM = 66, + @AV_CODEC_ID_FFVHUFF = 67, + @AV_CODEC_ID_RV30 = 68, + @AV_CODEC_ID_RV40 = 69, + @AV_CODEC_ID_VC1 = 
70, + @AV_CODEC_ID_WMV3 = 71, + @AV_CODEC_ID_LOCO = 72, + @AV_CODEC_ID_WNV1 = 73, + @AV_CODEC_ID_AASC = 74, + @AV_CODEC_ID_INDEO2 = 75, + @AV_CODEC_ID_FRAPS = 76, + @AV_CODEC_ID_TRUEMOTION2 = 77, + @AV_CODEC_ID_BMP = 78, + @AV_CODEC_ID_CSCD = 79, + @AV_CODEC_ID_MMVIDEO = 80, + @AV_CODEC_ID_ZMBV = 81, + @AV_CODEC_ID_AVS = 82, + @AV_CODEC_ID_SMACKVIDEO = 83, + @AV_CODEC_ID_NUV = 84, + @AV_CODEC_ID_KMVC = 85, + @AV_CODEC_ID_FLASHSV = 86, + @AV_CODEC_ID_CAVS = 87, + @AV_CODEC_ID_JPEG2000 = 88, + @AV_CODEC_ID_VMNC = 89, + @AV_CODEC_ID_VP5 = 90, + @AV_CODEC_ID_VP6 = 91, + @AV_CODEC_ID_VP6F = 92, + @AV_CODEC_ID_TARGA = 93, + @AV_CODEC_ID_DSICINVIDEO = 94, + @AV_CODEC_ID_TIERTEXSEQVIDEO = 95, + @AV_CODEC_ID_TIFF = 96, + @AV_CODEC_ID_GIF = 97, + @AV_CODEC_ID_DXA = 98, + @AV_CODEC_ID_DNXHD = 99, + @AV_CODEC_ID_THP = 100, + @AV_CODEC_ID_SGI = 101, + @AV_CODEC_ID_C93 = 102, + @AV_CODEC_ID_BETHSOFTVID = 103, + @AV_CODEC_ID_PTX = 104, + @AV_CODEC_ID_TXD = 105, + @AV_CODEC_ID_VP6A = 106, + @AV_CODEC_ID_AMV = 107, + @AV_CODEC_ID_VB = 108, + @AV_CODEC_ID_PCX = 109, + @AV_CODEC_ID_SUNRAST = 110, + @AV_CODEC_ID_INDEO4 = 111, + @AV_CODEC_ID_INDEO5 = 112, + @AV_CODEC_ID_MIMIC = 113, + @AV_CODEC_ID_RL2 = 114, + @AV_CODEC_ID_ESCAPE124 = 115, + @AV_CODEC_ID_DIRAC = 116, + @AV_CODEC_ID_BFI = 117, + @AV_CODEC_ID_CMV = 118, + @AV_CODEC_ID_MOTIONPIXELS = 119, + @AV_CODEC_ID_TGV = 120, + @AV_CODEC_ID_TGQ = 121, + @AV_CODEC_ID_TQI = 122, + @AV_CODEC_ID_AURA = 123, + @AV_CODEC_ID_AURA2 = 124, + @AV_CODEC_ID_V210X = 125, + @AV_CODEC_ID_TMV = 126, + @AV_CODEC_ID_V210 = 127, + @AV_CODEC_ID_DPX = 128, + @AV_CODEC_ID_MAD = 129, + @AV_CODEC_ID_FRWU = 130, + @AV_CODEC_ID_FLASHSV2 = 131, + @AV_CODEC_ID_CDGRAPHICS = 132, + @AV_CODEC_ID_R210 = 133, + @AV_CODEC_ID_ANM = 134, + @AV_CODEC_ID_BINKVIDEO = 135, + @AV_CODEC_ID_IFF_ILBM = 136, + @AV_CODEC_ID_KGV1 = 137, + @AV_CODEC_ID_YOP = 138, + @AV_CODEC_ID_VP8 = 139, + @AV_CODEC_ID_PICTOR = 140, + @AV_CODEC_ID_ANSI = 141, + @AV_CODEC_ID_A64_MULTI = 142, + 
@AV_CODEC_ID_A64_MULTI5 = 143, + @AV_CODEC_ID_R10K = 144, + @AV_CODEC_ID_MXPEG = 145, + @AV_CODEC_ID_LAGARITH = 146, + @AV_CODEC_ID_PRORES = 147, + @AV_CODEC_ID_JV = 148, + @AV_CODEC_ID_DFA = 149, + @AV_CODEC_ID_WMV3IMAGE = 150, + @AV_CODEC_ID_VC1IMAGE = 151, + @AV_CODEC_ID_UTVIDEO = 152, + @AV_CODEC_ID_BMV_VIDEO = 153, + @AV_CODEC_ID_VBLE = 154, + @AV_CODEC_ID_DXTORY = 155, + @AV_CODEC_ID_V410 = 156, + @AV_CODEC_ID_XWD = 157, + @AV_CODEC_ID_CDXL = 158, + @AV_CODEC_ID_XBM = 159, + @AV_CODEC_ID_ZEROCODEC = 160, + @AV_CODEC_ID_MSS1 = 161, + @AV_CODEC_ID_MSA1 = 162, + @AV_CODEC_ID_TSCC2 = 163, + @AV_CODEC_ID_MTS2 = 164, + @AV_CODEC_ID_CLLC = 165, + @AV_CODEC_ID_MSS2 = 166, + @AV_CODEC_ID_VP9 = 167, + @AV_CODEC_ID_AIC = 168, + @AV_CODEC_ID_ESCAPE130 = 169, + @AV_CODEC_ID_G2M = 170, + @AV_CODEC_ID_WEBP = 171, + @AV_CODEC_ID_HNM4_VIDEO = 172, + @AV_CODEC_ID_HEVC = 173, + @AV_CODEC_ID_FIC = 174, + @AV_CODEC_ID_ALIAS_PIX = 175, + @AV_CODEC_ID_BRENDER_PIX = 176, + @AV_CODEC_ID_PAF_VIDEO = 177, + @AV_CODEC_ID_EXR = 178, + @AV_CODEC_ID_VP7 = 179, + @AV_CODEC_ID_SANM = 180, + @AV_CODEC_ID_SGIRLE = 181, + @AV_CODEC_ID_MVC1 = 182, + @AV_CODEC_ID_MVC2 = 183, + @AV_CODEC_ID_HQX = 184, + @AV_CODEC_ID_TDSC = 185, + @AV_CODEC_ID_HQ_HQA = 186, + @AV_CODEC_ID_HAP = 187, + @AV_CODEC_ID_DDS = 188, + @AV_CODEC_ID_DXV = 189, + @AV_CODEC_ID_SCREENPRESSO = 190, + @AV_CODEC_ID_RSCC = 191, + @AV_CODEC_ID_AVS2 = 192, + @AV_CODEC_ID_PGX = 193, + @AV_CODEC_ID_AVS3 = 194, + @AV_CODEC_ID_MSP2 = 195, + @AV_CODEC_ID_VVC = 196, + @AV_CODEC_ID_Y41P = 197, + @AV_CODEC_ID_AVRP = 198, + @AV_CODEC_ID_012V = 199, + @AV_CODEC_ID_AVUI = 200, + @AV_CODEC_ID_AYUV = 201, + @AV_CODEC_ID_TARGA_Y216 = 202, + @AV_CODEC_ID_V308 = 203, + @AV_CODEC_ID_V408 = 204, + @AV_CODEC_ID_YUV4 = 205, + @AV_CODEC_ID_AVRN = 206, + @AV_CODEC_ID_CPIA = 207, + @AV_CODEC_ID_XFACE = 208, + @AV_CODEC_ID_SNOW = 209, + @AV_CODEC_ID_SMVJPEG = 210, + @AV_CODEC_ID_APNG = 211, + @AV_CODEC_ID_DAALA = 212, + @AV_CODEC_ID_CFHD = 213, + 
@AV_CODEC_ID_TRUEMOTION2RT = 214, + @AV_CODEC_ID_M101 = 215, + @AV_CODEC_ID_MAGICYUV = 216, + @AV_CODEC_ID_SHEERVIDEO = 217, + @AV_CODEC_ID_YLC = 218, + @AV_CODEC_ID_PSD = 219, + @AV_CODEC_ID_PIXLET = 220, + @AV_CODEC_ID_SPEEDHQ = 221, + @AV_CODEC_ID_FMVC = 222, + @AV_CODEC_ID_SCPR = 223, + @AV_CODEC_ID_CLEARVIDEO = 224, + @AV_CODEC_ID_XPM = 225, + @AV_CODEC_ID_AV1 = 226, + @AV_CODEC_ID_BITPACKED = 227, + @AV_CODEC_ID_MSCC = 228, + @AV_CODEC_ID_SRGC = 229, + @AV_CODEC_ID_SVG = 230, + @AV_CODEC_ID_GDV = 231, + @AV_CODEC_ID_FITS = 232, + @AV_CODEC_ID_IMM4 = 233, + @AV_CODEC_ID_PROSUMER = 234, + @AV_CODEC_ID_MWSC = 235, + @AV_CODEC_ID_WCMV = 236, + @AV_CODEC_ID_RASC = 237, + @AV_CODEC_ID_HYMT = 238, + @AV_CODEC_ID_ARBC = 239, + @AV_CODEC_ID_AGM = 240, + @AV_CODEC_ID_LSCR = 241, + @AV_CODEC_ID_VP4 = 242, + @AV_CODEC_ID_IMM5 = 243, + @AV_CODEC_ID_MVDV = 244, + @AV_CODEC_ID_MVHA = 245, + @AV_CODEC_ID_CDTOONS = 246, + @AV_CODEC_ID_MV30 = 247, + @AV_CODEC_ID_NOTCHLC = 248, + @AV_CODEC_ID_PFM = 249, + @AV_CODEC_ID_MOBICLIP = 250, + @AV_CODEC_ID_PHOTOCD = 251, + @AV_CODEC_ID_IPU = 252, + @AV_CODEC_ID_ARGO = 253, + @AV_CODEC_ID_CRI = 254, + @AV_CODEC_ID_SIMBIOSIS_IMX = 255, + @AV_CODEC_ID_SGA_VIDEO = 256, + @AV_CODEC_ID_GEM = 257, + @AV_CODEC_ID_VBN = 258, + @AV_CODEC_ID_JPEGXL = 259, + @AV_CODEC_ID_QOI = 260, + @AV_CODEC_ID_PHM = 261, + /// A dummy id pointing at the start of audio codecs + @AV_CODEC_ID_FIRST_AUDIO = 65536, + @AV_CODEC_ID_PCM_S16LE = 65536, + @AV_CODEC_ID_PCM_S16BE = 65537, + @AV_CODEC_ID_PCM_U16LE = 65538, + @AV_CODEC_ID_PCM_U16BE = 65539, + @AV_CODEC_ID_PCM_S8 = 65540, + @AV_CODEC_ID_PCM_U8 = 65541, + @AV_CODEC_ID_PCM_MULAW = 65542, + @AV_CODEC_ID_PCM_ALAW = 65543, + @AV_CODEC_ID_PCM_S32LE = 65544, + @AV_CODEC_ID_PCM_S32BE = 65545, + @AV_CODEC_ID_PCM_U32LE = 65546, + @AV_CODEC_ID_PCM_U32BE = 65547, + @AV_CODEC_ID_PCM_S24LE = 65548, + @AV_CODEC_ID_PCM_S24BE = 65549, + @AV_CODEC_ID_PCM_U24LE = 65550, + @AV_CODEC_ID_PCM_U24BE = 65551, + 
@AV_CODEC_ID_PCM_S24DAUD = 65552, + @AV_CODEC_ID_PCM_ZORK = 65553, + @AV_CODEC_ID_PCM_S16LE_PLANAR = 65554, + @AV_CODEC_ID_PCM_DVD = 65555, + @AV_CODEC_ID_PCM_F32BE = 65556, + @AV_CODEC_ID_PCM_F32LE = 65557, + @AV_CODEC_ID_PCM_F64BE = 65558, + @AV_CODEC_ID_PCM_F64LE = 65559, + @AV_CODEC_ID_PCM_BLURAY = 65560, + @AV_CODEC_ID_PCM_LXF = 65561, + @AV_CODEC_ID_S302M = 65562, + @AV_CODEC_ID_PCM_S8_PLANAR = 65563, + @AV_CODEC_ID_PCM_S24LE_PLANAR = 65564, + @AV_CODEC_ID_PCM_S32LE_PLANAR = 65565, + @AV_CODEC_ID_PCM_S16BE_PLANAR = 65566, + @AV_CODEC_ID_PCM_S64LE = 65567, + @AV_CODEC_ID_PCM_S64BE = 65568, + @AV_CODEC_ID_PCM_F16LE = 65569, + @AV_CODEC_ID_PCM_F24LE = 65570, + @AV_CODEC_ID_PCM_VIDC = 65571, + @AV_CODEC_ID_PCM_SGA = 65572, + @AV_CODEC_ID_ADPCM_IMA_QT = 69632, + @AV_CODEC_ID_ADPCM_IMA_WAV = 69633, + @AV_CODEC_ID_ADPCM_IMA_DK3 = 69634, + @AV_CODEC_ID_ADPCM_IMA_DK4 = 69635, + @AV_CODEC_ID_ADPCM_IMA_WS = 69636, + @AV_CODEC_ID_ADPCM_IMA_SMJPEG = 69637, + @AV_CODEC_ID_ADPCM_MS = 69638, + @AV_CODEC_ID_ADPCM_4XM = 69639, + @AV_CODEC_ID_ADPCM_XA = 69640, + @AV_CODEC_ID_ADPCM_ADX = 69641, + @AV_CODEC_ID_ADPCM_EA = 69642, + @AV_CODEC_ID_ADPCM_G726 = 69643, + @AV_CODEC_ID_ADPCM_CT = 69644, + @AV_CODEC_ID_ADPCM_SWF = 69645, + @AV_CODEC_ID_ADPCM_YAMAHA = 69646, + @AV_CODEC_ID_ADPCM_SBPRO_4 = 69647, + @AV_CODEC_ID_ADPCM_SBPRO_3 = 69648, + @AV_CODEC_ID_ADPCM_SBPRO_2 = 69649, + @AV_CODEC_ID_ADPCM_THP = 69650, + @AV_CODEC_ID_ADPCM_IMA_AMV = 69651, + @AV_CODEC_ID_ADPCM_EA_R1 = 69652, + @AV_CODEC_ID_ADPCM_EA_R3 = 69653, + @AV_CODEC_ID_ADPCM_EA_R2 = 69654, + @AV_CODEC_ID_ADPCM_IMA_EA_SEAD = 69655, + @AV_CODEC_ID_ADPCM_IMA_EA_EACS = 69656, + @AV_CODEC_ID_ADPCM_EA_XAS = 69657, + @AV_CODEC_ID_ADPCM_EA_MAXIS_XA = 69658, + @AV_CODEC_ID_ADPCM_IMA_ISS = 69659, + @AV_CODEC_ID_ADPCM_G722 = 69660, + @AV_CODEC_ID_ADPCM_IMA_APC = 69661, + @AV_CODEC_ID_ADPCM_VIMA = 69662, + @AV_CODEC_ID_ADPCM_AFC = 69663, + @AV_CODEC_ID_ADPCM_IMA_OKI = 69664, + @AV_CODEC_ID_ADPCM_DTK = 69665, + 
@AV_CODEC_ID_ADPCM_IMA_RAD = 69666, + @AV_CODEC_ID_ADPCM_G726LE = 69667, + @AV_CODEC_ID_ADPCM_THP_LE = 69668, + @AV_CODEC_ID_ADPCM_PSX = 69669, + @AV_CODEC_ID_ADPCM_AICA = 69670, + @AV_CODEC_ID_ADPCM_IMA_DAT4 = 69671, + @AV_CODEC_ID_ADPCM_MTAF = 69672, + @AV_CODEC_ID_ADPCM_AGM = 69673, + @AV_CODEC_ID_ADPCM_ARGO = 69674, + @AV_CODEC_ID_ADPCM_IMA_SSI = 69675, + @AV_CODEC_ID_ADPCM_ZORK = 69676, + @AV_CODEC_ID_ADPCM_IMA_APM = 69677, + @AV_CODEC_ID_ADPCM_IMA_ALP = 69678, + @AV_CODEC_ID_ADPCM_IMA_MTF = 69679, + @AV_CODEC_ID_ADPCM_IMA_CUNNING = 69680, + @AV_CODEC_ID_ADPCM_IMA_MOFLEX = 69681, + @AV_CODEC_ID_ADPCM_IMA_ACORN = 69682, + @AV_CODEC_ID_AMR_NB = 73728, + @AV_CODEC_ID_AMR_WB = 73729, + @AV_CODEC_ID_RA_144 = 77824, + @AV_CODEC_ID_RA_288 = 77825, + @AV_CODEC_ID_ROQ_DPCM = 81920, + @AV_CODEC_ID_INTERPLAY_DPCM = 81921, + @AV_CODEC_ID_XAN_DPCM = 81922, + @AV_CODEC_ID_SOL_DPCM = 81923, + @AV_CODEC_ID_SDX2_DPCM = 81924, + @AV_CODEC_ID_GREMLIN_DPCM = 81925, + @AV_CODEC_ID_DERF_DPCM = 81926, + @AV_CODEC_ID_MP2 = 86016, + /// preferred ID for decoding MPEG audio layer 1, 2 or 3 + @AV_CODEC_ID_MP3 = 86017, + @AV_CODEC_ID_AAC = 86018, + @AV_CODEC_ID_AC3 = 86019, + @AV_CODEC_ID_DTS = 86020, + @AV_CODEC_ID_VORBIS = 86021, + @AV_CODEC_ID_DVAUDIO = 86022, + @AV_CODEC_ID_WMAV1 = 86023, + @AV_CODEC_ID_WMAV2 = 86024, + @AV_CODEC_ID_MACE3 = 86025, + @AV_CODEC_ID_MACE6 = 86026, + @AV_CODEC_ID_VMDAUDIO = 86027, + @AV_CODEC_ID_FLAC = 86028, + @AV_CODEC_ID_MP3ADU = 86029, + @AV_CODEC_ID_MP3ON4 = 86030, + @AV_CODEC_ID_SHORTEN = 86031, + @AV_CODEC_ID_ALAC = 86032, + @AV_CODEC_ID_WESTWOOD_SND1 = 86033, + /// as in Berlin toast format + @AV_CODEC_ID_GSM = 86034, + @AV_CODEC_ID_QDM2 = 86035, + @AV_CODEC_ID_COOK = 86036, + @AV_CODEC_ID_TRUESPEECH = 86037, + @AV_CODEC_ID_TTA = 86038, + @AV_CODEC_ID_SMACKAUDIO = 86039, + @AV_CODEC_ID_QCELP = 86040, + @AV_CODEC_ID_WAVPACK = 86041, + @AV_CODEC_ID_DSICINAUDIO = 86042, + @AV_CODEC_ID_IMC = 86043, + @AV_CODEC_ID_MUSEPACK7 = 86044, + @AV_CODEC_ID_MLP 
= 86045, + @AV_CODEC_ID_GSM_MS = 86046, + @AV_CODEC_ID_ATRAC3 = 86047, + @AV_CODEC_ID_APE = 86048, + @AV_CODEC_ID_NELLYMOSER = 86049, + @AV_CODEC_ID_MUSEPACK8 = 86050, + @AV_CODEC_ID_SPEEX = 86051, + @AV_CODEC_ID_WMAVOICE = 86052, + @AV_CODEC_ID_WMAPRO = 86053, + @AV_CODEC_ID_WMALOSSLESS = 86054, + @AV_CODEC_ID_ATRAC3P = 86055, + @AV_CODEC_ID_EAC3 = 86056, + @AV_CODEC_ID_SIPR = 86057, + @AV_CODEC_ID_MP1 = 86058, + @AV_CODEC_ID_TWINVQ = 86059, + @AV_CODEC_ID_TRUEHD = 86060, + @AV_CODEC_ID_MP4ALS = 86061, + @AV_CODEC_ID_ATRAC1 = 86062, + @AV_CODEC_ID_BINKAUDIO_RDFT = 86063, + @AV_CODEC_ID_BINKAUDIO_DCT = 86064, + @AV_CODEC_ID_AAC_LATM = 86065, + @AV_CODEC_ID_QDMC = 86066, + @AV_CODEC_ID_CELT = 86067, + @AV_CODEC_ID_G723_1 = 86068, + @AV_CODEC_ID_G729 = 86069, + @AV_CODEC_ID_8SVX_EXP = 86070, + @AV_CODEC_ID_8SVX_FIB = 86071, + @AV_CODEC_ID_BMV_AUDIO = 86072, + @AV_CODEC_ID_RALF = 86073, + @AV_CODEC_ID_IAC = 86074, + @AV_CODEC_ID_ILBC = 86075, + @AV_CODEC_ID_OPUS = 86076, + @AV_CODEC_ID_COMFORT_NOISE = 86077, + @AV_CODEC_ID_TAK = 86078, + @AV_CODEC_ID_METASOUND = 86079, + @AV_CODEC_ID_PAF_AUDIO = 86080, + @AV_CODEC_ID_ON2AVC = 86081, + @AV_CODEC_ID_DSS_SP = 86082, + @AV_CODEC_ID_CODEC2 = 86083, + @AV_CODEC_ID_FFWAVESYNTH = 86084, + @AV_CODEC_ID_SONIC = 86085, + @AV_CODEC_ID_SONIC_LS = 86086, + @AV_CODEC_ID_EVRC = 86087, + @AV_CODEC_ID_SMV = 86088, + @AV_CODEC_ID_DSD_LSBF = 86089, + @AV_CODEC_ID_DSD_MSBF = 86090, + @AV_CODEC_ID_DSD_LSBF_PLANAR = 86091, + @AV_CODEC_ID_DSD_MSBF_PLANAR = 86092, + @AV_CODEC_ID_4GV = 86093, + @AV_CODEC_ID_INTERPLAY_ACM = 86094, + @AV_CODEC_ID_XMA1 = 86095, + @AV_CODEC_ID_XMA2 = 86096, + @AV_CODEC_ID_DST = 86097, + @AV_CODEC_ID_ATRAC3AL = 86098, + @AV_CODEC_ID_ATRAC3PAL = 86099, + @AV_CODEC_ID_DOLBY_E = 86100, + @AV_CODEC_ID_APTX = 86101, + @AV_CODEC_ID_APTX_HD = 86102, + @AV_CODEC_ID_SBC = 86103, + @AV_CODEC_ID_ATRAC9 = 86104, + @AV_CODEC_ID_HCOM = 86105, + @AV_CODEC_ID_ACELP_KELVIN = 86106, + @AV_CODEC_ID_MPEGH_3D_AUDIO = 86107, + 
@AV_CODEC_ID_SIREN = 86108, + @AV_CODEC_ID_HCA = 86109, + @AV_CODEC_ID_FASTAUDIO = 86110, + @AV_CODEC_ID_MSNSIREN = 86111, + @AV_CODEC_ID_DFPWM = 86112, + /// A dummy ID pointing at the start of subtitle codecs. + @AV_CODEC_ID_FIRST_SUBTITLE = 94208, + @AV_CODEC_ID_DVD_SUBTITLE = 94208, + @AV_CODEC_ID_DVB_SUBTITLE = 94209, + /// raw UTF-8 text + @AV_CODEC_ID_TEXT = 94210, + @AV_CODEC_ID_XSUB = 94211, + @AV_CODEC_ID_SSA = 94212, + @AV_CODEC_ID_MOV_TEXT = 94213, + @AV_CODEC_ID_HDMV_PGS_SUBTITLE = 94214, + @AV_CODEC_ID_DVB_TELETEXT = 94215, + @AV_CODEC_ID_SRT = 94216, + @AV_CODEC_ID_MICRODVD = 94217, + @AV_CODEC_ID_EIA_608 = 94218, + @AV_CODEC_ID_JACOSUB = 94219, + @AV_CODEC_ID_SAMI = 94220, + @AV_CODEC_ID_REALTEXT = 94221, + @AV_CODEC_ID_STL = 94222, + @AV_CODEC_ID_SUBVIEWER1 = 94223, + @AV_CODEC_ID_SUBVIEWER = 94224, + @AV_CODEC_ID_SUBRIP = 94225, + @AV_CODEC_ID_WEBVTT = 94226, + @AV_CODEC_ID_MPL2 = 94227, + @AV_CODEC_ID_VPLAYER = 94228, + @AV_CODEC_ID_PJS = 94229, + @AV_CODEC_ID_ASS = 94230, + @AV_CODEC_ID_HDMV_TEXT_SUBTITLE = 94231, + @AV_CODEC_ID_TTML = 94232, + @AV_CODEC_ID_ARIB_CAPTION = 94233, + /// A dummy ID pointing at the start of various fake codecs. + @AV_CODEC_ID_FIRST_UNKNOWN = 98304, + @AV_CODEC_ID_TTF = 98304, + /// Contain timestamp estimated through PCR of program stream. 
+ @AV_CODEC_ID_SCTE_35 = 98305, + @AV_CODEC_ID_EPG = 98306, + @AV_CODEC_ID_BINTEXT = 98307, + @AV_CODEC_ID_XBIN = 98308, + @AV_CODEC_ID_IDF = 98309, + @AV_CODEC_ID_OTF = 98310, + @AV_CODEC_ID_SMPTE_KLV = 98311, + @AV_CODEC_ID_DVD_NAV = 98312, + @AV_CODEC_ID_TIMED_ID3 = 98313, + @AV_CODEC_ID_BIN_DATA = 98314, + /// codec_id is not known (like AV_CODEC_ID_NONE) but lavf should attempt to identify it + @AV_CODEC_ID_PROBE = 102400, + /// _FAKE_ codec to indicate a raw MPEG-2 TS stream (only used by libavformat) + @AV_CODEC_ID_MPEG2TS = 131072, + /// _FAKE_ codec to indicate a MPEG-4 Systems stream (only used by libavformat) + @AV_CODEC_ID_MPEG4SYSTEMS = 131073, + /// Dummy codec for streams containing only metadata information. + @AV_CODEC_ID_FFMETADATA = 135168, + /// Passthrough codec, AVFrames wrapped in AVPacket + @AV_CODEC_ID_WRAPPED_AVFRAME = 135169, +} + +/// Chromaticity coordinates of the source primaries. These values match the ones defined by ISO/IEC 23091-2_2019 subclause 8.1 and ITU-T H.273. +public enum AVColorPrimaries : int +{ + @AVCOL_PRI_RESERVED0 = 0, + /// also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B + @AVCOL_PRI_BT709 = 1, + @AVCOL_PRI_UNSPECIFIED = 2, + @AVCOL_PRI_RESERVED = 3, + /// also FCC Title 47 Code of Federal Regulations 73.682 (a)(20) + @AVCOL_PRI_BT470M = 4, + /// also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM + @AVCOL_PRI_BT470BG = 5, + /// also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC + @AVCOL_PRI_SMPTE170M = 6, + /// identical to above, also called "SMPTE C" even though it uses D65 + @AVCOL_PRI_SMPTE240M = 7, + /// colour filters using Illuminant C + @AVCOL_PRI_FILM = 8, + /// ITU-R BT2020 + @AVCOL_PRI_BT2020 = 9, + /// SMPTE ST 428-1 (CIE 1931 XYZ) + @AVCOL_PRI_SMPTE428 = 10, + @AVCOL_PRI_SMPTEST428_1 = 10, + /// SMPTE ST 431-2 (2011) / DCI P3 + @AVCOL_PRI_SMPTE431 = 11, + /// SMPTE ST 432-1 (2010) / P3 D65 / Display P3 + @AVCOL_PRI_SMPTE432 = 12, + /// EBU Tech. 
3213-E (nothing there) / one of JEDEC P22 group phosphors + @AVCOL_PRI_EBU3213 = 22, + @AVCOL_PRI_JEDEC_P22 = 22, + /// Not part of ABI + @AVCOL_PRI_NB = 23, +} + +/// Visual content value range. +public enum AVColorRange : int +{ + @AVCOL_RANGE_UNSPECIFIED = 0, + /// Narrow or limited range content. + @AVCOL_RANGE_MPEG = 1, + /// Full range content. + @AVCOL_RANGE_JPEG = 2, + /// Not part of ABI + @AVCOL_RANGE_NB = 3, +} + +/// YUV colorspace type. These values match the ones defined by ISO/IEC 23091-2_2019 subclause 8.3. +public enum AVColorSpace : int +{ + /// order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1 + @AVCOL_SPC_RGB = 0, + /// also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B + @AVCOL_SPC_BT709 = 1, + @AVCOL_SPC_UNSPECIFIED = 2, + /// reserved for future use by ITU-T and ISO/IEC just like 15-255 are + @AVCOL_SPC_RESERVED = 3, + /// FCC Title 47 Code of Federal Regulations 73.682 (a)(20) + @AVCOL_SPC_FCC = 4, + /// also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601 + @AVCOL_SPC_BT470BG = 5, + /// also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above + @AVCOL_SPC_SMPTE170M = 6, + /// derived from 170M primaries and D65 white point, 170M is derived from BT470 System M's primaries + @AVCOL_SPC_SMPTE240M = 7, + /// used by Dirac / VC-2 and H.264 FRext, see ITU-T SG16 + @AVCOL_SPC_YCGCO = 8, + @AVCOL_SPC_YCOCG = 8, + /// ITU-R BT2020 non-constant luminance system + @AVCOL_SPC_BT2020_NCL = 9, + /// ITU-R BT2020 constant luminance system + @AVCOL_SPC_BT2020_CL = 10, + /// SMPTE 2085, Y'D'zD'x + @AVCOL_SPC_SMPTE2085 = 11, + /// Chromaticity-derived non-constant luminance system + @AVCOL_SPC_CHROMA_DERIVED_NCL = 12, + /// Chromaticity-derived constant luminance system + @AVCOL_SPC_CHROMA_DERIVED_CL = 13, + /// ITU-R BT.2100-0, ICtCp + @AVCOL_SPC_ICTCP = 14, + /// Not part of ABI + @AVCOL_SPC_NB = 15, +} + +/// Color 
Transfer Characteristic. These values match the ones defined by ISO/IEC 23091-2_2019 subclause 8.2. +public enum AVColorTransferCharacteristic : int +{ + @AVCOL_TRC_RESERVED0 = 0, + /// also ITU-R BT1361 + @AVCOL_TRC_BT709 = 1, + @AVCOL_TRC_UNSPECIFIED = 2, + @AVCOL_TRC_RESERVED = 3, + /// also ITU-R BT470M / ITU-R BT1700 625 PAL & SECAM + @AVCOL_TRC_GAMMA22 = 4, + /// also ITU-R BT470BG + @AVCOL_TRC_GAMMA28 = 5, + /// also ITU-R BT601-6 525 or 625 / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC + @AVCOL_TRC_SMPTE170M = 6, + @AVCOL_TRC_SMPTE240M = 7, + /// "Linear transfer characteristics" + @AVCOL_TRC_LINEAR = 8, + /// "Logarithmic transfer characteristic (100:1 range)" + @AVCOL_TRC_LOG = 9, + /// "Logarithmic transfer characteristic (100 * Sqrt(10) : 1 range)" + @AVCOL_TRC_LOG_SQRT = 10, + /// IEC 61966-2-4 + @AVCOL_TRC_IEC61966_2_4 = 11, + /// ITU-R BT1361 Extended Colour Gamut + @AVCOL_TRC_BT1361_ECG = 12, + /// IEC 61966-2-1 (sRGB or sYCC) + @AVCOL_TRC_IEC61966_2_1 = 13, + /// ITU-R BT2020 for 10-bit system + @AVCOL_TRC_BT2020_10 = 14, + /// ITU-R BT2020 for 12-bit system + @AVCOL_TRC_BT2020_12 = 15, + /// SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems + @AVCOL_TRC_SMPTE2084 = 16, + @AVCOL_TRC_SMPTEST2084 = 16, + /// SMPTE ST 428-1 + @AVCOL_TRC_SMPTE428 = 17, + @AVCOL_TRC_SMPTEST428_1 = 17, + /// ARIB STD-B67, known as "Hybrid log-gamma" + @AVCOL_TRC_ARIB_STD_B67 = 18, + /// Not part of ABI + @AVCOL_TRC_NB = 19, +} + +/// Message types used by avdevice_dev_to_app_control_message(). +public enum AVDevToAppMessageType : int +{ + /// Dummy message. + @AV_DEV_TO_APP_NONE = 1313820229, + /// Create window buffer message. + @AV_DEV_TO_APP_CREATE_WINDOW_BUFFER = 1111708229, + /// Prepare window buffer message. + @AV_DEV_TO_APP_PREPARE_WINDOW_BUFFER = 1112560197, + /// Display window buffer message. + @AV_DEV_TO_APP_DISPLAY_WINDOW_BUFFER = 1111771475, + /// Destroy window buffer message. 
+ @AV_DEV_TO_APP_DESTROY_WINDOW_BUFFER = 1111770451, + /// Buffer fullness status messages. + @AV_DEV_TO_APP_BUFFER_OVERFLOW = 1112491596, + /// Buffer fullness status messages. + @AV_DEV_TO_APP_BUFFER_UNDERFLOW = 1112884812, + /// Buffer readable/writable. + @AV_DEV_TO_APP_BUFFER_READABLE = 1112687648, + /// Buffer readable/writable. + @AV_DEV_TO_APP_BUFFER_WRITABLE = 1113018912, + /// Mute state change message. + @AV_DEV_TO_APP_MUTE_STATE_CHANGED = 1129141588, + /// Volume level change message. + @AV_DEV_TO_APP_VOLUME_LEVEL_CHANGED = 1129729868, +} + +public enum AVDiscard : int +{ + /// discard nothing + @AVDISCARD_NONE = -16, + /// discard useless packets like 0 size packets in avi + @AVDISCARD_DEFAULT = 0, + /// discard all non reference + @AVDISCARD_NONREF = 8, + /// discard all bidirectional frames + @AVDISCARD_BIDIR = 16, + /// discard all non intra frames + @AVDISCARD_NONINTRA = 24, + /// discard all frames except keyframes + @AVDISCARD_NONKEY = 32, + /// discard all + @AVDISCARD_ALL = 48, +} + +/// The duration of a video can be estimated through various ways, and this enum can be used to know how the duration was estimated. +public enum AVDurationEstimationMethod : int +{ + /// Duration accurately estimated from PTSes + @AVFMT_DURATION_FROM_PTS = 0, + /// Duration estimated from a stream with a known duration + @AVFMT_DURATION_FROM_STREAM = 1, + /// Duration estimated from bitrate (less accurate) + @AVFMT_DURATION_FROM_BITRATE = 2, +} + +public enum AVFieldOrder : int +{ + @AV_FIELD_UNKNOWN = 0, + @AV_FIELD_PROGRESSIVE = 1, + @AV_FIELD_TT = 2, + @AV_FIELD_BB = 3, + @AV_FIELD_TB = 4, + @AV_FIELD_BT = 5, +} + +/// stage of the initialization of the link properties (dimensions, etc) +public enum AVFilterLink_init_state : int +{ + /// not started + @AVLINK_UNINIT = 0, + /// started, but incomplete + @AVLINK_STARTINIT = 1, + /// complete + @AVLINK_INIT = 2, +} + +/// @{ AVFrame is an abstraction for reference-counted raw multimedia data. 
+public enum AVFrameSideDataType : int +{ + /// The data is the AVPanScan struct defined in libavcodec. + @AV_FRAME_DATA_PANSCAN = 0, + /// ATSC A53 Part 4 Closed Captions. A53 CC bitstream is stored as uint8_t in AVFrameSideData.data. The number of bytes of CC data is AVFrameSideData.size. + @AV_FRAME_DATA_A53_CC = 1, + /// Stereoscopic 3d metadata. The data is the AVStereo3D struct defined in libavutil/stereo3d.h. + @AV_FRAME_DATA_STEREO3D = 2, + /// The data is the AVMatrixEncoding enum defined in libavutil/channel_layout.h. + @AV_FRAME_DATA_MATRIXENCODING = 3, + /// Metadata relevant to a downmix procedure. The data is the AVDownmixInfo struct defined in libavutil/downmix_info.h. + @AV_FRAME_DATA_DOWNMIX_INFO = 4, + /// ReplayGain information in the form of the AVReplayGain struct. + @AV_FRAME_DATA_REPLAYGAIN = 5, + /// This side data contains a 3x3 transformation matrix describing an affine transformation that needs to be applied to the frame for correct presentation. + @AV_FRAME_DATA_DISPLAYMATRIX = 6, + /// Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVActiveFormatDescription enum. + @AV_FRAME_DATA_AFD = 7, + /// Motion vectors exported by some codecs (on demand through the export_mvs flag set in the libavcodec AVCodecContext flags2 option). The data is the AVMotionVector struct defined in libavutil/motion_vector.h. + @AV_FRAME_DATA_MOTION_VECTORS = 8, + /// Recommmends skipping the specified number of samples. This is exported only if the "skip_manual" AVOption is set in libavcodec. This has the same format as AV_PKT_DATA_SKIP_SAMPLES. + @AV_FRAME_DATA_SKIP_SAMPLES = 9, + /// This side data must be associated with an audio frame and corresponds to enum AVAudioServiceType defined in avcodec.h. + @AV_FRAME_DATA_AUDIO_SERVICE_TYPE = 10, + /// Mastering display metadata associated with a video frame. 
The payload is an AVMasteringDisplayMetadata type and contains information about the mastering display color volume. + @AV_FRAME_DATA_MASTERING_DISPLAY_METADATA = 11, + /// The GOP timecode in 25 bit timecode format. Data format is 64-bit integer. This is set on the first frame of a GOP that has a temporal reference of 0. + @AV_FRAME_DATA_GOP_TIMECODE = 12, + /// The data represents the AVSphericalMapping structure defined in libavutil/spherical.h. + @AV_FRAME_DATA_SPHERICAL = 13, + /// Content light level (based on CTA-861.3). This payload contains data in the form of the AVContentLightMetadata struct. + @AV_FRAME_DATA_CONTENT_LIGHT_LEVEL = 14, + /// The data contains an ICC profile as an opaque octet buffer following the format described by ISO 15076-1 with an optional name defined in the metadata key entry "name". + @AV_FRAME_DATA_ICC_PROFILE = 15, + /// Timecode which conforms to SMPTE ST 12-1. The data is an array of 4 uint32_t where the first uint32_t describes how many (1-3) of the other timecodes are used. The timecode format is described in the documentation of av_timecode_get_smpte_from_framenum() function in libavutil/timecode.h. + @AV_FRAME_DATA_S12M_TIMECODE = 16, + /// HDR dynamic metadata associated with a video frame. The payload is an AVDynamicHDRPlus type and contains information for color volume transform - application 4 of SMPTE 2094-40:2016 standard. + @AV_FRAME_DATA_DYNAMIC_HDR_PLUS = 17, + /// Regions Of Interest, the data is an array of AVRegionOfInterest type, the number of array element is implied by AVFrameSideData.size / AVRegionOfInterest.self_size. + @AV_FRAME_DATA_REGIONS_OF_INTEREST = 18, + /// Encoding parameters for a video frame, as described by AVVideoEncParams. + @AV_FRAME_DATA_VIDEO_ENC_PARAMS = 19, + /// User data unregistered metadata associated with a video frame. 
This is the H.26[45] UDU SEI message, and shouldn't be used for any other purpose The data is stored as uint8_t in AVFrameSideData.data which is 16 bytes of uuid_iso_iec_11578 followed by AVFrameSideData.size - 16 bytes of user_data_payload_byte. + @AV_FRAME_DATA_SEI_UNREGISTERED = 20, + /// Film grain parameters for a frame, described by AVFilmGrainParams. Must be present for every frame which should have film grain applied. + @AV_FRAME_DATA_FILM_GRAIN_PARAMS = 21, + /// Bounding boxes for object detection and classification, as described by AVDetectionBBoxHeader. + @AV_FRAME_DATA_DETECTION_BBOXES = 22, + /// Dolby Vision RPU raw data, suitable for passing to x265 or other libraries. Array of uint8_t, with NAL emulation bytes intact. + @AV_FRAME_DATA_DOVI_RPU_BUFFER = 23, + /// Parsed Dolby Vision metadata, suitable for passing to a software implementation. The payload is the AVDOVIMetadata struct defined in libavutil/dovi_meta.h. + @AV_FRAME_DATA_DOVI_METADATA = 24, + /// HDR Vivid dynamic metadata associated with a video frame. The payload is an AVDynamicHDRVivid type and contains information for color volume transform - CUVA 005.1-2021. + @AV_FRAME_DATA_DYNAMIC_HDR_VIVID = 25, +} + +/// Option for overlapping elliptical pixel selectors in an image. +public enum AVHDRPlusOverlapProcessOption : int +{ + @AV_HDR_PLUS_OVERLAP_PROCESS_WEIGHTED_AVERAGING = 0, + @AV_HDR_PLUS_OVERLAP_PROCESS_LAYERING = 1, +} + +public enum AVHWDeviceType : int +{ + @AV_HWDEVICE_TYPE_NONE = 0, + @AV_HWDEVICE_TYPE_VDPAU = 1, + @AV_HWDEVICE_TYPE_CUDA = 2, + @AV_HWDEVICE_TYPE_VAAPI = 3, + @AV_HWDEVICE_TYPE_DXVA2 = 4, + @AV_HWDEVICE_TYPE_QSV = 5, + @AV_HWDEVICE_TYPE_VIDEOTOOLBOX = 6, + @AV_HWDEVICE_TYPE_D3D11VA = 7, + @AV_HWDEVICE_TYPE_DRM = 8, + @AV_HWDEVICE_TYPE_OPENCL = 9, + @AV_HWDEVICE_TYPE_MEDIACODEC = 10, + @AV_HWDEVICE_TYPE_VULKAN = 11, +} + +public enum AVHWFrameTransferDirection : int +{ + /// Transfer the data from the queried hw frame. 
+ @AV_HWFRAME_TRANSFER_DIRECTION_FROM = 0, + /// Transfer the data to the queried hw frame. + @AV_HWFRAME_TRANSFER_DIRECTION_TO = 1, +} + +/// Different data types that can be returned via the AVIO write_data_type callback. +public enum AVIODataMarkerType : int +{ + /// Header data; this needs to be present for the stream to be decodeable. + @AVIO_DATA_MARKER_HEADER = 0, + /// A point in the output bytestream where a decoder can start decoding (i.e. a keyframe). A demuxer/decoder given the data flagged with AVIO_DATA_MARKER_HEADER, followed by any AVIO_DATA_MARKER_SYNC_POINT, should give decodeable results. + @AVIO_DATA_MARKER_SYNC_POINT = 1, + /// A point in the output bytestream where a demuxer can start parsing (for non self synchronizing bytestream formats). That is, any non-keyframe packet start point. + @AVIO_DATA_MARKER_BOUNDARY_POINT = 2, + /// This is any, unlabelled data. It can either be a muxer not marking any positions at all, it can be an actual boundary/sync point that the muxer chooses not to mark, or a later part of a packet/fragment that is cut into multiple write callbacks due to limited IO buffer size. + @AVIO_DATA_MARKER_UNKNOWN = 3, + /// Trailer data, which doesn't contain actual content, but only for finalizing the output file. + @AVIO_DATA_MARKER_TRAILER = 4, + /// A point in the output bytestream where the underlying AVIOContext might flush the buffer depending on latency or buffering requirements. Typically means the end of a packet. + @AVIO_DATA_MARKER_FLUSH_POINT = 5, +} + +/// Directory entry types. 
+public enum AVIODirEntryType : int +{ + @AVIO_ENTRY_UNKNOWN = 0, + @AVIO_ENTRY_BLOCK_DEVICE = 1, + @AVIO_ENTRY_CHARACTER_DEVICE = 2, + @AVIO_ENTRY_DIRECTORY = 3, + @AVIO_ENTRY_NAMED_PIPE = 4, + @AVIO_ENTRY_SYMBOLIC_LINK = 5, + @AVIO_ENTRY_SOCKET = 6, + @AVIO_ENTRY_FILE = 7, + @AVIO_ENTRY_SERVER = 8, + @AVIO_ENTRY_SHARE = 9, + @AVIO_ENTRY_WORKGROUP = 10, +} + +public enum AVMatrixEncoding : int +{ + @AV_MATRIX_ENCODING_NONE = 0, + @AV_MATRIX_ENCODING_DOLBY = 1, + @AV_MATRIX_ENCODING_DPLII = 2, + @AV_MATRIX_ENCODING_DPLIIX = 3, + @AV_MATRIX_ENCODING_DPLIIZ = 4, + @AV_MATRIX_ENCODING_DOLBYEX = 5, + @AV_MATRIX_ENCODING_DOLBYHEADPHONE = 6, + @AV_MATRIX_ENCODING_NB = 7, +} + +/// Media Type +public enum AVMediaType : int +{ + /// Usually treated as AVMEDIA_TYPE_DATA + @AVMEDIA_TYPE_UNKNOWN = -1, + @AVMEDIA_TYPE_VIDEO = 0, + @AVMEDIA_TYPE_AUDIO = 1, + /// Opaque data information usually continuous + @AVMEDIA_TYPE_DATA = 2, + @AVMEDIA_TYPE_SUBTITLE = 3, + /// Opaque data information usually sparse + @AVMEDIA_TYPE_ATTACHMENT = 4, + @AVMEDIA_TYPE_NB = 5, +} + +/// @{ AVOptions provide a generic system to declare options on arbitrary structs ("objects"). An option can have a help text, a type and a range of possible values. Options may then be enumerated, read and written to. 
+public enum AVOptionType : int +{ + @AV_OPT_TYPE_FLAGS = 0, + @AV_OPT_TYPE_INT = 1, + @AV_OPT_TYPE_INT64 = 2, + @AV_OPT_TYPE_DOUBLE = 3, + @AV_OPT_TYPE_FLOAT = 4, + @AV_OPT_TYPE_STRING = 5, + @AV_OPT_TYPE_RATIONAL = 6, + /// offset must point to a pointer immediately followed by an int for the length + @AV_OPT_TYPE_BINARY = 7, + @AV_OPT_TYPE_DICT = 8, + @AV_OPT_TYPE_UINT64 = 9, + @AV_OPT_TYPE_CONST = 10, + /// offset must point to two consecutive integers + @AV_OPT_TYPE_IMAGE_SIZE = 11, + @AV_OPT_TYPE_PIXEL_FMT = 12, + @AV_OPT_TYPE_SAMPLE_FMT = 13, + /// offset must point to AVRational + @AV_OPT_TYPE_VIDEO_RATE = 14, + @AV_OPT_TYPE_DURATION = 15, + @AV_OPT_TYPE_COLOR = 16, + @AV_OPT_TYPE_CHANNEL_LAYOUT = 17, + @AV_OPT_TYPE_BOOL = 18, + @AV_OPT_TYPE_CHLAYOUT = 19, +} + +/// Types and functions for working with AVPacket. @{ +public enum AVPacketSideDataType : int +{ + /// An AV_PKT_DATA_PALETTE side data packet contains exactly AVPALETTE_SIZE bytes worth of palette. This side data signals that a new palette is present. + @AV_PKT_DATA_PALETTE = 0, + /// The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format that the extradata buffer was changed and the receiving side should act upon it appropriately. The new extradata is embedded in the side data buffer and should be immediately used for processing the current frame or packet. + @AV_PKT_DATA_NEW_EXTRADATA = 1, + /// An AV_PKT_DATA_PARAM_CHANGE side data packet is laid out as follows: + @AV_PKT_DATA_PARAM_CHANGE = 2, + /// An AV_PKT_DATA_H263_MB_INFO side data packet contains a number of structures with info about macroblocks relevant to splitting the packet into smaller packets on macroblock edges (e.g. as for RFC 2190). That is, it does not necessarily contain info about all macroblocks, as long as the distance between macroblocks in the info is smaller than the target payload size. 
Each MB info structure is 12 bytes, and is laid out as follows: + @AV_PKT_DATA_H263_MB_INFO = 3, + /// This side data should be associated with an audio stream and contains ReplayGain information in form of the AVReplayGain struct. + @AV_PKT_DATA_REPLAYGAIN = 4, + /// This side data contains a 3x3 transformation matrix describing an affine transformation that needs to be applied to the decoded video frames for correct presentation. + @AV_PKT_DATA_DISPLAYMATRIX = 5, + /// This side data should be associated with a video stream and contains Stereoscopic 3D information in form of the AVStereo3D struct. + @AV_PKT_DATA_STEREO3D = 6, + /// This side data should be associated with an audio stream and corresponds to enum AVAudioServiceType. + @AV_PKT_DATA_AUDIO_SERVICE_TYPE = 7, + /// This side data contains quality related information from the encoder. + @AV_PKT_DATA_QUALITY_STATS = 8, + /// This side data contains an integer value representing the stream index of a "fallback" track. A fallback track indicates an alternate track to use when the current track can not be decoded for some reason. e.g. no decoder available for codec. + @AV_PKT_DATA_FALLBACK_TRACK = 9, + /// This side data corresponds to the AVCPBProperties struct. + @AV_PKT_DATA_CPB_PROPERTIES = 10, + /// Recommmends skipping the specified number of samples + @AV_PKT_DATA_SKIP_SAMPLES = 11, + /// An AV_PKT_DATA_JP_DUALMONO side data packet indicates that the packet may contain "dual mono" audio specific to Japanese DTV and if it is true, recommends only the selected channel to be used. + @AV_PKT_DATA_JP_DUALMONO = 12, + /// A list of zero terminated key/value strings. There is no end marker for the list, so it is required to rely on the side data size to stop. + @AV_PKT_DATA_STRINGS_METADATA = 13, + /// Subtitle event position + @AV_PKT_DATA_SUBTITLE_POSITION = 14, + /// Data found in BlockAdditional element of matroska container. 
There is no end marker for the data, so it is required to rely on the side data size to recognize the end. 8 byte id (as found in BlockAddId) followed by data. + @AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL = 15, + /// The optional first identifier line of a WebVTT cue. + @AV_PKT_DATA_WEBVTT_IDENTIFIER = 16, + /// The optional settings (rendering instructions) that immediately follow the timestamp specifier of a WebVTT cue. + @AV_PKT_DATA_WEBVTT_SETTINGS = 17, + /// A list of zero terminated key/value strings. There is no end marker for the list, so it is required to rely on the side data size to stop. This side data includes updated metadata which appeared in the stream. + @AV_PKT_DATA_METADATA_UPDATE = 18, + /// MPEGTS stream ID as uint8_t, this is required to pass the stream ID information from the demuxer to the corresponding muxer. + @AV_PKT_DATA_MPEGTS_STREAM_ID = 19, + /// Mastering display metadata (based on SMPTE-2086:2014). This metadata should be associated with a video stream and contains data in the form of the AVMasteringDisplayMetadata struct. + @AV_PKT_DATA_MASTERING_DISPLAY_METADATA = 20, + /// This side data should be associated with a video stream and corresponds to the AVSphericalMapping structure. + @AV_PKT_DATA_SPHERICAL = 21, + /// Content light level (based on CTA-861.3). This metadata should be associated with a video stream and contains data in the form of the AVContentLightMetadata struct. + @AV_PKT_DATA_CONTENT_LIGHT_LEVEL = 22, + /// ATSC A53 Part 4 Closed Captions. This metadata should be associated with a video stream. A53 CC bitstream is stored as uint8_t in AVPacketSideData.data. The number of bytes of CC data is AVPacketSideData.size. + @AV_PKT_DATA_A53_CC = 23, + /// This side data is encryption initialization data. The format is not part of ABI, use av_encryption_init_info_* methods to access. + @AV_PKT_DATA_ENCRYPTION_INIT_INFO = 24, + /// This side data contains encryption info for how to decrypt the packet. 
The format is not part of ABI, use av_encryption_info_* methods to access. + @AV_PKT_DATA_ENCRYPTION_INFO = 25, + /// Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVActiveFormatDescription enum. + @AV_PKT_DATA_AFD = 26, + /// Producer Reference Time data corresponding to the AVProducerReferenceTime struct, usually exported by some encoders (on demand through the prft flag set in the AVCodecContext export_side_data field). + @AV_PKT_DATA_PRFT = 27, + /// ICC profile data consisting of an opaque octet buffer following the format described by ISO 15076-1. + @AV_PKT_DATA_ICC_PROFILE = 28, + /// DOVI configuration ref: dolby-vision-bitstreams-within-the-iso-base-media-file-format-v2.1.2, section 2.2 dolby-vision-bitstreams-in-mpeg-2-transport-stream-multiplex-v1.2, section 3.3 Tags are stored in struct AVDOVIDecoderConfigurationRecord. + @AV_PKT_DATA_DOVI_CONF = 29, + /// Timecode which conforms to SMPTE ST 12-1:2014. The data is an array of 4 uint32_t where the first uint32_t describes how many (1-3) of the other timecodes are used. The timecode format is described in the documentation of av_timecode_get_smpte_from_framenum() function in libavutil/timecode.h. + @AV_PKT_DATA_S12M_TIMECODE = 30, + /// HDR10+ dynamic metadata associated with a video frame. The metadata is in the form of the AVDynamicHDRPlus struct and contains information for color volume transform - application 4 of SMPTE 2094-40:2016 standard. + @AV_PKT_DATA_DYNAMIC_HDR10_PLUS = 31, + /// The number of side data types. This is not part of the public API/ABI in the sense that it may change when new side data types are added. This must stay the last enum value. If its value becomes huge, some code using it needs to be updated as it assumes it to be smaller than other limits. 
+ @AV_PKT_DATA_NB = 32, +} + +/// @{ +public enum AVPictureStructure : int +{ + @AV_PICTURE_STRUCTURE_UNKNOWN = 0, + @AV_PICTURE_STRUCTURE_TOP_FIELD = 1, + @AV_PICTURE_STRUCTURE_BOTTOM_FIELD = 2, + @AV_PICTURE_STRUCTURE_FRAME = 3, +} + +/// @} @} +public enum AVPictureType : int +{ + /// Undefined + @AV_PICTURE_TYPE_NONE = 0, + /// Intra + @AV_PICTURE_TYPE_I = 1, + /// Predicted + @AV_PICTURE_TYPE_P = 2, + /// Bi-dir predicted + @AV_PICTURE_TYPE_B = 3, + /// S(GMC)-VOP MPEG-4 + @AV_PICTURE_TYPE_S = 4, + /// Switching Intra + @AV_PICTURE_TYPE_SI = 5, + /// Switching Predicted + @AV_PICTURE_TYPE_SP = 6, + /// BI type + @AV_PICTURE_TYPE_BI = 7, +} + +/// Pixel format. +public enum AVPixelFormat : int +{ + @AV_PIX_FMT_NONE = -1, + /// planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples) + @AV_PIX_FMT_YUV420P = 0, + /// packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr + @AV_PIX_FMT_YUYV422 = 1, + /// packed RGB 8:8:8, 24bpp, RGBRGB... + @AV_PIX_FMT_RGB24 = 2, + /// packed RGB 8:8:8, 24bpp, BGRBGR... 
+ @AV_PIX_FMT_BGR24 = 3, + /// planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples) + @AV_PIX_FMT_YUV422P = 4, + /// planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples) + @AV_PIX_FMT_YUV444P = 5, + /// planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples) + @AV_PIX_FMT_YUV410P = 6, + /// planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) + @AV_PIX_FMT_YUV411P = 7, + /// Y , 8bpp + @AV_PIX_FMT_GRAY8 = 8, + /// Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb + @AV_PIX_FMT_MONOWHITE = 9, + /// Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb + @AV_PIX_FMT_MONOBLACK = 10, + /// 8 bits with AV_PIX_FMT_RGB32 palette + @AV_PIX_FMT_PAL8 = 11, + /// planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting color_range + @AV_PIX_FMT_YUVJ420P = 12, + /// planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting color_range + @AV_PIX_FMT_YUVJ422P = 13, + /// planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting color_range + @AV_PIX_FMT_YUVJ444P = 14, + /// packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1 + @AV_PIX_FMT_UYVY422 = 15, + /// packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3 + @AV_PIX_FMT_UYYVYY411 = 16, + /// packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb) + @AV_PIX_FMT_BGR8 = 17, + /// packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits + @AV_PIX_FMT_BGR4 = 18, + /// packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb) + @AV_PIX_FMT_BGR4_BYTE = 19, + /// packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb) + @AV_PIX_FMT_RGB8 = 20, + /// packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits + @AV_PIX_FMT_RGB4 = 21, + /// packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb) + 
@AV_PIX_FMT_RGB4_BYTE = 22, + /// planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V) + @AV_PIX_FMT_NV12 = 23, + /// as above, but U and V bytes are swapped + @AV_PIX_FMT_NV21 = 24, + /// packed ARGB 8:8:8:8, 32bpp, ARGBARGB... + @AV_PIX_FMT_ARGB = 25, + /// packed RGBA 8:8:8:8, 32bpp, RGBARGBA... + @AV_PIX_FMT_RGBA = 26, + /// packed ABGR 8:8:8:8, 32bpp, ABGRABGR... + @AV_PIX_FMT_ABGR = 27, + /// packed BGRA 8:8:8:8, 32bpp, BGRABGRA... + @AV_PIX_FMT_BGRA = 28, + /// Y , 16bpp, big-endian + @AV_PIX_FMT_GRAY16BE = 29, + /// Y , 16bpp, little-endian + @AV_PIX_FMT_GRAY16LE = 30, + /// planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples) + @AV_PIX_FMT_YUV440P = 31, + /// planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range + @AV_PIX_FMT_YUVJ440P = 32, + /// planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples) + @AV_PIX_FMT_YUVA420P = 33, + /// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian + @AV_PIX_FMT_RGB48BE = 34, + /// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian + @AV_PIX_FMT_RGB48LE = 35, + /// packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian + @AV_PIX_FMT_RGB565BE = 36, + /// packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian + @AV_PIX_FMT_RGB565LE = 37, + /// packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian , X=unused/undefined + @AV_PIX_FMT_RGB555BE = 38, + /// packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined + @AV_PIX_FMT_RGB555LE = 39, + /// packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian + @AV_PIX_FMT_BGR565BE = 40, + /// packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian + @AV_PIX_FMT_BGR565LE = 41, + /// packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), big-endian , X=unused/undefined + @AV_PIX_FMT_BGR555BE = 
42, + /// packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), little-endian, X=unused/undefined + @AV_PIX_FMT_BGR555LE = 43, + /// Hardware acceleration through VA-API, data[3] contains a VASurfaceID. + @AV_PIX_FMT_VAAPI = 44, + /// planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + @AV_PIX_FMT_YUV420P16LE = 45, + /// planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + @AV_PIX_FMT_YUV420P16BE = 46, + /// planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + @AV_PIX_FMT_YUV422P16LE = 47, + /// planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + @AV_PIX_FMT_YUV422P16BE = 48, + /// planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + @AV_PIX_FMT_YUV444P16LE = 49, + /// planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + @AV_PIX_FMT_YUV444P16BE = 50, + /// HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer + @AV_PIX_FMT_DXVA2_VLD = 51, + /// packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), little-endian, X=unused/undefined + @AV_PIX_FMT_RGB444LE = 52, + /// packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), big-endian, X=unused/undefined + @AV_PIX_FMT_RGB444BE = 53, + /// packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), little-endian, X=unused/undefined + @AV_PIX_FMT_BGR444LE = 54, + /// packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), big-endian, X=unused/undefined + @AV_PIX_FMT_BGR444BE = 55, + /// 8 bits gray, 8 bits alpha + @AV_PIX_FMT_YA8 = 56, + /// alias for AV_PIX_FMT_YA8 + @AV_PIX_FMT_Y400A = 56, + /// alias for AV_PIX_FMT_YA8 + @AV_PIX_FMT_GRAY8A = 56, + /// packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian + @AV_PIX_FMT_BGR48BE = 57, + /// packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian + @AV_PIX_FMT_BGR48LE = 58, + /// planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 
2x2 Y samples), big-endian + @AV_PIX_FMT_YUV420P9BE = 59, + /// planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + @AV_PIX_FMT_YUV420P9LE = 60, + /// planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + @AV_PIX_FMT_YUV420P10BE = 61, + /// planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + @AV_PIX_FMT_YUV420P10LE = 62, + /// planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + @AV_PIX_FMT_YUV422P10BE = 63, + /// planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + @AV_PIX_FMT_YUV422P10LE = 64, + /// planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + @AV_PIX_FMT_YUV444P9BE = 65, + /// planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + @AV_PIX_FMT_YUV444P9LE = 66, + /// planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + @AV_PIX_FMT_YUV444P10BE = 67, + /// planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + @AV_PIX_FMT_YUV444P10LE = 68, + /// planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + @AV_PIX_FMT_YUV422P9BE = 69, + /// planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + @AV_PIX_FMT_YUV422P9LE = 70, + /// planar GBR 4:4:4 24bpp + @AV_PIX_FMT_GBRP = 71, + @AV_PIX_FMT_GBR24P = 71, + /// planar GBR 4:4:4 27bpp, big-endian + @AV_PIX_FMT_GBRP9BE = 72, + /// planar GBR 4:4:4 27bpp, little-endian + @AV_PIX_FMT_GBRP9LE = 73, + /// planar GBR 4:4:4 30bpp, big-endian + @AV_PIX_FMT_GBRP10BE = 74, + /// planar GBR 4:4:4 30bpp, little-endian + @AV_PIX_FMT_GBRP10LE = 75, + /// planar GBR 4:4:4 48bpp, big-endian + @AV_PIX_FMT_GBRP16BE = 76, + /// planar GBR 4:4:4 48bpp, little-endian + @AV_PIX_FMT_GBRP16LE = 77, + /// planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples) + @AV_PIX_FMT_YUVA422P = 78, + /// planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples) + 
@AV_PIX_FMT_YUVA444P = 79, + /// planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), big-endian + @AV_PIX_FMT_YUVA420P9BE = 80, + /// planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), little-endian + @AV_PIX_FMT_YUVA420P9LE = 81, + /// planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), big-endian + @AV_PIX_FMT_YUVA422P9BE = 82, + /// planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), little-endian + @AV_PIX_FMT_YUVA422P9LE = 83, + /// planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), big-endian + @AV_PIX_FMT_YUVA444P9BE = 84, + /// planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), little-endian + @AV_PIX_FMT_YUVA444P9LE = 85, + /// planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian) + @AV_PIX_FMT_YUVA420P10BE = 86, + /// planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian) + @AV_PIX_FMT_YUVA420P10LE = 87, + /// planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian) + @AV_PIX_FMT_YUVA422P10BE = 88, + /// planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian) + @AV_PIX_FMT_YUVA422P10LE = 89, + /// planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian) + @AV_PIX_FMT_YUVA444P10BE = 90, + /// planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian) + @AV_PIX_FMT_YUVA444P10LE = 91, + /// planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian) + @AV_PIX_FMT_YUVA420P16BE = 92, + /// planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian) + @AV_PIX_FMT_YUVA420P16LE = 93, + /// planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian) + @AV_PIX_FMT_YUVA422P16BE = 94, + /// planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian) + @AV_PIX_FMT_YUVA422P16LE = 95, + /// planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian) + 
@AV_PIX_FMT_YUVA444P16BE = 96, + /// planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian) + @AV_PIX_FMT_YUVA444P16LE = 97, + /// HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface + @AV_PIX_FMT_VDPAU = 98, + /// packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as little-endian, the 4 lower bits are set to 0 + @AV_PIX_FMT_XYZ12LE = 99, + /// packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as big-endian, the 4 lower bits are set to 0 + @AV_PIX_FMT_XYZ12BE = 100, + /// interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples) + @AV_PIX_FMT_NV16 = 101, + /// interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + @AV_PIX_FMT_NV20LE = 102, + /// interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + @AV_PIX_FMT_NV20BE = 103, + /// packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian + @AV_PIX_FMT_RGBA64BE = 104, + /// packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian + @AV_PIX_FMT_RGBA64LE = 105, + /// packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian + @AV_PIX_FMT_BGRA64BE = 106, + /// packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian + @AV_PIX_FMT_BGRA64LE = 107, + /// packed YUV 4:2:2, 16bpp, Y0 Cr Y1 Cb + @AV_PIX_FMT_YVYU422 = 108, + /// 16 bits gray, 16 bits alpha (big-endian) + @AV_PIX_FMT_YA16BE = 109, + /// 16 bits gray, 16 bits alpha (little-endian) + @AV_PIX_FMT_YA16LE = 110, + /// planar GBRA 4:4:4:4 32bpp + @AV_PIX_FMT_GBRAP = 111, + /// planar GBRA 4:4:4:4 64bpp, big-endian + @AV_PIX_FMT_GBRAP16BE = 112, + /// planar GBRA 4:4:4:4 64bpp, little-endian + 
@AV_PIX_FMT_GBRAP16LE = 113, + /// HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure. + @AV_PIX_FMT_QSV = 114, + /// HW acceleration though MMAL, data[3] contains a pointer to the MMAL_BUFFER_HEADER_T structure. + @AV_PIX_FMT_MMAL = 115, + /// HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView pointer + @AV_PIX_FMT_D3D11VA_VLD = 116, + /// HW acceleration through CUDA. data[i] contain CUdeviceptr pointers exactly as for system memory frames. + @AV_PIX_FMT_CUDA = 117, + /// packed RGB 8:8:8, 32bpp, XRGBXRGB... X=unused/undefined + @AV_PIX_FMT_0RGB = 118, + /// packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined + @AV_PIX_FMT_RGB0 = 119, + /// packed BGR 8:8:8, 32bpp, XBGRXBGR... X=unused/undefined + @AV_PIX_FMT_0BGR = 120, + /// packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined + @AV_PIX_FMT_BGR0 = 121, + /// planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + @AV_PIX_FMT_YUV420P12BE = 122, + /// planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + @AV_PIX_FMT_YUV420P12LE = 123, + /// planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian + @AV_PIX_FMT_YUV420P14BE = 124, + /// planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian + @AV_PIX_FMT_YUV420P14LE = 125, + /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + @AV_PIX_FMT_YUV422P12BE = 126, + /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + @AV_PIX_FMT_YUV422P12LE = 127, + /// planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian + @AV_PIX_FMT_YUV422P14BE = 128, + /// planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian + @AV_PIX_FMT_YUV422P14LE = 129, + /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + @AV_PIX_FMT_YUV444P12BE = 130, + /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y 
samples), little-endian + @AV_PIX_FMT_YUV444P12LE = 131, + /// planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian + @AV_PIX_FMT_YUV444P14BE = 132, + /// planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian + @AV_PIX_FMT_YUV444P14LE = 133, + /// planar GBR 4:4:4 36bpp, big-endian + @AV_PIX_FMT_GBRP12BE = 134, + /// planar GBR 4:4:4 36bpp, little-endian + @AV_PIX_FMT_GBRP12LE = 135, + /// planar GBR 4:4:4 42bpp, big-endian + @AV_PIX_FMT_GBRP14BE = 136, + /// planar GBR 4:4:4 42bpp, little-endian + @AV_PIX_FMT_GBRP14LE = 137, + /// planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV411P and setting color_range + @AV_PIX_FMT_YUVJ411P = 138, + /// bayer, BGBG..(odd line), GRGR..(even line), 8-bit samples + @AV_PIX_FMT_BAYER_BGGR8 = 139, + /// bayer, RGRG..(odd line), GBGB..(even line), 8-bit samples + @AV_PIX_FMT_BAYER_RGGB8 = 140, + /// bayer, GBGB..(odd line), RGRG..(even line), 8-bit samples + @AV_PIX_FMT_BAYER_GBRG8 = 141, + /// bayer, GRGR..(odd line), BGBG..(even line), 8-bit samples + @AV_PIX_FMT_BAYER_GRBG8 = 142, + /// bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, little-endian + @AV_PIX_FMT_BAYER_BGGR16LE = 143, + /// bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, big-endian + @AV_PIX_FMT_BAYER_BGGR16BE = 144, + /// bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, little-endian + @AV_PIX_FMT_BAYER_RGGB16LE = 145, + /// bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, big-endian + @AV_PIX_FMT_BAYER_RGGB16BE = 146, + /// bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, little-endian + @AV_PIX_FMT_BAYER_GBRG16LE = 147, + /// bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, big-endian + @AV_PIX_FMT_BAYER_GBRG16BE = 148, + /// bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, little-endian + @AV_PIX_FMT_BAYER_GRBG16LE = 149, + /// bayer, GRGR..(odd line), BGBG..(even line), 
16-bit samples, big-endian + @AV_PIX_FMT_BAYER_GRBG16BE = 150, + /// XVideo Motion Acceleration via common packet passing + @AV_PIX_FMT_XVMC = 151, + /// planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian + @AV_PIX_FMT_YUV440P10LE = 152, + /// planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian + @AV_PIX_FMT_YUV440P10BE = 153, + /// planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian + @AV_PIX_FMT_YUV440P12LE = 154, + /// planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian + @AV_PIX_FMT_YUV440P12BE = 155, + /// packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), little-endian + @AV_PIX_FMT_AYUV64LE = 156, + /// packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), big-endian + @AV_PIX_FMT_AYUV64BE = 157, + /// hardware decoding through Videotoolbox + @AV_PIX_FMT_VIDEOTOOLBOX = 158, + /// like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, little-endian + @AV_PIX_FMT_P010LE = 159, + /// like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, big-endian + @AV_PIX_FMT_P010BE = 160, + /// planar GBR 4:4:4:4 48bpp, big-endian + @AV_PIX_FMT_GBRAP12BE = 161, + /// planar GBR 4:4:4:4 48bpp, little-endian + @AV_PIX_FMT_GBRAP12LE = 162, + /// planar GBR 4:4:4:4 40bpp, big-endian + @AV_PIX_FMT_GBRAP10BE = 163, + /// planar GBR 4:4:4:4 40bpp, little-endian + @AV_PIX_FMT_GBRAP10LE = 164, + /// hardware decoding through MediaCodec + @AV_PIX_FMT_MEDIACODEC = 165, + /// Y , 12bpp, big-endian + @AV_PIX_FMT_GRAY12BE = 166, + /// Y , 12bpp, little-endian + @AV_PIX_FMT_GRAY12LE = 167, + /// Y , 10bpp, big-endian + @AV_PIX_FMT_GRAY10BE = 168, + /// Y , 10bpp, little-endian + @AV_PIX_FMT_GRAY10LE = 169, + /// like NV12, with 16bpp per component, little-endian + @AV_PIX_FMT_P016LE = 170, + /// like NV12, with 16bpp per component, big-endian + @AV_PIX_FMT_P016BE = 171, + /// Hardware surfaces for Direct3D11. 
+ @AV_PIX_FMT_D3D11 = 172, + /// Y , 9bpp, big-endian + @AV_PIX_FMT_GRAY9BE = 173, + /// Y , 9bpp, little-endian + @AV_PIX_FMT_GRAY9LE = 174, + /// IEEE-754 single precision planar GBR 4:4:4, 96bpp, big-endian + @AV_PIX_FMT_GBRPF32BE = 175, + /// IEEE-754 single precision planar GBR 4:4:4, 96bpp, little-endian + @AV_PIX_FMT_GBRPF32LE = 176, + /// IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, big-endian + @AV_PIX_FMT_GBRAPF32BE = 177, + /// IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, little-endian + @AV_PIX_FMT_GBRAPF32LE = 178, + /// DRM-managed buffers exposed through PRIME buffer sharing. + @AV_PIX_FMT_DRM_PRIME = 179, + /// Hardware surfaces for OpenCL. + @AV_PIX_FMT_OPENCL = 180, + /// Y , 14bpp, big-endian + @AV_PIX_FMT_GRAY14BE = 181, + /// Y , 14bpp, little-endian + @AV_PIX_FMT_GRAY14LE = 182, + /// IEEE-754 single precision Y, 32bpp, big-endian + @AV_PIX_FMT_GRAYF32BE = 183, + /// IEEE-754 single precision Y, 32bpp, little-endian + @AV_PIX_FMT_GRAYF32LE = 184, + /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, big-endian + @AV_PIX_FMT_YUVA422P12BE = 185, + /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, little-endian + @AV_PIX_FMT_YUVA422P12LE = 186, + /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, big-endian + @AV_PIX_FMT_YUVA444P12BE = 187, + /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, little-endian + @AV_PIX_FMT_YUVA444P12LE = 188, + /// planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V) + @AV_PIX_FMT_NV24 = 189, + /// as above, but U and V bytes are swapped + @AV_PIX_FMT_NV42 = 190, + /// Vulkan hardware images. 
+ @AV_PIX_FMT_VULKAN = 191, + /// packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, big-endian + @AV_PIX_FMT_Y210BE = 192, + /// packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, little-endian + @AV_PIX_FMT_Y210LE = 193, + /// packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), little-endian, X=unused/undefined + @AV_PIX_FMT_X2RGB10LE = 194, + /// packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), big-endian, X=unused/undefined + @AV_PIX_FMT_X2RGB10BE = 195, + /// packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), little-endian, X=unused/undefined + @AV_PIX_FMT_X2BGR10LE = 196, + /// packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), big-endian, X=unused/undefined + @AV_PIX_FMT_X2BGR10BE = 197, + /// interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, big-endian + @AV_PIX_FMT_P210BE = 198, + /// interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, little-endian + @AV_PIX_FMT_P210LE = 199, + /// interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, big-endian + @AV_PIX_FMT_P410BE = 200, + /// interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, little-endian + @AV_PIX_FMT_P410LE = 201, + /// interleaved chroma YUV 4:2:2, 32bpp, big-endian + @AV_PIX_FMT_P216BE = 202, + /// interleaved chroma YUV 4:2:2, 32bpp, little-endian + @AV_PIX_FMT_P216LE = 203, + /// interleaved chroma YUV 4:4:4, 48bpp, big-endian + @AV_PIX_FMT_P416BE = 204, + /// interleaved chroma YUV 4:4:4, 48bpp, little-endian + @AV_PIX_FMT_P416LE = 205, + /// number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions + @AV_PIX_FMT_NB = 206, +} + +/// Rounding methods. +public enum AVRounding : int +{ + /// Round toward zero. + @AV_ROUND_ZERO = 0, + /// Round away from zero. + @AV_ROUND_INF = 1, + /// Round toward -infinity. + @AV_ROUND_DOWN = 2, + /// Round toward +infinity. + @AV_ROUND_UP = 3, + /// Round to nearest and halfway cases away from zero. 
+ @AV_ROUND_NEAR_INF = 5, + /// Flag telling rescaling functions to pass `INT64_MIN`/`MAX` through unchanged, avoiding special cases for #AV_NOPTS_VALUE. + @AV_ROUND_PASS_MINMAX = 8192, +} + +/// Audio sample formats +public enum AVSampleFormat : int +{ + @AV_SAMPLE_FMT_NONE = -1, + /// unsigned 8 bits + @AV_SAMPLE_FMT_U8 = 0, + /// signed 16 bits + @AV_SAMPLE_FMT_S16 = 1, + /// signed 32 bits + @AV_SAMPLE_FMT_S32 = 2, + /// float + @AV_SAMPLE_FMT_FLT = 3, + /// double + @AV_SAMPLE_FMT_DBL = 4, + /// unsigned 8 bits, planar + @AV_SAMPLE_FMT_U8P = 5, + /// signed 16 bits, planar + @AV_SAMPLE_FMT_S16P = 6, + /// signed 32 bits, planar + @AV_SAMPLE_FMT_S32P = 7, + /// float, planar + @AV_SAMPLE_FMT_FLTP = 8, + /// double, planar + @AV_SAMPLE_FMT_DBLP = 9, + /// signed 64 bits + @AV_SAMPLE_FMT_S64 = 10, + /// signed 64 bits, planar + @AV_SAMPLE_FMT_S64P = 11, + /// Number of sample formats. DO NOT USE if linking dynamically + @AV_SAMPLE_FMT_NB = 12, +} + +public enum AVSideDataParamChangeFlags : int +{ + @AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_COUNT = 1, + @AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_LAYOUT = 2, + @AV_SIDE_DATA_PARAM_CHANGE_SAMPLE_RATE = 4, + @AV_SIDE_DATA_PARAM_CHANGE_DIMENSIONS = 8, +} + +/// @} +public enum AVStreamParseType : int +{ + @AVSTREAM_PARSE_NONE = 0, + /// full parsing and repack + @AVSTREAM_PARSE_FULL = 1, + /// Only parse headers, do not repack. 
+ @AVSTREAM_PARSE_HEADERS = 2, + /// full parsing and interpolation of timestamps for frames not starting on a packet boundary + @AVSTREAM_PARSE_TIMESTAMPS = 3, + /// full parsing and repack of the first frame only, only implemented for H.264 currently + @AVSTREAM_PARSE_FULL_ONCE = 4, + /// full parsing and repack with timestamp and position generation by parser for raw this assumes that each packet in the file contains no demuxer level headers and just codec level data, otherwise position generation would fail + @AVSTREAM_PARSE_FULL_RAW = 5, +} + +/// @} +public enum AVSubtitleType : int +{ + @SUBTITLE_NONE = 0, + /// A bitmap, pict will be set + @SUBTITLE_BITMAP = 1, + /// Plain text, the text field must be set by the decoder and is authoritative. ass and pict fields may contain approximations. + @SUBTITLE_TEXT = 2, + /// Formatted text, the ass field must be set by the decoder and is authoritative. pict and text fields may contain approximations. + @SUBTITLE_ASS = 3, +} + +public enum AVTimebaseSource : int +{ + @AVFMT_TBCF_AUTO = -1, + @AVFMT_TBCF_DECODER = 0, + @AVFMT_TBCF_DEMUXER = 1, + @AVFMT_TBCF_R_FRAMERATE = 2, +} + +public enum AVTimecodeFlag : int +{ + /// timecode is drop frame + @AV_TIMECODE_FLAG_DROPFRAME = 1, + /// timecode wraps after 24 hours + @AV_TIMECODE_FLAG_24HOURSMAX = 2, + /// negative time values are allowed + @AV_TIMECODE_FLAG_ALLOWNEGATIVE = 4, +} + +/// Dithering algorithms +public enum SwrDitherType : int +{ + @SWR_DITHER_NONE = 0, + @SWR_DITHER_RECTANGULAR = 1, + @SWR_DITHER_TRIANGULAR = 2, + @SWR_DITHER_TRIANGULAR_HIGHPASS = 3, + /// not part of API/ABI + @SWR_DITHER_NS = 64, + @SWR_DITHER_NS_LIPSHITZ = 65, + @SWR_DITHER_NS_F_WEIGHTED = 66, + @SWR_DITHER_NS_MODIFIED_E_WEIGHTED = 67, + @SWR_DITHER_NS_IMPROVED_E_WEIGHTED = 68, + @SWR_DITHER_NS_SHIBATA = 69, + @SWR_DITHER_NS_LOW_SHIBATA = 70, + @SWR_DITHER_NS_HIGH_SHIBATA = 71, + /// not part of API/ABI + @SWR_DITHER_NB = 72, +} + +/// Resampling Engines +public enum SwrEngine : int +{ 
+ /// SW Resampler + @SWR_ENGINE_SWR = 0, + /// SoX Resampler + @SWR_ENGINE_SOXR = 1, + /// not part of API/ABI + @SWR_ENGINE_NB = 2, +} + +/// Resampling Filter Types +public enum SwrFilterType : int +{ + /// Cubic + @SWR_FILTER_TYPE_CUBIC = 0, + /// Blackman Nuttall windowed sinc + @SWR_FILTER_TYPE_BLACKMAN_NUTTALL = 1, + /// Kaiser windowed sinc + @SWR_FILTER_TYPE_KAISER = 2, +} + diff --git a/FFmpeg.AutoGen/generated/Structs.g.cs b/FFmpeg.AutoGen/generated/Structs.g.cs new file mode 100644 index 00000000..fe83ddd6 --- /dev/null +++ b/FFmpeg.AutoGen/generated/Structs.g.cs @@ -0,0 +1,2587 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen; + +public unsafe partial struct _GUID +{ + public ulong @Data1; + public ushort @Data2; + public ushort @Data3; + public byte_array8 @Data4; +} + +public unsafe partial struct _iobuf +{ + public void* @_Placeholder; +} + +/// Deprecated and unused struct to use for initializing an abuffersink context. +public unsafe partial struct AVABufferSinkParams +{ + /// list of allowed sample formats, terminated by AV_SAMPLE_FMT_NONE + public AVSampleFormat* @sample_fmts; + /// list of allowed channel layouts, terminated by -1 + public long* @channel_layouts; + /// list of allowed channel counts, terminated by -1 + public int* @channel_counts; + /// if not 0, accept any channel count or layout + public int @all_channel_counts; + /// list of allowed sample rates, terminated by -1 + public int* @sample_rates; +} + +public unsafe partial struct AVBitStreamFilter +{ + public byte* @name; + /// A list of codec ids supported by the filter, terminated by AV_CODEC_ID_NONE. May be NULL, in that case the bitstream filter works with any codec id. + public AVCodecID* @codec_ids; + /// A class for the private data, used to declare bitstream filter private AVOptions. This field is NULL for bitstream filters that do not declare any options. + public AVClass* @priv_class; +} + +/// The bitstream filter state. 
+public unsafe partial struct AVBSFContext +{ + /// A class for logging and AVOptions + public AVClass* @av_class; + /// The bitstream filter this context is an instance of. + public AVBitStreamFilter* @filter; + /// Opaque filter-specific private data. If filter->priv_class is non-NULL, this is an AVOptions-enabled struct. + public void* @priv_data; + /// Parameters of the input stream. This field is allocated in av_bsf_alloc(), it needs to be filled by the caller before av_bsf_init(). + public AVCodecParameters* @par_in; + /// Parameters of the output stream. This field is allocated in av_bsf_alloc(), it is set by the filter in av_bsf_init(). + public AVCodecParameters* @par_out; + /// The timebase used for the timestamps of the input packets. Set by the caller before av_bsf_init(). + public AVRational @time_base_in; + /// The timebase used for the timestamps of the output packets. Set by the filter in av_bsf_init(). + public AVRational @time_base_out; +} + +/// A reference to a data buffer. +public unsafe partial struct AVBufferRef +{ + public AVBuffer* @buffer; + /// The data buffer. It is considered writable if and only if this is the only reference to the buffer, in which case av_buffer_is_writable() returns 1. + public byte* @data; + /// Size of data in bytes. + public ulong @size; +} + +/// Deprecated and unused struct to use for initializing a buffersink context. +public unsafe partial struct AVBufferSinkParams +{ + /// list of allowed pixel formats, terminated by AV_PIX_FMT_NONE + public AVPixelFormat* @pixel_fmts; +} + +/// This structure contains the parameters describing the frames that will be passed to this filter. +public unsafe partial struct AVBufferSrcParameters +{ + /// video: the pixel format, value corresponds to enum AVPixelFormat audio: the sample format, value corresponds to enum AVSampleFormat + public int @format; + /// The timebase to be used for the timestamps on the input frames. 
+ public AVRational @time_base; + /// Video only, the display dimensions of the input frames. + public int @width; + /// Video only, the display dimensions of the input frames. + public int @height; + /// Video only, the sample (pixel) aspect ratio. + public AVRational @sample_aspect_ratio; + /// Video only, the frame rate of the input video. This field must only be set to a non-zero value if input stream has a known constant framerate and should be left at its initial value if the framerate is variable or unknown. + public AVRational @frame_rate; + /// Video with a hwaccel pixel format only. This should be a reference to an AVHWFramesContext instance describing the input frames. + public AVBufferRef* @hw_frames_ctx; + /// Audio only, the audio sampling rate in samples per second. + public int @sample_rate; + /// Audio only, the audio channel layout + [Obsolete("use ch_layout")] + public ulong @channel_layout; + /// Audio only, the audio channel layout + public AVChannelLayout @ch_layout; +} + +/// An AVChannelCustom defines a single channel within a custom order layout +public unsafe partial struct AVChannelCustom +{ + public AVChannel @id; + public byte_array16 @name; + public void* @opaque; +} + +/// An AVChannelLayout holds information about the channel layout of audio data. +public unsafe partial struct AVChannelLayout +{ + /// Channel order used in this layout. This is a mandatory field. + public AVChannelOrder @order; + /// Number of channels in this layout. Mandatory field. + public int @nb_channels; + public AVChannelLayout_u @u; + /// For some private data of the user. + public void* @opaque; +} + +/// Details about which channels are present in this layout. For AV_CHANNEL_ORDER_UNSPEC, this field is undefined and must not be used. +[StructLayout(LayoutKind.Explicit)] +public unsafe partial struct AVChannelLayout_u +{ + /// This member must be used for AV_CHANNEL_ORDER_NATIVE, and may be used for AV_CHANNEL_ORDER_AMBISONIC to signal non-diegetic channels. 
It is a bitmask, where the position of each set bit means that the AVChannel with the corresponding value is present. + [FieldOffset(0)] + public ulong @mask; + /// This member must be used when the channel order is AV_CHANNEL_ORDER_CUSTOM. It is a nb_channels-sized array, with each element signalling the presence of the AVChannel with the corresponding value in map[i].id. + [FieldOffset(0)] + public AVChannelCustom* @map; +} + +public unsafe partial struct AVChapter +{ + /// unique ID to identify the chapter + public long @id; + /// time base in which the start/end timestamps are specified + public AVRational @time_base; + /// chapter start/end time in time_base units + public long @start; + /// chapter start/end time in time_base units + public long @end; + public AVDictionary* @metadata; +} + +/// Describe the class of an AVClass context structure. That is an arbitrary struct of which the first field is a pointer to an AVClass struct (e.g. AVCodecContext, AVFormatContext etc.). +public unsafe partial struct AVClass +{ + /// The name of the class; usually it is the same name as the context structure type to which the AVClass is associated. + public byte* @class_name; + /// A pointer to a function which returns the name of a context instance ctx associated with the class. + public AVClass_item_name_func @item_name; + /// a pointer to the first option specified in the class if any or NULL + public AVOption* @option; + /// LIBAVUTIL_VERSION with which this structure was created. This is used to allow fields to be added without requiring major version bumps everywhere. + public int @version; + /// Offset in the structure where log_level_offset is stored. 0 means there is no such variable + public int @log_level_offset_offset; + /// Offset in the structure where a pointer to the parent context for logging is stored. 
For example a decoder could pass its AVCodecContext to eval as such a parent context, which an av_log() implementation could then leverage to display the parent context. The offset can be NULL. + public int @parent_log_context_offset; + /// Category used for visualization (like color) This is only set if the category is equal for all objects using this class. available since version (51 << 16 | 56 << 8 | 100) + public AVClassCategory @category; + /// Callback to return the category. available since version (51 << 16 | 59 << 8 | 100) + public AVClass_get_category_func @get_category; + /// Callback to return the supported/allowed ranges. available since version (52.12) + public AVClass_query_ranges_func @query_ranges; + /// Return next AVOptions-enabled child or NULL + public AVClass_child_next_func @child_next; + /// Iterate over the AVClasses corresponding to potential AVOptions-enabled children. + public AVClass_child_class_iterate_func @child_class_iterate; +} + +/// AVCodec. +public unsafe partial struct AVCodec +{ + /// Name of the codec implementation. The name is globally unique among encoders and among decoders (but an encoder and a decoder can share the same name). This is the primary way to find a codec from the user perspective. + public byte* @name; + /// Descriptive name for the codec, meant to be more human readable than name. You should use the NULL_IF_CONFIG_SMALL() macro to define it. + public byte* @long_name; + public AVMediaType @type; + public AVCodecID @id; + /// Codec capabilities. 
see AV_CODEC_CAP_* + public int @capabilities; + /// maximum value for lowres supported by the decoder + public byte @max_lowres; + /// array of supported framerates, or NULL if any, array is terminated by {0,0} + public AVRational* @supported_framerates; + /// array of supported pixel formats, or NULL if unknown, array is terminated by -1 + public AVPixelFormat* @pix_fmts; + /// array of supported audio samplerates, or NULL if unknown, array is terminated by 0 + public int* @supported_samplerates; + /// array of supported sample formats, or NULL if unknown, array is terminated by -1 + public AVSampleFormat* @sample_fmts; + /// array of support channel layouts, or NULL if unknown. array is terminated by 0 + public ulong* @channel_layouts; + /// AVClass for the private context + public AVClass* @priv_class; + /// array of recognized profiles, or NULL if unknown, array is terminated by {FF_PROFILE_UNKNOWN} + public AVProfile* @profiles; + /// Group name of the codec implementation. This is a short symbolic name of the wrapper backing this codec. A wrapper uses some kind of external implementation for the codec, such as an external library, or a codec implementation provided by the OS or the hardware. If this field is NULL, this is a builtin, libavcodec native codec. If non-NULL, this will be the suffix in AVCodec.name in most cases (usually AVCodec.name will be of the form "<codec_name>_<wrapper_name>"). + public byte* @wrapper_name; + /// Array of supported channel layouts, terminated with a zeroed layout. + public AVChannelLayout* @ch_layouts; +} + +/// main external API structure. New fields can be added to the end with minor version bumps. Removal, reordering and changes to existing fields require a major version bump. You can use AVOptions (av_opt* / av_set/get*()) to access these fields from user applications. 
The name string for AVOptions options matches the associated command line parameter name and can be found in libavcodec/options_table.h The AVOption/command line parameter names differ in some cases from the C structure field names for historic reasons or brevity. sizeof(AVCodecContext) must not be used outside libav*. +public unsafe partial struct AVCodecContext +{ + /// information on struct for av_log - set by avcodec_alloc_context3 + public AVClass* @av_class; + public int @log_level_offset; + public AVMediaType @codec_type; + public AVCodec* @codec; + public AVCodecID @codec_id; + /// fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A'). This is used to work around some encoder bugs. A demuxer should set this to what is stored in the field used to identify the codec. If there are multiple such fields in a container then the demuxer should choose the one which maximizes the information about the used codec. If the codec tag field in a container is larger than 32 bits then the demuxer should remap the longer ID to 32 bits with a table or other structure. Alternatively a new extra_codec_tag + size could be added but for this a clear advantage must be demonstrated first. - encoding: Set by user, if not then the default based on codec_id will be used. - decoding: Set by user, will be converted to uppercase by libavcodec during init. + public uint @codec_tag; + public void* @priv_data; + /// Private context used for internal data. + public AVCodecInternal* @internal; + /// Private data of the user, can be used to carry app specific stuff. - encoding: Set by user. - decoding: Set by user. + public void* @opaque; + /// the average bitrate - encoding: Set by user; unused for constant quantizer encoding. - decoding: Set by user, may be overwritten by libavcodec if this info is available in the stream + public long @bit_rate; + /// number of bits the bitstream is allowed to diverge from the reference. 
the reference can be CBR (for CBR pass1) or VBR (for pass2) - encoding: Set by user; unused for constant quantizer encoding. - decoding: unused + public int @bit_rate_tolerance; + /// Global quality for codecs which cannot change it per frame. This should be proportional to MPEG-1/2/4 qscale. - encoding: Set by user. - decoding: unused + public int @global_quality; + /// - encoding: Set by user. - decoding: unused + public int @compression_level; + /// AV_CODEC_FLAG_*. - encoding: Set by user. - decoding: Set by user. + public int @flags; + /// AV_CODEC_FLAG2_* - encoding: Set by user. - decoding: Set by user. + public int @flags2; + /// some codecs need / can use extradata like Huffman tables. MJPEG: Huffman tables rv10: additional flags MPEG-4: global headers (they can be in the bitstream or here) The allocated memory should be AV_INPUT_BUFFER_PADDING_SIZE bytes larger than extradata_size to avoid problems if it is read with the bitstream reader. The bytewise contents of extradata must not depend on the architecture or CPU endianness. Must be allocated with the av_malloc() family of functions. - encoding: Set/allocated/freed by libavcodec. - decoding: Set/allocated/freed by user. + public byte* @extradata; + public int @extradata_size; + /// This is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented. For fixed-fps content, timebase should be 1/framerate and timestamp increments should be identically 1. This often, but not always is the inverse of the frame rate or field rate for video. 1/time_base is not the average frame rate if the frame rate is not constant. + public AVRational @time_base; + /// For some codecs, the time base is closer to the field rate than the frame rate. Most notably, H.264 and MPEG-2 specify time_base as half of frame duration if no telecine is used ... + public int @ticks_per_frame; + /// Codec delay. + public int @delay; + /// picture width / height. 
+ public int @width; + /// picture width / height. + public int @height; + /// Bitstream width / height, may be different from width/height e.g. when the decoded frame is cropped before being output or lowres is enabled. + public int @coded_width; + /// Bitstream width / height, may be different from width/height e.g. when the decoded frame is cropped before being output or lowres is enabled. + public int @coded_height; + /// the number of pictures in a group of pictures, or 0 for intra_only - encoding: Set by user. - decoding: unused + public int @gop_size; + /// Pixel format, see AV_PIX_FMT_xxx. May be set by the demuxer if known from headers. May be overridden by the decoder if it knows better. + public AVPixelFormat @pix_fmt; + /// If non NULL, 'draw_horiz_band' is called by the libavcodec decoder to draw a horizontal band. It improves cache usage. Not all codecs can do that. You must check the codec capabilities beforehand. When multithreading is used, it may be called from multiple threads at the same time; threads might draw different parts of the same AVFrame, or multiple AVFrames, and there is no guarantee that slices will be drawn in order. The function is also used by hardware acceleration APIs. It is called at least once during frame decoding to pass the data needed for hardware render. In that mode instead of pixel data, AVFrame points to a structure specific to the acceleration API. The application reads the structure and can change some fields to indicate progress or mark state. - encoding: unused - decoding: Set by user. + public AVCodecContext_draw_horiz_band_func @draw_horiz_band; + /// Callback to negotiate the pixel format. Decoding only, may be set by the caller before avcodec_open2(). + public AVCodecContext_get_format_func @get_format; + /// maximum number of B-frames between non-B-frames Note: The output will be delayed by max_b_frames+1 relative to the input. - encoding: Set by user. 
- decoding: unused + public int @max_b_frames; + /// qscale factor between IP and B-frames If > 0 then the last P-frame quantizer will be used (q= lastp_q*factor+offset). If < 0 then normal ratecontrol will be done (q= -normal_q*factor+offset). - encoding: Set by user. - decoding: unused + public float @b_quant_factor; + /// qscale offset between IP and B-frames - encoding: Set by user. - decoding: unused + public float @b_quant_offset; + /// Size of the frame reordering buffer in the decoder. For MPEG-2 it is 1 IPB or 0 low delay IP. - encoding: Set by libavcodec. - decoding: Set by libavcodec. + public int @has_b_frames; + /// qscale factor between P- and I-frames If > 0 then the last P-frame quantizer will be used (q = lastp_q * factor + offset). If < 0 then normal ratecontrol will be done (q= -normal_q*factor+offset). - encoding: Set by user. - decoding: unused + public float @i_quant_factor; + /// qscale offset between P and I-frames - encoding: Set by user. - decoding: unused + public float @i_quant_offset; + /// luminance masking (0-> disabled) - encoding: Set by user. - decoding: unused + public float @lumi_masking; + /// temporary complexity masking (0-> disabled) - encoding: Set by user. - decoding: unused + public float @temporal_cplx_masking; + /// spatial complexity masking (0-> disabled) - encoding: Set by user. - decoding: unused + public float @spatial_cplx_masking; + /// p block masking (0-> disabled) - encoding: Set by user. - decoding: unused + public float @p_masking; + /// darkness masking (0-> disabled) - encoding: Set by user. - decoding: unused + public float @dark_masking; + /// slice count - encoding: Set by libavcodec. - decoding: Set by user (or 0). + public int @slice_count; + /// slice offsets in the frame in bytes - encoding: Set/allocated by libavcodec. - decoding: Set/allocated by user (or NULL). + public int* @slice_offset; + /// sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel. 
Numerator and denominator must be relatively prime and smaller than 256 for some video standards. - encoding: Set by user. - decoding: Set by libavcodec. + public AVRational @sample_aspect_ratio; + /// motion estimation comparison function - encoding: Set by user. - decoding: unused + public int @me_cmp; + /// subpixel motion estimation comparison function - encoding: Set by user. - decoding: unused + public int @me_sub_cmp; + /// macroblock comparison function (not supported yet) - encoding: Set by user. - decoding: unused + public int @mb_cmp; + /// interlaced DCT comparison function - encoding: Set by user. - decoding: unused + public int @ildct_cmp; + /// ME diamond size & shape - encoding: Set by user. - decoding: unused + public int @dia_size; + /// amount of previous MV predictors (2a+1 x 2a+1 square) - encoding: Set by user. - decoding: unused + public int @last_predictor_count; + /// motion estimation prepass comparison function - encoding: Set by user. - decoding: unused + public int @me_pre_cmp; + /// ME prepass diamond size & shape - encoding: Set by user. - decoding: unused + public int @pre_dia_size; + /// subpel ME quality - encoding: Set by user. - decoding: unused + public int @me_subpel_quality; + /// maximum motion estimation search range in subpel units If 0 then no limit. + public int @me_range; + /// slice flags - encoding: unused - decoding: Set by user. + public int @slice_flags; + /// macroblock decision mode - encoding: Set by user. - decoding: unused + public int @mb_decision; + /// custom intra quantization matrix Must be allocated with the av_malloc() family of functions, and will be freed in avcodec_free_context(). - encoding: Set/allocated by user, freed by libavcodec. Can be NULL. - decoding: Set/allocated/freed by libavcodec. + public ushort* @intra_matrix; + /// custom inter quantization matrix Must be allocated with the av_malloc() family of functions, and will be freed in avcodec_free_context(). 
- encoding: Set/allocated by user, freed by libavcodec. Can be NULL. - decoding: Set/allocated/freed by libavcodec. + public ushort* @inter_matrix; + /// precision of the intra DC coefficient - 8 - encoding: Set by user. - decoding: Set by libavcodec + public int @intra_dc_precision; + /// Number of macroblock rows at the top which are skipped. - encoding: unused - decoding: Set by user. + public int @skip_top; + /// Number of macroblock rows at the bottom which are skipped. - encoding: unused - decoding: Set by user. + public int @skip_bottom; + /// minimum MB Lagrange multiplier - encoding: Set by user. - decoding: unused + public int @mb_lmin; + /// maximum MB Lagrange multiplier - encoding: Set by user. - decoding: unused + public int @mb_lmax; + /// - encoding: Set by user. - decoding: unused + public int @bidir_refine; + /// minimum GOP size - encoding: Set by user. - decoding: unused + public int @keyint_min; + /// number of reference frames - encoding: Set by user. - decoding: Set by lavc. + public int @refs; + /// Note: Value depends upon the compare function used for fullpel ME. - encoding: Set by user. - decoding: unused + public int @mv0_threshold; + /// Chromaticity coordinates of the source primaries. - encoding: Set by user - decoding: Set by libavcodec + public AVColorPrimaries @color_primaries; + /// Color Transfer Characteristic. - encoding: Set by user - decoding: Set by libavcodec + public AVColorTransferCharacteristic @color_trc; + /// YUV colorspace type. - encoding: Set by user - decoding: Set by libavcodec + public AVColorSpace @colorspace; + /// MPEG vs JPEG YUV range. - encoding: Set by user - decoding: Set by libavcodec + public AVColorRange @color_range; + /// This defines the location of chroma samples. - encoding: Set by user - decoding: Set by libavcodec + public AVChromaLocation @chroma_sample_location; + /// Number of slices. Indicates number of picture subdivisions. Used for parallelized decoding. 
- encoding: Set by user - decoding: unused + public int @slices; + /// Field order - encoding: set by libavcodec - decoding: Set by user. + public AVFieldOrder @field_order; + /// samples per second + public int @sample_rate; + /// number of audio channels + [Obsolete("use ch_layout.nb_channels")] + public int @channels; + /// sample format + public AVSampleFormat @sample_fmt; + /// Number of samples per channel in an audio frame. + public int @frame_size; + /// Frame counter, set by libavcodec. + public int @frame_number; + /// number of bytes per packet if constant and known or 0 Used by some WAV based audio codecs. + public int @block_align; + /// Audio cutoff bandwidth (0 means "automatic") - encoding: Set by user. - decoding: unused + public int @cutoff; + /// Audio channel layout. - encoding: set by user. - decoding: set by user, may be overwritten by libavcodec. + [Obsolete("use ch_layout")] + public ulong @channel_layout; + /// Request decoder to use this channel layout if it can (0 for default) - encoding: unused - decoding: Set by user. + [Obsolete("use \"downmix\" codec private option")] + public ulong @request_channel_layout; + /// Type of service that the audio stream conveys. - encoding: Set by user. - decoding: Set by libavcodec. + public AVAudioServiceType @audio_service_type; + /// desired sample format - encoding: Not used. - decoding: Set by user. Decoder will decode to this format if it can. + public AVSampleFormat @request_sample_fmt; + /// This callback is called at the beginning of each frame to get data buffer(s) for it. There may be one contiguous buffer for all the data or there may be a buffer per each data plane or anything in between. What this means is, you may set however many entries in buf[] you feel necessary. Each buffer must be reference-counted using the AVBuffer API (see description of buf[] below). 
+ public AVCodecContext_get_buffer2_func @get_buffer2; + /// amount of qscale change between easy & hard scenes (0.0-1.0) + public float @qcompress; + /// amount of qscale smoothing over time (0.0-1.0) + public float @qblur; + /// minimum quantizer - encoding: Set by user. - decoding: unused + public int @qmin; + /// maximum quantizer - encoding: Set by user. - decoding: unused + public int @qmax; + /// maximum quantizer difference between frames - encoding: Set by user. - decoding: unused + public int @max_qdiff; + /// decoder bitstream buffer size - encoding: Set by user. - decoding: unused + public int @rc_buffer_size; + /// ratecontrol override, see RcOverride - encoding: Allocated/set/freed by user. - decoding: unused + public int @rc_override_count; + public RcOverride* @rc_override; + /// maximum bitrate - encoding: Set by user. - decoding: Set by user, may be overwritten by libavcodec. + public long @rc_max_rate; + /// minimum bitrate - encoding: Set by user. - decoding: unused + public long @rc_min_rate; + /// Ratecontrol attempt to use, at maximum, <value> of what can be used without an underflow. - encoding: Set by user. - decoding: unused. + public float @rc_max_available_vbv_use; + /// Ratecontrol attempt to use, at least, <value> times the amount needed to prevent a vbv overflow. - encoding: Set by user. - decoding: unused. + public float @rc_min_vbv_overflow_use; + /// Number of bits which should be loaded into the rc buffer before decoding starts. - encoding: Set by user. - decoding: unused + public int @rc_initial_buffer_occupancy; + /// trellis RD quantization - encoding: Set by user. - decoding: unused + public int @trellis; + /// pass1 encoding statistics output buffer - encoding: Set by libavcodec. - decoding: unused + public byte* @stats_out; + /// pass2 encoding statistics input buffer Concatenated stuff from stats_out of pass1 should be placed here. - encoding: Allocated/set/freed by user. 
- decoding: unused + public byte* @stats_in; + /// Work around bugs in encoders which sometimes cannot be detected automatically. - encoding: Set by user - decoding: Set by user + public int @workaround_bugs; + /// strictly follow the standard (MPEG-4, ...). - encoding: Set by user. - decoding: Set by user. Setting this to STRICT or higher means the encoder and decoder will generally do stupid things, whereas setting it to unofficial or lower will mean the encoder might produce output that is not supported by all spec-compliant decoders. Decoders don't differentiate between normal, unofficial and experimental (that is, they always try to decode things when they can) unless they are explicitly asked to behave stupidly (=strictly conform to the specs) + public int @strict_std_compliance; + /// error concealment flags - encoding: unused - decoding: Set by user. + public int @error_concealment; + /// debug - encoding: Set by user. - decoding: Set by user. + public int @debug; + /// Error recognition; may misdetect some more or less valid parts as errors. - encoding: Set by user. - decoding: Set by user. + public int @err_recognition; + /// opaque 64-bit number (generally a PTS) that will be reordered and output in AVFrame.reordered_opaque - encoding: Set by libavcodec to the reordered_opaque of the input frame corresponding to the last returned packet. Only supported by encoders with the AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE capability. - decoding: Set by user. + public long @reordered_opaque; + /// Hardware accelerator in use - encoding: unused. - decoding: Set by libavcodec + public AVHWAccel* @hwaccel; + /// Hardware accelerator context. For some hardware accelerators, a global context needs to be provided by the user. In that case, this holds display-dependent data FFmpeg cannot instantiate itself. Please refer to the FFmpeg HW accelerator documentation to know how to fill this. 
- encoding: unused - decoding: Set by user + public void* @hwaccel_context; + /// error - encoding: Set by libavcodec if flags & AV_CODEC_FLAG_PSNR. - decoding: unused + public ulong_array8 @error; + /// DCT algorithm, see FF_DCT_* below - encoding: Set by user. - decoding: unused + public int @dct_algo; + /// IDCT algorithm, see FF_IDCT_* below. - encoding: Set by user. - decoding: Set by user. + public int @idct_algo; + /// bits per sample/pixel from the demuxer (needed for huffyuv). - encoding: Set by libavcodec. - decoding: Set by user. + public int @bits_per_coded_sample; + /// Bits per sample/pixel of internal libavcodec pixel/sample format. - encoding: set by user. - decoding: set by libavcodec. + public int @bits_per_raw_sample; + /// low resolution decoding, 1-> 1/2 size, 2->1/4 size - encoding: unused - decoding: Set by user. + public int @lowres; + /// thread count is used to decide how many independent tasks should be passed to execute() - encoding: Set by user. - decoding: Set by user. + public int @thread_count; + /// Which multithreading methods to use. Use of FF_THREAD_FRAME will increase decoding delay by one frame per thread, so clients which cannot provide future frames should not use it. + public int @thread_type; + /// Which multithreading methods are in use by the codec. - encoding: Set by libavcodec. - decoding: Set by libavcodec. + public int @active_thread_type; + /// Set by the client if its custom get_buffer() callback can be called synchronously from another thread, which allows faster multithreaded decoding. draw_horiz_band() will be called from other threads regardless of this setting. Ignored if the default get_buffer() is used. - encoding: Set by user. - decoding: Set by user. + [Obsolete("the custom get_buffer2() callback should always be thread-safe. Thread-unsafe get_buffer2() implementations will be invalid starting with LIBAVCODEC_VERSION_MAJOR=60; in other words, libavcodec will behave as if this field was always set to 1. 
Callers that want to be forward compatible with future libavcodec versions should wrap access to this field in #if LIBAVCODEC_VERSION_MAJOR < 60")] + public int @thread_safe_callbacks; + /// The codec may call this to execute several independent things. It will return only after finishing all tasks. The user may replace this with some multithreaded implementation, the default implementation will execute the parts serially. + public AVCodecContext_execute_func @execute; + /// The codec may call this to execute several independent things. It will return only after finishing all tasks. The user may replace this with some multithreaded implementation, the default implementation will execute the parts serially. + public AVCodecContext_execute2_func @execute2; + /// noise vs. sse weight for the nsse comparison function - encoding: Set by user. - decoding: unused + public int @nsse_weight; + /// profile - encoding: Set by user. - decoding: Set by libavcodec. + public int @profile; + /// level - encoding: Set by user. - decoding: Set by libavcodec. + public int @level; + /// Skip loop filtering for selected frames. - encoding: unused - decoding: Set by user. + public AVDiscard @skip_loop_filter; + /// Skip IDCT/dequantization for selected frames. - encoding: unused - decoding: Set by user. + public AVDiscard @skip_idct; + /// Skip decoding for selected frames. - encoding: unused - decoding: Set by user. + public AVDiscard @skip_frame; + /// Header containing style information for text subtitles. For SUBTITLE_ASS subtitle type, it should contain the whole ASS [Script Info] and [V4+ Styles] section, plus the [Events] line and the Format line following. It shouldn't include any Dialogue line. - encoding: Set/allocated/freed by user (before avcodec_open2()) - decoding: Set/allocated/freed by libavcodec (by avcodec_open2()) + public byte* @subtitle_header; + public int @subtitle_header_size; + /// Audio only. 
The number of "priming" samples (padding) inserted by the encoder at the beginning of the audio. I.e. this number of leading decoded samples must be discarded by the caller to get the original audio without leading padding. + public int @initial_padding; + /// - decoding: For codecs that store a framerate value in the compressed bitstream, the decoder may export it here. { 0, 1} when unknown. - encoding: May be used to signal the framerate of CFR content to an encoder. + public AVRational @framerate; + /// Nominal unaccelerated pixel format, see AV_PIX_FMT_xxx. - encoding: unused. - decoding: Set by libavcodec before calling get_format() + public AVPixelFormat @sw_pix_fmt; + /// Timebase in which pkt_dts/pts and AVPacket.dts/pts are. - encoding unused. - decoding set by user. + public AVRational @pkt_timebase; + /// AVCodecDescriptor - encoding: unused. - decoding: set by libavcodec. + public AVCodecDescriptor* @codec_descriptor; + /// Current statistics for PTS correction. - decoding: maintained and used by libavcodec, not intended to be used by user apps - encoding: unused + public long @pts_correction_num_faulty_pts; + /// Number of incorrect PTS values so far + public long @pts_correction_num_faulty_dts; + /// Number of incorrect DTS values so far + public long @pts_correction_last_pts; + /// PTS of the last frame + public long @pts_correction_last_dts; + /// Character encoding of the input subtitles file. - decoding: set by user - encoding: unused + public byte* @sub_charenc; + /// Subtitles character encoding mode. Formats or codecs might be adjusting this setting (if they are doing the conversion themselves for instance). - decoding: set by libavcodec - encoding: unused + public int @sub_charenc_mode; + /// Skip processing alpha if supported by codec. Note that if the format uses pre-multiplied alpha (common with VP6, and recommended due to better video quality/compression) the image will look as if alpha-blended onto a black background. 
However for formats that do not use pre-multiplied alpha there might be serious artefacts (though e.g. libswscale currently assumes pre-multiplied alpha anyway). + public int @skip_alpha; + /// Number of samples to skip after a discontinuity - decoding: unused - encoding: set by libavcodec + public int @seek_preroll; + [Obsolete("unused")] + public int @debug_mv; + /// custom intra quantization matrix - encoding: Set by user, can be NULL. - decoding: unused. + public ushort* @chroma_intra_matrix; + /// dump format separator. can be ", " or " " or anything else - encoding: Set by user. - decoding: Set by user. + public byte* @dump_separator; + /// ',' separated list of allowed decoders. If NULL then all are allowed - encoding: unused - decoding: set by user + public byte* @codec_whitelist; + /// Properties of the stream that gets decoded - encoding: unused - decoding: set by libavcodec + public uint @properties; + /// Additional data associated with the entire coded stream. + public AVPacketSideData* @coded_side_data; + public int @nb_coded_side_data; + /// A reference to the AVHWFramesContext describing the input (for encoding) or output (decoding) frames. The reference is set by the caller and afterwards owned (and freed) by libavcodec - it should never be read by the caller after being set. + public AVBufferRef* @hw_frames_ctx; + [Obsolete("unused")] + public int @sub_text_format; + /// Audio only. The amount of padding (in samples) appended by the encoder to the end of the audio. I.e. this number of decoded samples must be discarded by the caller from the end of the stream to get the original audio without any trailing padding. + public int @trailing_padding; + /// The number of pixels per image to maximally accept. + public long @max_pixels; + /// A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/decoder. The reference is set by the caller and afterwards owned (and freed) by libavcodec. 
+ public AVBufferRef* @hw_device_ctx; + /// Bit set of AV_HWACCEL_FLAG_* flags, which affect hardware accelerated decoding (if active). - encoding: unused - decoding: Set by user (either before avcodec_open2(), or in the AVCodecContext.get_format callback) + public int @hwaccel_flags; + /// Video decoding only. Certain video codecs support cropping, meaning that only a sub-rectangle of the decoded frame is intended for display. This option controls how cropping is handled by libavcodec. + public int @apply_cropping; + public int @extra_hw_frames; + /// The percentage of damaged samples to discard a frame. + public int @discard_damaged_percentage; + /// The number of samples per frame to maximally accept. + public long @max_samples; + /// Bit set of AV_CODEC_EXPORT_DATA_* flags, which affects the kind of metadata exported in frame, packet, or coded stream side data by decoders and encoders. + public int @export_side_data; + /// This callback is called at the beginning of each packet to get a data buffer for it. + public AVCodecContext_get_encode_buffer_func @get_encode_buffer; + /// Audio channel layout. - encoding: must be set by the caller, to one of AVCodec.ch_layouts. - decoding: may be set by the caller if known e.g. from the container. The decoder can then override during decoding as needed. + public AVChannelLayout @ch_layout; +} + +/// This struct describes the properties of a single codec described by an AVCodecID. +public unsafe partial struct AVCodecDescriptor +{ + public AVCodecID @id; + public AVMediaType @type; + /// Name of the codec described by this descriptor. It is non-empty and unique for each codec descriptor. It should contain alphanumeric characters and '_' only. + public byte* @name; + /// A more descriptive name for this codec. May be NULL. + public byte* @long_name; + /// Codec properties, a combination of AV_CODEC_PROP_* flags. + public int @props; + /// MIME type(s) associated with the codec. 
May be NULL; if not, a NULL-terminated array of MIME types. The first item is always non-NULL and is the preferred MIME type. + public byte** @mime_types; + /// If non-NULL, an array of profiles recognized for this codec. Terminated with FF_PROFILE_UNKNOWN. + public AVProfile* @profiles; +} + +public unsafe partial struct AVCodecHWConfig +{ + /// For decoders, a hardware pixel format which that decoder may be able to decode to if suitable hardware is available. + public AVPixelFormat @pix_fmt; + /// Bit set of AV_CODEC_HW_CONFIG_METHOD_* flags, describing the possible setup methods which can be used with this configuration. + public int @methods; + /// The device type associated with the configuration. + public AVHWDeviceType @device_type; +} + +/// This struct describes the properties of an encoded stream. +public unsafe partial struct AVCodecParameters +{ + /// General type of the encoded data. + public AVMediaType @codec_type; + /// Specific type of the encoded data (the codec used). + public AVCodecID @codec_id; + /// Additional information about the codec (corresponds to the AVI FOURCC). + public uint @codec_tag; + /// Extra binary data needed for initializing the decoder, codec-dependent. + public byte* @extradata; + /// Size of the extradata content in bytes. + public int @extradata_size; + /// - video: the pixel format, the value corresponds to enum AVPixelFormat. - audio: the sample format, the value corresponds to enum AVSampleFormat. + public int @format; + /// The average bitrate of the encoded data (in bits per second). + public long @bit_rate; + /// The number of bits per sample in the codedwords. + public int @bits_per_coded_sample; + /// This is the number of valid bits in each output sample. If the sample format has more bits, the least significant bits are additional padding bits, which are always 0. Use right shifts to reduce the sample to its actual size. 
For example, audio formats with 24 bit samples will have bits_per_raw_sample set to 24, and format set to AV_SAMPLE_FMT_S32. To get the original sample use "(int32_t)sample >> 8"." + public int @bits_per_raw_sample; + /// Codec-specific bitstream restrictions that the stream conforms to. + public int @profile; + public int @level; + /// Video only. The dimensions of the video frame in pixels. + public int @width; + public int @height; + /// Video only. The aspect ratio (width / height) which a single pixel should have when displayed. + public AVRational @sample_aspect_ratio; + /// Video only. The order of the fields in interlaced video. + public AVFieldOrder @field_order; + /// Video only. Additional colorspace characteristics. + public AVColorRange @color_range; + public AVColorPrimaries @color_primaries; + public AVColorTransferCharacteristic @color_trc; + public AVColorSpace @color_space; + public AVChromaLocation @chroma_location; + /// Video only. Number of delayed frames. + public int @video_delay; + /// Audio only. The channel layout bitmask. May be 0 if the channel layout is unknown or unspecified, otherwise the number of bits set must be equal to the channels field. + [Obsolete("use ch_layout")] + public ulong @channel_layout; + /// Audio only. The number of audio channels. + [Obsolete("use ch_layout.nb_channels")] + public int @channels; + /// Audio only. The number of audio samples per second. + public int @sample_rate; + /// Audio only. The number of bytes per coded audio frame, required by some formats. + public int @block_align; + /// Audio only. Audio frame size, if known. Required by some formats to be static. + public int @frame_size; + /// Audio only. The amount of padding (in samples) inserted by the encoder at the beginning of the audio. I.e. this number of leading decoded samples must be discarded by the caller to get the original audio without leading padding. + public int @initial_padding; + /// Audio only. 
The amount of padding (in samples) appended by the encoder to the end of the audio. I.e. this number of decoded samples must be discarded by the caller from the end of the stream to get the original audio without any trailing padding. + public int @trailing_padding; + /// Audio only. Number of samples to skip after a discontinuity. + public int @seek_preroll; + /// Audio only. The channel layout and number of channels. + public AVChannelLayout @ch_layout; +} + +public unsafe partial struct AVCodecParser +{ + public int_array7 @codec_ids; + public int @priv_data_size; + public AVCodecParser_parser_init_func @parser_init; + public AVCodecParser_parser_parse_func @parser_parse; + public AVCodecParser_parser_close_func @parser_close; + public AVCodecParser_split_func @split; +} + +public unsafe partial struct AVCodecParserContext +{ + public void* @priv_data; + public AVCodecParser* @parser; + public long @frame_offset; + public long @cur_offset; + public long @next_frame_offset; + public int @pict_type; + /// This field is used for proper frame duration computation in lavf. It signals, how much longer the frame duration of the current frame is compared to normal frame duration. + public int @repeat_pict; + public long @pts; + public long @dts; + public long @last_pts; + public long @last_dts; + public int @fetch_timestamp; + public int @cur_frame_start_index; + public long_array4 @cur_frame_offset; + public long_array4 @cur_frame_pts; + public long_array4 @cur_frame_dts; + public int @flags; + /// byte offset from starting packet start + public long @offset; + public long_array4 @cur_frame_end; + /// Set by parser to 1 for key frames and 0 for non-key frames. It is initialized to -1, so if the parser doesn't set this flag, old-style fallback using AV_PICTURE_TYPE_I picture type as key frames will be used. + public int @key_frame; + /// Synchronization point for start of timestamp generation. 
+ public int @dts_sync_point; + /// Offset of the current timestamp against last timestamp sync point in units of AVCodecContext.time_base. + public int @dts_ref_dts_delta; + /// Presentation delay of current frame in units of AVCodecContext.time_base. + public int @pts_dts_delta; + /// Position of the packet in file. + public long_array4 @cur_frame_pos; + /// Byte position of currently parsed frame in stream. + public long @pos; + /// Previous frame byte position. + public long @last_pos; + /// Duration of the current frame. For audio, this is in units of 1 / AVCodecContext.sample_rate. For all other types, this is in units of AVCodecContext.time_base. + public int @duration; + public AVFieldOrder @field_order; + /// Indicate whether a picture is coded as a frame, top field or bottom field. + public AVPictureStructure @picture_structure; + /// Picture number incremented in presentation or output order. This field may be reinitialized at the first picture of a new sequence. + public int @output_picture_number; + /// Dimensions of the decoded video intended for presentation. + public int @width; + public int @height; + /// Dimensions of the coded video. + public int @coded_width; + public int @coded_height; + /// The format of the coded data, corresponds to enum AVPixelFormat for video and for enum AVSampleFormat for audio. + public int @format; +} + +public unsafe partial struct AVComponentDescriptor +{ + /// Which of the 4 planes contains the component. + public int @plane; + /// Number of elements between 2 horizontally consecutive pixels. Elements are bits for bitstream formats, bytes otherwise. + public int @step; + /// Number of elements before the component of the first pixel. Elements are bits for bitstream formats, bytes otherwise. + public int @offset; + /// Number of least significant bits that must be shifted away to get the value. + public int @shift; + /// Number of bits in the component. 
+ public int @depth; +} + +/// Content light level needed by to transmit HDR over HDMI (CTA-861.3). +public unsafe partial struct AVContentLightMetadata +{ + /// Max content light level (cd/m^2). + public uint @MaxCLL; + /// Max average light level per frame (cd/m^2). + public uint @MaxFALL; +} + +/// This structure describes the bitrate properties of an encoded bitstream. It roughly corresponds to a subset the VBV parameters for MPEG-2 or HRD parameters for H.264/HEVC. +public unsafe partial struct AVCPBProperties +{ + /// Maximum bitrate of the stream, in bits per second. Zero if unknown or unspecified. + public long @max_bitrate; + /// Minimum bitrate of the stream, in bits per second. Zero if unknown or unspecified. + public long @min_bitrate; + /// Average bitrate of the stream, in bits per second. Zero if unknown or unspecified. + public long @avg_bitrate; + /// The size of the buffer to which the ratecontrol is applied, in bits. Zero if unknown or unspecified. + public long @buffer_size; + /// The delay between the time the packet this structure is associated with is received and the time when it should be decoded, in periods of a 27MHz clock. + public ulong @vbv_delay; +} + +/// D3D11 frame descriptor for pool allocation. +public unsafe partial struct AVD3D11FrameDescriptor +{ + /// The texture in which the frame is located. The reference count is managed by the AVBufferRef, and destroying the reference will release the interface. + public ID3D11Texture2D* @texture; + /// The index into the array texture element representing the frame, or 0 if the texture is not an array texture. + public long @index; +} + +/// This structure is used to provides the necessary configurations and data to the Direct3D11 FFmpeg HWAccel implementation. 
+public unsafe partial struct AVD3D11VAContext +{ + /// D3D11 decoder object + public ID3D11VideoDecoder* @decoder; + /// D3D11 VideoContext + public ID3D11VideoContext* @video_context; + /// D3D11 configuration used to create the decoder + public D3D11_VIDEO_DECODER_CONFIG* @cfg; + /// The number of surface in the surface array + public uint @surface_count; + /// The array of Direct3D surfaces used to create the decoder + public ID3D11VideoDecoderOutputView** @surface; + /// A bit field configuring the workarounds needed for using the decoder + public ulong @workaround; + /// Private to the FFmpeg AVHWAccel implementation + public uint @report_id; + /// Mutex to access video_context + public void* @context_mutex; +} + +/// This struct is allocated as AVHWDeviceContext.hwctx +public unsafe partial struct AVD3D11VADeviceContext +{ + /// Device used for texture creation and access. This can also be used to set the libavcodec decoding device. + public ID3D11Device* @device; + /// If unset, this will be set from the device field on init. + public ID3D11DeviceContext* @device_context; + /// If unset, this will be set from the device field on init. + public ID3D11VideoDevice* @video_device; + /// If unset, this will be set from the device_context field on init. + public ID3D11VideoContext* @video_context; + /// Callbacks for locking. They protect accesses to device_context and video_context calls. They also protect access to the internal staging texture (for av_hwframe_transfer_data() calls). They do NOT protect access to hwcontext or decoder state in general. + public AVD3D11VADeviceContext_lock_func @lock; + public AVD3D11VADeviceContext_unlock_func @unlock; + public void* @lock_ctx; +} + +/// This struct is allocated as AVHWFramesContext.hwctx +public unsafe partial struct AVD3D11VAFramesContext +{ + /// The canonical texture used for pool allocation. 
If this is set to NULL on init, the hwframes implementation will allocate and set an array texture if initial_pool_size > 0. + public ID3D11Texture2D* @texture; + /// D3D11_TEXTURE2D_DESC.BindFlags used for texture creation. The user must at least set D3D11_BIND_DECODER if the frames context is to be used for video decoding. This field is ignored/invalid if a user-allocated texture is provided. + public uint @BindFlags; + /// D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation. This field is ignored/invalid if a user-allocated texture is provided. + public uint @MiscFlags; + /// In case if texture structure member above is not NULL contains the same texture pointer for all elements and different indexes into the array texture. In case if texture structure member above is NULL, all elements contains pointers to separate non-array textures and 0 indexes. This field is ignored/invalid if a user-allocated texture is provided. + public AVD3D11FrameDescriptor* @texture_infos; +} + +/// Structure describes device capabilities. +public unsafe partial struct AVDeviceCapabilitiesQuery +{ + public AVClass* @av_class; + public AVFormatContext* @device_context; + public AVCodecID @codec; + public AVSampleFormat @sample_format; + public AVPixelFormat @pixel_format; + public int @sample_rate; + public int @channels; + public long @channel_layout; + public int @window_width; + public int @window_height; + public int @frame_width; + public int @frame_height; + public AVRational @fps; +} + +/// Structure describes basic parameters of the device. +public unsafe partial struct AVDeviceInfo +{ + /// device name, format depends on device + public byte* @device_name; + /// human friendly name + public byte* @device_description; + /// array indicating what media types(s), if any, a device can provide. 
If null, cannot provide any + public AVMediaType* @media_types; + /// length of media_types array, 0 if device cannot provide any media types + public int @nb_media_types; +} + +/// List of devices. +public unsafe partial struct AVDeviceInfoList +{ + /// list of autodetected devices + public AVDeviceInfo** @devices; + /// number of autodetected devices + public int @nb_devices; + /// index of default device or -1 if no default + public int @default_device; +} + +public unsafe partial struct AVDeviceRect +{ + /// x coordinate of top left corner + public int @x; + /// y coordinate of top left corner + public int @y; + /// width + public int @width; + /// height + public int @height; +} + +public unsafe partial struct AVDictionaryEntry +{ + public byte* @key; + public byte* @value; +} + +/// This struct is allocated as AVHWDeviceContext.hwctx +public unsafe partial struct AVDXVA2DeviceContext +{ + public IDirect3DDeviceManager9* @devmgr; +} + +/// This struct is allocated as AVHWFramesContext.hwctx +public unsafe partial struct AVDXVA2FramesContext +{ + /// The surface type (e.g. DXVA2_VideoProcessorRenderTarget or DXVA2_VideoDecoderRenderTarget). Must be set by the caller. + public ulong @surface_type; + /// The surface pool. When an external pool is not provided by the caller, this will be managed (allocated and filled on init, freed on uninit) by libavutil. + public IDirect3DSurface9** @surfaces; + public int @nb_surfaces; + /// Certain drivers require the decoder to be destroyed before the surfaces. To allow internally managed pools to work properly in such cases, this field is provided. + public IDirectXVideoDecoder* @decoder_to_release; +} + +/// This struct represents dynamic metadata for color volume transform - application 4 of SMPTE 2094-40:2016 standard. +public unsafe partial struct AVDynamicHDRPlus +{ + /// Country code by Rec. ITU-T T.35 Annex A. The value shall be 0xB5. 
+ public byte @itu_t_t35_country_code; + /// Application version in the application defining document in ST-2094 suite. The value shall be set to 0. + public byte @application_version; + /// The number of processing windows. The value shall be in the range of 1 to 3, inclusive. + public byte @num_windows; + /// The color transform parameters for every processing window. + public AVHDRPlusColorTransformParams_array3 @params; + /// The nominal maximum display luminance of the targeted system display, in units of 0.0001 candelas per square metre. The value shall be in the range of 0 to 10000, inclusive. + public AVRational @targeted_system_display_maximum_luminance; + /// This flag shall be equal to 0 in bit streams conforming to this version of this Specification. The value 1 is reserved for future use. + public byte @targeted_system_display_actual_peak_luminance_flag; + /// The number of rows in the targeted_system_display_actual_peak_luminance array. The value shall be in the range of 2 to 25, inclusive. + public byte @num_rows_targeted_system_display_actual_peak_luminance; + /// The number of columns in the targeted_system_display_actual_peak_luminance array. The value shall be in the range of 2 to 25, inclusive. + public byte @num_cols_targeted_system_display_actual_peak_luminance; + /// The normalized actual peak luminance of the targeted system display. The values should be in the range of 0 to 1, inclusive and in multiples of 1/15. + public AVRational_array25x25 @targeted_system_display_actual_peak_luminance; + /// This flag shall be equal to 0 in bitstreams conforming to this version of this Specification. The value 1 is reserved for future use. + public byte @mastering_display_actual_peak_luminance_flag; + /// The number of rows in the mastering_display_actual_peak_luminance array. The value shall be in the range of 2 to 25, inclusive. 
+ public byte @num_rows_mastering_display_actual_peak_luminance; + /// The number of columns in the mastering_display_actual_peak_luminance array. The value shall be in the range of 2 to 25, inclusive. + public byte @num_cols_mastering_display_actual_peak_luminance; + /// The normalized actual peak luminance of the mastering display used for mastering the image essence. The values should be in the range of 0 to 1, inclusive and in multiples of 1/15. + public AVRational_array25x25 @mastering_display_actual_peak_luminance; +} + +/// Filter definition. This defines the pads a filter contains, and all the callback functions used to interact with the filter. +public unsafe partial struct AVFilter +{ + /// Filter name. Must be non-NULL and unique among filters. + public byte* @name; + /// A description of the filter. May be NULL. + public byte* @description; + /// List of static inputs. + public AVFilterPad* @inputs; + /// List of static outputs. + public AVFilterPad* @outputs; + /// A class for the private data, used to declare filter private AVOptions. This field is NULL for filters that do not declare any options. + public AVClass* @priv_class; + /// A combination of AVFILTER_FLAG_* + public int @flags; + /// The number of entries in the list of inputs. + public byte @nb_inputs; + /// The number of entries in the list of outputs. + public byte @nb_outputs; + /// This field determines the state of the formats union. It is an enum FilterFormatsState value. + public byte @formats_state; + /// Filter pre-initialization function + public AVFilter_preinit_func @preinit; + /// Filter initialization function. + public AVFilter_init_func @init; + /// Should be set instead of AVFilter.init "init" by the filters that want to pass a dictionary of AVOptions to nested contexts that are allocated during init. + public AVFilter_init_dict_func @init_dict; + /// Filter uninitialization function. 
+ public AVFilter_uninit_func @uninit; + public AVFilter_formats @formats; + /// size of private data to allocate for the filter + public int @priv_size; + /// Additional flags for avfilter internal use only. + public int @flags_internal; + /// Make the filter instance process a command. + public AVFilter_process_command_func @process_command; + /// Filter activation function. + public AVFilter_activate_func @activate; +} + +/// The state of the following union is determined by formats_state. See the documentation of enum FilterFormatsState in internal.h. +[StructLayout(LayoutKind.Explicit)] +public unsafe partial struct AVFilter_formats +{ + /// Query formats supported by the filter on its inputs and outputs. + [FieldOffset(0)] + public _query_func_func @query_func; + /// A pointer to an array of admissible pixel formats delimited by AV_PIX_FMT_NONE. The generic code will use this list to indicate that this filter supports each of these pixel formats, provided that all inputs and outputs use the same pixel format. + [FieldOffset(0)] + public AVPixelFormat* @pixels_list; + /// Analogous to pixels, but delimited by AV_SAMPLE_FMT_NONE and restricted to filters that only have AVMEDIA_TYPE_AUDIO inputs and outputs. + [FieldOffset(0)] + public AVSampleFormat* @samples_list; + /// Equivalent to { pix_fmt, AV_PIX_FMT_NONE } as pixels_list. + [FieldOffset(0)] + public AVPixelFormat @pix_fmt; + /// Equivalent to { sample_fmt, AV_SAMPLE_FMT_NONE } as samples_list. 
+ [FieldOffset(0)] + public AVSampleFormat @sample_fmt; +} + +/// An instance of a filter +public unsafe partial struct AVFilterContext +{ + /// needed for av_log() and filters' common options + public AVClass* @av_class; + /// the AVFilter of which this is an instance + public AVFilter* @filter; + /// name of this filter instance + public byte* @name; + /// array of input pads + public AVFilterPad* @input_pads; + /// array of pointers to input links + public AVFilterLink** @inputs; + /// number of input pads + public uint @nb_inputs; + /// array of output pads + public AVFilterPad* @output_pads; + /// array of pointers to output links + public AVFilterLink** @outputs; + /// number of output pads + public uint @nb_outputs; + /// private data for use by the filter + public void* @priv; + /// filtergraph this filter belongs to + public AVFilterGraph* @graph; + /// Type of multithreading being allowed/used. A combination of AVFILTER_THREAD_* flags. + public int @thread_type; + /// An opaque struct for libavfilter internal use. + public AVFilterInternal* @internal; + public AVFilterCommand* @command_queue; + /// enable expression string + public byte* @enable_str; + /// parsed expression (AVExpr*) + public void* @enable; + /// variable values for the enable expression + public double* @var_values; + /// the enabled state from the last expression evaluation + public int @is_disabled; + /// For filters which will create hardware frames, sets the device the filter should create them in. All other filters will ignore this field: in particular, a filter which consumes or processes hardware frames will instead use the hw_frames_ctx field in AVFilterLink to carry the hardware context information. + public AVBufferRef* @hw_device_ctx; + /// Max number of threads allowed in this filter instance. If <= 0, its value is ignored. Overrides global number of threads set per filter graph. + public int @nb_threads; + /// Ready status of the filter. 
A non-0 value means that the filter needs activating; a higher value suggests a more urgent activation. + public uint @ready; + /// Sets the number of extra hardware frames which the filter will allocate on its output links for use in following filters or by the caller. + public int @extra_hw_frames; +} + +/// Lists of formats / etc. supported by an end of a link. +public unsafe partial struct AVFilterFormatsConfig +{ + /// List of supported formats (pixel or sample). + public AVFilterFormats* @formats; + /// Lists of supported sample rates, only for audio. + public AVFilterFormats* @samplerates; + /// Lists of supported channel layouts, only for audio. + public AVFilterChannelLayouts* @channel_layouts; +} + +public unsafe partial struct AVFilterGraph +{ + public AVClass* @av_class; + public AVFilterContext** @filters; + public uint @nb_filters; + /// sws options to use for the auto-inserted scale filters + public byte* @scale_sws_opts; + /// Type of multithreading allowed for filters in this graph. A combination of AVFILTER_THREAD_* flags. + public int @thread_type; + /// Maximum number of threads used by filters in this graph. May be set by the caller before adding any filters to the filtergraph. Zero (the default) means that the number of threads is determined automatically. + public int @nb_threads; + /// Opaque object for libavfilter internal use. + public AVFilterGraphInternal* @internal; + /// Opaque user data. May be set by the caller to an arbitrary value, e.g. to be used from callbacks like AVFilterGraph.execute. Libavfilter will not touch this field in any way. + public void* @opaque; + /// This callback may be set by the caller immediately after allocating the graph and before adding any filters to it, to provide a custom multithreading implementation. 
+ public AVFilterGraph_execute_func @execute; + /// swr options to use for the auto-inserted aresample filters, Access ONLY through AVOptions + public byte* @aresample_swr_opts; + /// Private fields + public AVFilterLink** @sink_links; + public int @sink_links_count; + public uint @disable_auto_convert; +} + +/// A linked-list of the inputs/outputs of the filter chain. +public unsafe partial struct AVFilterInOut +{ + /// unique name for this input/output in the list + public byte* @name; + /// filter context associated to this input/output + public AVFilterContext* @filter_ctx; + /// index of the filt_ctx pad to use for linking + public int @pad_idx; + /// next input/output in the list, NULL if this is the last + public AVFilterInOut* @next; +} + +/// A link between two filters. This contains pointers to the source and destination filters between which this link exists, and the indexes of the pads involved. In addition, this link also contains the parameters which have been negotiated and agreed upon between the filters, such as image dimensions, format, etc. +public unsafe partial struct AVFilterLink +{ + /// source filter + public AVFilterContext* @src; + /// output pad on the source filter + public AVFilterPad* @srcpad; + /// dest filter + public AVFilterContext* @dst; + /// input pad on the dest filter + public AVFilterPad* @dstpad; + /// filter media type + public AVMediaType @type; + /// agreed upon image width + public int @w; + /// agreed upon image height + public int @h; + /// agreed upon sample aspect ratio + public AVRational @sample_aspect_ratio; + /// channel layout of current buffer (see libavutil/channel_layout.h) + [Obsolete("use ch_layout")] + public ulong @channel_layout; + /// samples per second + public int @sample_rate; + /// agreed upon media format + public int @format; + /// Define the time base used by the PTS of the frames/samples which will pass through this link. 
During the configuration stage, each filter is supposed to change only the output timebase, while the timebase of the input link is assumed to be an unchangeable property. + public AVRational @time_base; + /// channel layout of current buffer (see libavutil/channel_layout.h) + public AVChannelLayout @ch_layout; + /// Lists of supported formats / etc. supported by the input filter. + public AVFilterFormatsConfig @incfg; + /// Lists of supported formats / etc. supported by the output filter. + public AVFilterFormatsConfig @outcfg; + public AVFilterLink_init_state @init_state; + /// Graph the filter belongs to. + public AVFilterGraph* @graph; + /// Current timestamp of the link, as defined by the most recent frame(s), in link time_base units. + public long @current_pts; + /// Current timestamp of the link, as defined by the most recent frame(s), in AV_TIME_BASE units. + public long @current_pts_us; + /// Index in the age array. + public int @age_index; + /// Frame rate of the stream on the link, or 1/0 if unknown or variable; if left to 0/0, will be automatically copied from the first input of the source filter if it exists. + public AVRational @frame_rate; + /// Minimum number of samples to filter at once. If filter_frame() is called with fewer samples, it will accumulate them in a FIFO. This field and the related ones must not be changed after filtering has started. If 0, all related fields are ignored. + public int @min_samples; + /// Maximum number of samples to filter at once. If filter_frame() is called with more samples, it will split them. + public int @max_samples; + /// Number of past frames sent through the link. + public long @frame_count_in; + /// Number of past frames sent through the link. + public long @frame_count_out; + /// Number of past samples sent through the link. + public long @sample_count_in; + /// Number of past samples sent through the link. + public long @sample_count_out; + /// A pointer to a FFFramePool struct. 
+ public void* @frame_pool; + /// True if a frame is currently wanted on the output of this filter. Set when ff_request_frame() is called by the output, cleared when a frame is filtered. + public int @frame_wanted_out; + /// For hwaccel pixel formats, this should be a reference to the AVHWFramesContext describing the frames. + public AVBufferRef* @hw_frames_ctx; + /// Internal structure members. The fields below this limit are internal for libavfilter's use and must in no way be accessed by applications. + public byte_array61440 @reserved; +} + +/// Format I/O context. New fields can be added to the end with minor version bumps. Removal, reordering and changes to existing fields require a major version bump. sizeof(AVFormatContext) must not be used outside libav*, use avformat_alloc_context() to create an AVFormatContext. +public unsafe partial struct AVFormatContext +{ + /// A class for logging and avoptions. Set by avformat_alloc_context(). Exports (de)muxer private options if they exist. + public AVClass* @av_class; + /// The input container format. + public AVInputFormat* @iformat; + /// The output container format. + public AVOutputFormat* @oformat; + /// Format private data. This is an AVOptions-enabled struct if and only if iformat/oformat.priv_class is not NULL. + public void* @priv_data; + /// I/O context. + public AVIOContext* @pb; + /// Flags signalling stream properties. A combination of AVFMTCTX_*. Set by libavformat. + public int @ctx_flags; + /// Number of elements in AVFormatContext.streams. + public uint @nb_streams; + /// A list of all streams in the file. New streams are created with avformat_new_stream(). + public AVStream** @streams; + /// input or output URL. Unlike the old filename field, this field has no length restriction. + public byte* @url; + /// Position of the first frame of the component, in AV_TIME_BASE fractional seconds. NEVER set this value directly: It is deduced from the AVStream values. 
+ public long @start_time; + /// Duration of the stream, in AV_TIME_BASE fractional seconds. Only set this value if you know none of the individual stream durations and also do not set any of them. This is deduced from the AVStream values if not set. + public long @duration; + /// Total stream bitrate in bit/s, 0 if not available. Never set it directly if the file_size and the duration are known as FFmpeg can compute it automatically. + public long @bit_rate; + public uint @packet_size; + public int @max_delay; + /// Flags modifying the (de)muxer behaviour. A combination of AVFMT_FLAG_*. Set by the user before avformat_open_input() / avformat_write_header(). + public int @flags; + /// Maximum number of bytes read from input in order to determine stream properties. Used when reading the global header and in avformat_find_stream_info(). + public long @probesize; + /// Maximum duration (in AV_TIME_BASE units) of the data read from input in avformat_find_stream_info(). Demuxing only, set by the caller before avformat_find_stream_info(). Can be set to 0 to let avformat choose using a heuristic. + public long @max_analyze_duration; + public byte* @key; + public int @keylen; + public uint @nb_programs; + public AVProgram** @programs; + /// Forced video codec_id. Demuxing: Set by user. + public AVCodecID @video_codec_id; + /// Forced audio codec_id. Demuxing: Set by user. + public AVCodecID @audio_codec_id; + /// Forced subtitle codec_id. Demuxing: Set by user. + public AVCodecID @subtitle_codec_id; + /// Maximum amount of memory in bytes to use for the index of each stream. If the index exceeds this size, entries will be discarded as needed to maintain a smaller size. This can lead to slower or less accurate seeking (depends on demuxer). Demuxers for which a full in-memory index is mandatory will ignore this. 
- muxing: unused - demuxing: set by user + public uint @max_index_size; + /// Maximum amount of memory in bytes to use for buffering frames obtained from realtime capture devices. + public uint @max_picture_buffer; + /// Number of chapters in AVChapter array. When muxing, chapters are normally written in the file header, so nb_chapters should normally be initialized before write_header is called. Some muxers (e.g. mov and mkv) can also write chapters in the trailer. To write chapters in the trailer, nb_chapters must be zero when write_header is called and non-zero when write_trailer is called. - muxing: set by user - demuxing: set by libavformat + public uint @nb_chapters; + public AVChapter** @chapters; + /// Metadata that applies to the whole file. + public AVDictionary* @metadata; + /// Start time of the stream in real world time, in microseconds since the Unix epoch (00:00 1st January 1970). That is, pts=0 in the stream was captured at this real world time. - muxing: Set by the caller before avformat_write_header(). If set to either 0 or AV_NOPTS_VALUE, then the current wall-time will be used. - demuxing: Set by libavformat. AV_NOPTS_VALUE if unknown. Note that the value may become known after some number of frames have been received. + public long @start_time_realtime; + /// The number of frames used for determining the framerate in avformat_find_stream_info(). Demuxing only, set by the caller before avformat_find_stream_info(). + public int @fps_probe_size; + /// Error recognition; higher values will detect more errors but may misdetect some more or less valid parts as errors. Demuxing only, set by the caller before avformat_open_input(). + public int @error_recognition; + /// Custom interrupt callbacks for the I/O layer. + public AVIOInterruptCB @interrupt_callback; + /// Flags to enable debugging. + public int @debug; + /// Maximum buffering duration for interleaving. 
+ public long @max_interleave_delta; + /// Allow non-standard and experimental extension + public int @strict_std_compliance; + /// Flags indicating events happening on the file, a combination of AVFMT_EVENT_FLAG_*. + public int @event_flags; + /// Maximum number of packets to read while waiting for the first timestamp. Decoding only. + public int @max_ts_probe; + /// Avoid negative timestamps during muxing. Any value of the AVFMT_AVOID_NEG_TS_* constants. Note, this works better when using av_interleaved_write_frame(). - muxing: Set by user - demuxing: unused + public int @avoid_negative_ts; + /// Transport stream id. This will be moved into demuxer private options. Thus no API/ABI compatibility + public int @ts_id; + /// Audio preload in microseconds. Note, not all formats support this and unpredictable things may happen if it is used when not supported. - encoding: Set by user - decoding: unused + public int @audio_preload; + /// Max chunk time in microseconds. Note, not all formats support this and unpredictable things may happen if it is used when not supported. - encoding: Set by user - decoding: unused + public int @max_chunk_duration; + /// Max chunk size in bytes. Note, not all formats support this and unpredictable things may happen if it is used when not supported. - encoding: Set by user - decoding: unused + public int @max_chunk_size; + /// forces the use of wallclock timestamps as pts/dts of packets. This has undefined results in the presence of B frames. - encoding: unused - decoding: Set by user + public int @use_wallclock_as_timestamps; + /// avio flags, used to force AVIO_FLAG_DIRECT. - encoding: unused - decoding: Set by user + public int @avio_flags; + /// The duration field can be estimated through various ways, and this field can be used to know how the duration was estimated. 
- encoding: unused - decoding: Read by user + public AVDurationEstimationMethod @duration_estimation_method; + /// Skip initial bytes when opening stream - encoding: unused - decoding: Set by user + public long @skip_initial_bytes; + /// Correct single timestamp overflows - encoding: unused - decoding: Set by user + public uint @correct_ts_overflow; + /// Force seeking to any (also non key) frames. - encoding: unused - decoding: Set by user + public int @seek2any; + /// Flush the I/O context after each packet. - encoding: Set by user - decoding: unused + public int @flush_packets; + /// format probing score. The maximal score is AVPROBE_SCORE_MAX, it is set when the demuxer probes the format. - encoding: unused - decoding: set by avformat, read by user + public int @probe_score; + /// Maximum number of bytes read from input in order to identify the AVInputFormat "input format". Only used when the format is not set explicitly by the caller. + public int @format_probesize; + /// ',' separated list of allowed decoders. If NULL then all are allowed - encoding: unused - decoding: set by user + public byte* @codec_whitelist; + /// ',' separated list of allowed demuxers. If NULL then all are allowed - encoding: unused - decoding: set by user + public byte* @format_whitelist; + /// IO repositioned flag. This is set by avformat when the underlying IO context read pointer is repositioned, for example when doing byte based seeking. Demuxers can use the flag to detect such changes. + public int @io_repositioned; + /// Forced video codec. This allows forcing a specific decoder, even when there are multiple with the same codec_id. Demuxing: Set by user + public AVCodec* @video_codec; + /// Forced audio codec. This allows forcing a specific decoder, even when there are multiple with the same codec_id. Demuxing: Set by user + public AVCodec* @audio_codec; + /// Forced subtitle codec. This allows forcing a specific decoder, even when there are multiple with the same codec_id. 
Demuxing: Set by user + public AVCodec* @subtitle_codec; + /// Forced data codec. This allows forcing a specific decoder, even when there are multiple with the same codec_id. Demuxing: Set by user + public AVCodec* @data_codec; + /// Number of bytes to be written as padding in a metadata header. Demuxing: Unused. Muxing: Set by user via av_format_set_metadata_header_padding. + public int @metadata_header_padding; + /// User data. This is a place for some private data of the user. + public void* @opaque; + /// Callback used by devices to communicate with application. + public AVFormatContext_control_message_cb_func @control_message_cb; + /// Output timestamp offset, in microseconds. Muxing: set by user + public long @output_ts_offset; + /// dump format separator. can be ", " or " " or anything else - muxing: Set by user. - demuxing: Set by user. + public byte* @dump_separator; + /// Forced Data codec_id. Demuxing: Set by user. + public AVCodecID @data_codec_id; + /// ',' separated list of allowed protocols. - encoding: unused - decoding: set by user + public byte* @protocol_whitelist; + /// A callback for opening new IO streams. + public AVFormatContext_io_open_func @io_open; + /// A callback for closing the streams opened with AVFormatContext.io_open(). + public AVFormatContext_io_close_func @io_close; + /// ',' separated list of disallowed protocols. - encoding: unused - decoding: set by user + public byte* @protocol_blacklist; + /// The maximum number of streams. - encoding: unused - decoding: set by user + public int @max_streams; + /// Skip duration calculation in estimate_timings_from_pts. - encoding: unused - decoding: set by user + public int @skip_estimate_duration_from_pts; + /// Maximum number of packets that can be probed - encoding: unused - decoding: set by user + public int @max_probe_packets; + /// A callback for closing the streams opened with AVFormatContext.io_open(). 
+ public AVFormatContext_io_close2_func @io_close2; +} + +/// This structure describes decoded (raw) audio or video data. +public unsafe partial struct AVFrame +{ + /// pointer to the picture/channel planes. This might be different from the first allocated byte. For video, it could even point to the end of the image data. + public byte_ptrArray8 @data; + /// For video, a positive or negative value, which is typically indicating the size in bytes of each picture line, but it can also be: - the negative byte size of lines for vertical flipping (with data[n] pointing to the end of the data - a positive or negative multiple of the byte size as for accessing even and odd fields of a frame (possibly flipped) + public int_array8 @linesize; + /// pointers to the data planes/channels. + public byte** @extended_data; + /// Video frames only. The coded dimensions (in pixels) of the video frame, i.e. the size of the rectangle that contains some well-defined values. + public int @width; + /// Video frames only. The coded dimensions (in pixels) of the video frame, i.e. the size of the rectangle that contains some well-defined values. + public int @height; + /// number of audio samples (per channel) described by this frame + public int @nb_samples; + /// format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames, enum AVSampleFormat for audio) + public int @format; + /// 1 -> keyframe, 0 -> not + public int @key_frame; + /// Picture type of the frame. + public AVPictureType @pict_type; + /// Sample aspect ratio for the video frame, 0/1 if unknown/unspecified. + public AVRational @sample_aspect_ratio; + /// Presentation timestamp in time_base units (time when frame should be shown to user). + public long @pts; + /// DTS copied from the AVPacket that triggered returning this frame. (if frame threading isn't used) This is also the Presentation time of this AVFrame calculated from only AVPacket.dts values without pts values. 
+ public long @pkt_dts; + /// Time base for the timestamps in this frame. In the future, this field may be set on frames output by decoders or filters, but its value will be by default ignored on input to encoders or filters. + public AVRational @time_base; + /// picture number in bitstream order + public int @coded_picture_number; + /// picture number in display order + public int @display_picture_number; + /// quality (between 1 (good) and FF_LAMBDA_MAX (bad)) + public int @quality; + /// for some private data of the user + public void* @opaque; + /// When decoding, this signals how much the picture must be delayed. extra_delay = repeat_pict / (2*fps) + public int @repeat_pict; + /// The content of the picture is interlaced. + public int @interlaced_frame; + /// If the content is interlaced, is top field displayed first. + public int @top_field_first; + /// Tell user application that palette has changed from previous frame. + public int @palette_has_changed; + /// reordered opaque 64 bits (generally an integer or a double precision float PTS but can be anything). The user sets AVCodecContext.reordered_opaque to represent the input at that time, the decoder reorders values as needed and sets AVFrame.reordered_opaque to exactly one of the values provided by the user through AVCodecContext.reordered_opaque + public long @reordered_opaque; + /// Sample rate of the audio data. + public int @sample_rate; + /// Channel layout of the audio data. + [Obsolete("use ch_layout instead")] + public ulong @channel_layout; + /// AVBuffer references backing the data for this frame. All the pointers in data and extended_data must point inside one of the buffers in buf or extended_buf. This array must be filled contiguously -- if buf[i] is non-NULL then buf[j] must also be non-NULL for all j < i. 
+ public AVBufferRef_ptrArray8 @buf; + /// For planar audio which requires more than AV_NUM_DATA_POINTERS AVBufferRef pointers, this array will hold all the references which cannot fit into AVFrame.buf. + public AVBufferRef** @extended_buf; + /// Number of elements in extended_buf. + public int @nb_extended_buf; + public AVFrameSideData** @side_data; + public int @nb_side_data; + /// Frame flags, a combination of lavu_frame_flags + public int @flags; + /// MPEG vs JPEG YUV range. - encoding: Set by user - decoding: Set by libavcodec + public AVColorRange @color_range; + public AVColorPrimaries @color_primaries; + public AVColorTransferCharacteristic @color_trc; + /// YUV colorspace type. - encoding: Set by user - decoding: Set by libavcodec + public AVColorSpace @colorspace; + public AVChromaLocation @chroma_location; + /// frame timestamp estimated using various heuristics, in stream time base - encoding: unused - decoding: set by libavcodec, read by user. + public long @best_effort_timestamp; + /// reordered pos from the last AVPacket that has been input into the decoder - encoding: unused - decoding: Read by user. + public long @pkt_pos; + /// duration of the corresponding packet, expressed in AVStream->time_base units, 0 if unknown. - encoding: unused - decoding: Read by user. + public long @pkt_duration; + /// metadata. - encoding: Set by user. - decoding: Set by libavcodec. + public AVDictionary* @metadata; + /// decode error flags of the frame, set to a combination of FF_DECODE_ERROR_xxx flags if the decoder produced a frame, but there were errors during the decoding. - encoding: unused - decoding: set by libavcodec, read by user. + public int @decode_error_flags; + /// number of audio channels, only used for audio. - encoding: unused - decoding: Read by user. + [Obsolete("use ch_layout instead")] + public int @channels; + /// size of the corresponding packet containing the compressed frame. It is set to a negative value if unknown. 
- encoding: unused - decoding: set by libavcodec, read by user. + public int @pkt_size; + /// For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame. + public AVBufferRef* @hw_frames_ctx; + /// AVBufferRef for free use by the API user. FFmpeg will never check the contents of the buffer ref. FFmpeg calls av_buffer_unref() on it when the frame is unreferenced. av_frame_copy_props() calls create a new reference with av_buffer_ref() for the target frame's opaque_ref field. + public AVBufferRef* @opaque_ref; + /// cropping Video frames only. The number of pixels to discard from the the top/bottom/left/right border of the frame to obtain the sub-rectangle of the frame intended for presentation. @{ + public ulong @crop_top; + public ulong @crop_bottom; + public ulong @crop_left; + public ulong @crop_right; + /// AVBufferRef for internal use by a single libav* library. Must not be used to transfer data between libraries. Has to be NULL when ownership of the frame leaves the respective library. + public AVBufferRef* @private_ref; + /// Channel layout of the audio data. + public AVChannelLayout @ch_layout; +} + +/// Structure to hold side data for an AVFrame. +public unsafe partial struct AVFrameSideData +{ + public AVFrameSideDataType @type; + public byte* @data; + public ulong @size; + public AVDictionary* @metadata; + public AVBufferRef* @buf; +} + +/// Color transform parameters at a processing window in a dynamic metadata for SMPTE 2094-40. +public unsafe partial struct AVHDRPlusColorTransformParams +{ + /// The relative x coordinate of the top left pixel of the processing window. The value shall be in the range of 0 and 1, inclusive and in multiples of 1/(width of Picture - 1). The value 1 corresponds to the absolute coordinate of width of Picture - 1. The value for first processing window shall be 0. 
+ public AVRational @window_upper_left_corner_x; + /// The relative y coordinate of the top left pixel of the processing window. The value shall be in the range of 0 and 1, inclusive and in multiples of 1/(height of Picture - 1). The value 1 corresponds to the absolute coordinate of height of Picture - 1. The value for first processing window shall be 0. + public AVRational @window_upper_left_corner_y; + /// The relative x coordinate of the bottom right pixel of the processing window. The value shall be in the range of 0 and 1, inclusive and in multiples of 1/(width of Picture - 1). The value 1 corresponds to the absolute coordinate of width of Picture - 1. The value for first processing window shall be 1. + public AVRational @window_lower_right_corner_x; + /// The relative y coordinate of the bottom right pixel of the processing window. The value shall be in the range of 0 and 1, inclusive and in multiples of 1/(height of Picture - 1). The value 1 corresponds to the absolute coordinate of height of Picture - 1. The value for first processing window shall be 1. + public AVRational @window_lower_right_corner_y; + /// The x coordinate of the center position of the concentric internal and external ellipses of the elliptical pixel selector in the processing window. The value shall be in the range of 0 to (width of Picture - 1), inclusive and in multiples of 1 pixel. + public ushort @center_of_ellipse_x; + /// The y coordinate of the center position of the concentric internal and external ellipses of the elliptical pixel selector in the processing window. The value shall be in the range of 0 to (height of Picture - 1), inclusive and in multiples of 1 pixel. + public ushort @center_of_ellipse_y; + /// The clockwise rotation angle in degree of arc with respect to the positive direction of the x-axis of the concentric internal and external ellipses of the elliptical pixel selector in the processing window. 
The value shall be in the range of 0 to 180, inclusive and in multiples of 1. + public byte @rotation_angle; + /// The semi-major axis value of the internal ellipse of the elliptical pixel selector in amount of pixels in the processing window. The value shall be in the range of 1 to 65535, inclusive and in multiples of 1 pixel. + public ushort @semimajor_axis_internal_ellipse; + /// The semi-major axis value of the external ellipse of the elliptical pixel selector in amount of pixels in the processing window. The value shall not be less than semimajor_axis_internal_ellipse of the current processing window. The value shall be in the range of 1 to 65535, inclusive and in multiples of 1 pixel. + public ushort @semimajor_axis_external_ellipse; + /// The semi-minor axis value of the external ellipse of the elliptical pixel selector in amount of pixels in the processing window. The value shall be in the range of 1 to 65535, inclusive and in multiples of 1 pixel. + public ushort @semiminor_axis_external_ellipse; + /// Overlap process option indicates one of the two methods of combining rendered pixels in the processing window in an image with at least one elliptical pixel selector. For overlapping elliptical pixel selectors in an image, overlap_process_option shall have the same value. + public AVHDRPlusOverlapProcessOption @overlap_process_option; + /// The maximum of the color components of linearized RGB values in the processing window in the scene. The values should be in the range of 0 to 1, inclusive and in multiples of 0.00001. maxscl[ 0 ], maxscl[ 1 ], and maxscl[ 2 ] are corresponding to R, G, B color components respectively. + public AVRational_array3 @maxscl; + /// The average of linearized maxRGB values in the processing window in the scene. The value should be in the range of 0 to 1, inclusive and in multiples of 0.00001. 
+ public AVRational @average_maxrgb; + /// The number of linearized maxRGB values at given percentiles in the processing window in the scene. The maximum value shall be 15. + public byte @num_distribution_maxrgb_percentiles; + /// The linearized maxRGB values at given percentiles in the processing window in the scene. + public AVHDRPlusPercentile_array15 @distribution_maxrgb; + /// The fraction of selected pixels in the image that contains the brightest pixel in the scene. The value shall be in the range of 0 to 1, inclusive and in multiples of 0.001. + public AVRational @fraction_bright_pixels; + /// This flag indicates that the metadata for the tone mapping function in the processing window is present (for value of 1). + public byte @tone_mapping_flag; + /// The x coordinate of the separation point between the linear part and the curved part of the tone mapping function. The value shall be in the range of 0 to 1, excluding 0 and in multiples of 1/4095. + public AVRational @knee_point_x; + /// The y coordinate of the separation point between the linear part and the curved part of the tone mapping function. The value shall be in the range of 0 to 1, excluding 0 and in multiples of 1/4095. + public AVRational @knee_point_y; + /// The number of the intermediate anchor parameters of the tone mapping function in the processing window. The maximum value shall be 15. + public byte @num_bezier_curve_anchors; + /// The intermediate anchor parameters of the tone mapping function in the processing window in the scene. The values should be in the range of 0 to 1, inclusive and in multiples of 1/1023. + public AVRational_array15 @bezier_curve_anchors; + /// This flag shall be equal to 0 in bitstreams conforming to this version of this Specification. Other values are reserved for future use. + public byte @color_saturation_mapping_flag; + /// The color saturation gain in the processing window in the scene. 
The value shall be in the range of 0 to 63/8, inclusive and in multiples of 1/8. The default value shall be 1. + public AVRational @color_saturation_weight; +} + +/// Represents the percentile at a specific percentage in a distribution. +public unsafe partial struct AVHDRPlusPercentile +{ + /// The percentage value corresponding to a specific percentile linearized RGB value in the processing window in the scene. The value shall be in the range of 0 to100, inclusive. + public byte @percentage; + /// The linearized maxRGB value at a specific percentile in the processing window in the scene. The value shall be in the range of 0 to 1, inclusive and in multiples of 0.00001. + public AVRational @percentile; +} + +public unsafe partial struct AVHWAccel +{ + /// Name of the hardware accelerated codec. The name is globally unique among encoders and among decoders (but an encoder and a decoder can share the same name). + public byte* @name; + /// Type of codec implemented by the hardware accelerator. + public AVMediaType @type; + /// Codec implemented by the hardware accelerator. + public AVCodecID @id; + /// Supported pixel format. + public AVPixelFormat @pix_fmt; + /// Hardware accelerated codec capabilities. see AV_HWACCEL_CODEC_CAP_* + public int @capabilities; + /// Allocate a custom buffer + public AVHWAccel_alloc_frame_func @alloc_frame; + /// Called at the beginning of each frame or field picture. + public AVHWAccel_start_frame_func @start_frame; + /// Callback for parameter data (SPS/PPS/VPS etc). + public AVHWAccel_decode_params_func @decode_params; + /// Callback for each slice. + public AVHWAccel_decode_slice_func @decode_slice; + /// Called at the end of each frame or field picture. + public AVHWAccel_end_frame_func @end_frame; + /// Size of per-frame hardware accelerator private data. + public int @frame_priv_data_size; + /// Initialize the hwaccel private data. + public AVHWAccel_init_func @init; + /// Uninitialize the hwaccel private data. 
+ public AVHWAccel_uninit_func @uninit; + /// Size of the private data to allocate in AVCodecInternal.hwaccel_priv_data. + public int @priv_data_size; + /// Internal hwaccel capabilities. + public int @caps_internal; + /// Fill the given hw_frames context with current codec parameters. Called from get_format. Refer to avcodec_get_hw_frames_parameters() for details. + public AVHWAccel_frame_params_func @frame_params; +} + +/// This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e. state that is not tied to a concrete processing configuration. E.g., in an API that supports hardware-accelerated encoding and decoding, this struct will (if possible) wrap the state that is common to both encoding and decoding and from which specific instances of encoders or decoders can be derived. +public unsafe partial struct AVHWDeviceContext +{ + /// A class for logging. Set by av_hwdevice_ctx_alloc(). + public AVClass* @av_class; + /// Private data used internally by libavutil. Must not be accessed in any way by the caller. + public AVHWDeviceInternal* @internal; + /// This field identifies the underlying API used for hardware access. + public AVHWDeviceType @type; + /// The format-specific data, allocated and freed by libavutil along with this context. + public void* @hwctx; + /// This field may be set by the caller before calling av_hwdevice_ctx_init(). + public AVHWDeviceContext_free_func @free; + /// Arbitrary user data, to be used e.g. by the free() callback. + public void* @user_opaque; +} + +/// This struct describes the constraints on hardware frames attached to a given device with a hardware-specific configuration. This is returned by av_hwdevice_get_hwframe_constraints() and must be freed by av_hwframe_constraints_free() after use. +public unsafe partial struct AVHWFramesConstraints +{ + /// A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE. This member will always be filled. 
+ public AVPixelFormat* @valid_hw_formats; + /// A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE. Can be NULL if this information is not known. + public AVPixelFormat* @valid_sw_formats; + /// The minimum size of frames in this hw_frames_ctx. (Zero if not known.) + public int @min_width; + public int @min_height; + /// The maximum size of frames in this hw_frames_ctx. (INT_MAX if not known / no limit.) + public int @max_width; + public int @max_height; +} + +/// This struct describes a set or pool of "hardware" frames (i.e. those with data not located in normal system memory). All the frames in the pool are assumed to be allocated in the same way and interchangeable. +public unsafe partial struct AVHWFramesContext +{ + /// A class for logging. + public AVClass* @av_class; + /// Private data used internally by libavutil. Must not be accessed in any way by the caller. + public AVHWFramesInternal* @internal; + /// A reference to the parent AVHWDeviceContext. This reference is owned and managed by the enclosing AVHWFramesContext, but the caller may derive additional references from it. + public AVBufferRef* @device_ref; + /// The parent AVHWDeviceContext. This is simply a pointer to device_ref->data provided for convenience. + public AVHWDeviceContext* @device_ctx; + /// The format-specific data, allocated and freed automatically along with this context. + public void* @hwctx; + /// This field may be set by the caller before calling av_hwframe_ctx_init(). + public AVHWFramesContext_free_func @free; + /// Arbitrary user data, to be used e.g. by the free() callback. + public void* @user_opaque; + /// A pool from which the frames are allocated by av_hwframe_get_buffer(). This field may be set by the caller before calling av_hwframe_ctx_init(). The buffers returned by calling av_buffer_pool_get() on this pool must have the properties described in the documentation in the corresponding hw type's header (hwcontext_*.h). 
The pool will be freed strictly before this struct's free() callback is invoked. + public AVBufferPool* @pool; + /// Initial size of the frame pool. If a device type does not support dynamically resizing the pool, then this is also the maximum pool size. + public int @initial_pool_size; + /// The pixel format identifying the underlying HW surface type. + public AVPixelFormat @format; + /// The pixel format identifying the actual data layout of the hardware frames. + public AVPixelFormat @sw_format; + /// The allocated dimensions of the frames in this pool. + public int @width; + /// The allocated dimensions of the frames in this pool. + public int @height; +} + +public unsafe partial struct AVIndexEntry +{ + public long @pos; + /// Timestamp in AVStream.time_base units, preferably the time from which on correctly decoded frames are available when seeking to this entry. That means preferable PTS on keyframe based formats. But demuxers can choose to store a different timestamp, if it is more convenient for the implementation or nothing better is known + public long @timestamp; + /// Flag is used to indicate which frame should be discarded after decoding. + public int @flags2_size30; + /// Minimum distance between this and the previous keyframe, used to avoid unneeded searching. + public int @min_distance; +} + +/// @{ +public unsafe partial struct AVInputFormat +{ + /// A comma separated list of short names for the format. New names may be appended with a minor bump. + public byte* @name; + /// Descriptive name for the format, meant to be more human-readable than name. You should use the NULL_IF_CONFIG_SMALL() macro to define it. + public byte* @long_name; + /// Can use flags: AVFMT_NOFILE, AVFMT_NEEDNUMBER, AVFMT_SHOW_IDS, AVFMT_NOTIMESTAMPS, AVFMT_GENERIC_INDEX, AVFMT_TS_DISCONT, AVFMT_NOBINSEARCH, AVFMT_NOGENSEARCH, AVFMT_NO_BYTE_SEEK, AVFMT_SEEK_TO_PTS. + public int @flags; + /// If extensions are defined, then no probe is done. 
You should usually not use extension format guessing because it is not reliable enough + public byte* @extensions; + public AVCodecTag** @codec_tag; + /// AVClass for the private context + public AVClass* @priv_class; + /// Comma-separated list of mime types. It is used check for matching mime types while probing. + public byte* @mime_type; + /// *************************************************************** No fields below this line are part of the public API. They may not be used outside of libavformat and can be changed and removed at will. New public fields should be added right above. **************************************************************** + public int @raw_codec_id; + /// Size of private data so that it can be allocated in the wrapper. + public int @priv_data_size; + /// Internal flags. See FF_FMT_FLAG_* in internal.h. + public int @flags_internal; + /// Tell if a given file has a chance of being parsed as this format. The buffer provided is guaranteed to be AVPROBE_PADDING_SIZE bytes big so you do not have to check for that unless you need more. + public AVInputFormat_read_probe_func @read_probe; + /// Read the format header and initialize the AVFormatContext structure. Return 0 if OK. 'avformat_new_stream' should be called to create new streams. + public AVInputFormat_read_header_func @read_header; + /// Read one packet and put it in 'pkt'. pts and flags are also set. 'avformat_new_stream' can be called only if the flag AVFMTCTX_NOHEADER is used and only in the calling thread (not in a background thread). + public AVInputFormat_read_packet_func @read_packet; + /// Close the stream. The AVFormatContext and AVStreams are not freed by this function + public AVInputFormat_read_close_func @read_close; + /// Seek to a given timestamp relative to the frames in stream component stream_index. + public AVInputFormat_read_seek_func @read_seek; + /// Get the next timestamp in stream[stream_index].time_base units. 
+ public AVInputFormat_read_timestamp_func @read_timestamp; + /// Start/resume playing - only meaningful if using a network-based format (RTSP). + public AVInputFormat_read_play_func @read_play; + /// Pause playing - only meaningful if using a network-based format (RTSP). + public AVInputFormat_read_pause_func @read_pause; + /// Seek to timestamp ts. Seeking will be done so that the point from which all active streams can be presented successfully will be closest to ts and within min/max_ts. Active streams are all streams that have AVStream.discard < AVDISCARD_ALL. + public AVInputFormat_read_seek2_func @read_seek2; + /// Returns device list with it properties. + public AVInputFormat_get_device_list_func @get_device_list; +} + +/// Bytestream IO Context. New public fields can be added with minor version bumps. Removal, reordering and changes to existing public fields require a major version bump. sizeof(AVIOContext) must not be used outside libav*. +public unsafe partial struct AVIOContext +{ + /// A class for private options. + public AVClass* @av_class; + /// Start of the buffer. + public byte* @buffer; + /// Maximum buffer size + public int @buffer_size; + /// Current position in the buffer + public byte* @buf_ptr; + /// End of the data, may be less than buffer+buffer_size if the read function returned less data than requested, e.g. for streams where no more data has been received yet. + public byte* @buf_end; + /// A private pointer, passed to the read/write/seek/... functions. 
+ public void* @opaque; + public AVIOContext_read_packet_func @read_packet; + public AVIOContext_write_packet_func @write_packet; + public AVIOContext_seek_func @seek; + /// position in the file of the current buffer + public long @pos; + /// true if was unable to read due to error or eof + public int @eof_reached; + /// contains the error code or 0 if no error happened + public int @error; + /// true if open for writing + public int @write_flag; + public int @max_packet_size; + /// Try to buffer at least this amount of data before flushing it. + public int @min_packet_size; + public ulong @checksum; + public byte* @checksum_ptr; + public AVIOContext_update_checksum_func @update_checksum; + /// Pause or resume playback for network streaming protocols - e.g. MMS. + public AVIOContext_read_pause_func @read_pause; + /// Seek to a given timestamp in stream with the specified stream_index. Needed for some network streaming protocols which don't support seeking to byte position. + public AVIOContext_read_seek_func @read_seek; + /// A combination of AVIO_SEEKABLE_ flags or 0 when the stream is not seekable. + public int @seekable; + /// avio_read and avio_write should if possible be satisfied directly instead of going through a buffer, and avio_seek will always call the underlying seek function directly. + public int @direct; + /// ',' separated list of allowed protocols. + public byte* @protocol_whitelist; + /// ',' separated list of disallowed protocols. + public byte* @protocol_blacklist; + /// A callback that is used instead of write_packet. + public AVIOContext_write_data_type_func @write_data_type; + /// If set, don't call write_data_type separately for AVIO_DATA_MARKER_BOUNDARY_POINT, but ignore them and treat them as AVIO_DATA_MARKER_UNKNOWN (to avoid needlessly small chunks of data returned from the callback). + public int @ignore_boundary_point; + [Obsolete("field utilized privately by libavformat. 
For a public statistic of how many bytes were written out, see AVIOContext::bytes_written.")] + public long @written; + /// Maximum reached position before a backward seek in the write buffer, used keeping track of already written data for a later flush. + public byte* @buf_ptr_max; + /// Read-only statistic of bytes read for this AVIOContext. + public long @bytes_read; + /// Read-only statistic of bytes written for this AVIOContext. + public long @bytes_written; +} + +public unsafe partial struct AVIODirContext +{ + public URLContext* @url_context; +} + +/// Describes single entry of the directory. +public unsafe partial struct AVIODirEntry +{ + /// Filename + public byte* @name; + /// Type of the entry + public int @type; + /// Set to 1 when name is encoded with UTF-8, 0 otherwise. Name can be encoded with UTF-8 even though 0 is set. + public int @utf8; + /// File size in bytes, -1 if unknown. + public long @size; + /// Time of last modification in microseconds since unix epoch, -1 if unknown. + public long @modification_timestamp; + /// Time of last access in microseconds since unix epoch, -1 if unknown. + public long @access_timestamp; + /// Time of last status change in microseconds since unix epoch, -1 if unknown. + public long @status_change_timestamp; + /// User ID of owner, -1 if unknown. + public long @user_id; + /// Group ID of owner, -1 if unknown. + public long @group_id; + /// Unix file mode, -1 if unknown. + public long @filemode; +} + +/// Callback for checking whether to abort blocking functions. AVERROR_EXIT is returned in this case by the interrupted function. During blocking operations, callback is called with opaque as parameter. If the callback returns 1, the blocking operation will be aborted. 
+public unsafe partial struct AVIOInterruptCB +{ + public AVIOInterruptCB_callback_func @callback; + public void* @opaque; +} + +/// Mastering display metadata capable of representing the color volume of the display used to master the content (SMPTE 2086:2014). +public unsafe partial struct AVMasteringDisplayMetadata +{ + /// CIE 1931 xy chromaticity coords of color primaries (r, g, b order). + public AVRational_array3x2 @display_primaries; + /// CIE 1931 xy chromaticity coords of white point. + public AVRational_array2 @white_point; + /// Min luminance of mastering display (cd/m^2). + public AVRational @min_luminance; + /// Max luminance of mastering display (cd/m^2). + public AVRational @max_luminance; + /// Flag indicating whether the display primaries (and white point) are set. + public int @has_primaries; + /// Flag indicating whether the luminance (min_ and max_) have been set. + public int @has_luminance; +} + +/// AVOption +public unsafe partial struct AVOption +{ + public byte* @name; + /// short English help text + public byte* @help; + /// The offset relative to the context structure where the option value is stored. It should be 0 for named constants. + public int @offset; + public AVOptionType @type; + public AVOption_default_val @default_val; + /// minimum valid value for the option + public double @min; + /// maximum valid value for the option + public double @max; + public int @flags; + /// The logical unit to which the option belongs. Non-constant options and corresponding named constants share the same unit. May be NULL. + public byte* @unit; +} + +/// the default value for scalar options +[StructLayout(LayoutKind.Explicit)] +public unsafe partial struct AVOption_default_val +{ + [FieldOffset(0)] + public long @i64; + [FieldOffset(0)] + public double @dbl; + [FieldOffset(0)] + public byte* @str; + [FieldOffset(0)] + public AVRational @q; +} + +/// A single allowed range of values, or a single allowed value. 
+public unsafe partial struct AVOptionRange +{ + public byte* @str; + /// Value range. For string ranges this represents the min/max length. For dimensions this represents the min/max pixel count or width/height in multi-component case. + public double @value_min; + /// Value range. For string ranges this represents the min/max length. For dimensions this represents the min/max pixel count or width/height in multi-component case. + public double @value_max; + /// Value's component range. For string this represents the unicode range for chars, 0-127 limits to ASCII. + public double @component_min; + /// Value's component range. For string this represents the unicode range for chars, 0-127 limits to ASCII. + public double @component_max; + /// Range flag. If set to 1 the struct encodes a range, if set to 0 a single value. + public int @is_range; +} + +/// List of AVOptionRange structs. +public unsafe partial struct AVOptionRanges +{ + /// Array of option ranges. + public AVOptionRange** @range; + /// Number of ranges per component. + public int @nb_ranges; + /// Number of componentes. + public int @nb_components; +} + +/// @{ +public unsafe partial struct AVOutputFormat +{ + public byte* @name; + /// Descriptive name for the format, meant to be more human-readable than name. You should use the NULL_IF_CONFIG_SMALL() macro to define it. + public byte* @long_name; + public byte* @mime_type; + /// comma-separated filename extensions + public byte* @extensions; + /// default audio codec + public AVCodecID @audio_codec; + /// default video codec + public AVCodecID @video_codec; + /// default subtitle codec + public AVCodecID @subtitle_codec; + /// can use flags: AVFMT_NOFILE, AVFMT_NEEDNUMBER, AVFMT_GLOBALHEADER, AVFMT_NOTIMESTAMPS, AVFMT_VARIABLE_FPS, AVFMT_NODIMENSIONS, AVFMT_NOSTREAMS, AVFMT_ALLOW_FLUSH, AVFMT_TS_NONSTRICT, AVFMT_TS_NEGATIVE + public int @flags; + /// List of supported codec_id-codec_tag pairs, ordered by "better choice first". 
The arrays are all terminated by AV_CODEC_ID_NONE. + public AVCodecTag** @codec_tag; + /// AVClass for the private context + public AVClass* @priv_class; + /// *************************************************************** No fields below this line are part of the public API. They may not be used outside of libavformat and can be changed and removed at will. New public fields should be added right above. **************************************************************** + public int @priv_data_size; + /// Internal flags. See FF_FMT_FLAG_* in internal.h. + public int @flags_internal; + public AVOutputFormat_write_header_func @write_header; + /// Write a packet. If AVFMT_ALLOW_FLUSH is set in flags, pkt can be NULL in order to flush data buffered in the muxer. When flushing, return 0 if there still is more data to flush, or 1 if everything was flushed and there is no more buffered data. + public AVOutputFormat_write_packet_func @write_packet; + public AVOutputFormat_write_trailer_func @write_trailer; + /// A format-specific function for interleavement. If unset, packets will be interleaved by dts. + public AVOutputFormat_interleave_packet_func @interleave_packet; + /// Test if the given codec can be stored in this container. + public AVOutputFormat_query_codec_func @query_codec; + public AVOutputFormat_get_output_timestamp_func @get_output_timestamp; + /// Allows sending messages from application to device. + public AVOutputFormat_control_message_func @control_message; + /// Write an uncoded AVFrame. + public AVOutputFormat_write_uncoded_frame_func @write_uncoded_frame; + /// Returns device list with it properties. + public AVOutputFormat_get_device_list_func @get_device_list; + /// default data codec + public AVCodecID @data_codec; + /// Initialize format. May allocate data here, and set any AVFormatContext or AVStream parameters that need to be set before packets are sent. This method must not write output. 
+ public AVOutputFormat_init_func @init; + /// Deinitialize format. If present, this is called whenever the muxer is being destroyed, regardless of whether or not the header has been written. + public AVOutputFormat_deinit_func @deinit; + /// Set up any necessary bitstream filtering and extract any extra data needed for the global header. + public AVOutputFormat_check_bitstream_func @check_bitstream; +} + +/// This structure stores compressed data. It is typically exported by demuxers and then passed as input to decoders, or received as output from encoders and then passed to muxers. +public unsafe partial struct AVPacket +{ + /// A reference to the reference-counted buffer where the packet data is stored. May be NULL, then the packet data is not reference-counted. + public AVBufferRef* @buf; + /// Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will be presented to the user. Can be AV_NOPTS_VALUE if it is not stored in the file. pts MUST be larger or equal to dts as presentation cannot happen before decompression, unless one wants to view hex dumps. Some formats misuse the terms dts and pts/cts to mean something different. Such timestamps must be converted to true pts/dts before they are stored in AVPacket. + public long @pts; + /// Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed. Can be AV_NOPTS_VALUE if it is not stored in the file. + public long @dts; + public byte* @data; + public int @size; + public int @stream_index; + /// A combination of AV_PKT_FLAG values + public int @flags; + /// Additional packet data that can be provided by the container. Packet can contain several types of side information. + public AVPacketSideData* @side_data; + public int @side_data_elems; + /// Duration of this packet in AVStream->time_base units, 0 if unknown. Equals next_pts - this_pts in presentation order. 
+ public long @duration; + /// byte position in stream, -1 if unknown + public long @pos; + /// for some private data of the user + public void* @opaque; + /// AVBufferRef for free use by the API user. FFmpeg will never check the contents of the buffer ref. FFmpeg calls av_buffer_unref() on it when the packet is unreferenced. av_packet_copy_props() calls create a new reference with av_buffer_ref() for the target packet's opaque_ref field. + public AVBufferRef* @opaque_ref; + /// Time base of the packet's timestamps. In the future, this field may be set on packets output by encoders or demuxers, but its value will be by default ignored on input to decoders or muxers. + public AVRational @time_base; +} + +public unsafe partial struct AVPacketList +{ + public AVPacket @pkt; + public AVPacketList* @next; +} + +public unsafe partial struct AVPacketSideData +{ + public byte* @data; + public ulong @size; + public AVPacketSideDataType @type; +} + +/// Pan Scan area. This specifies the area which should be displayed. Note there may be multiple such areas for one frame. +public unsafe partial struct AVPanScan +{ + /// id - encoding: Set by user. - decoding: Set by libavcodec. + public int @id; + /// width and height in 1/16 pel - encoding: Set by user. - decoding: Set by libavcodec. + public int @width; + public int @height; + /// position of the top left corner in 1/16 pel for up to 3 fields/frames - encoding: Set by user. - decoding: Set by libavcodec. + public short_array3x2 @position; +} + +/// Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes of an image. It also stores the subsampling factors and number of components. +public unsafe partial struct AVPixFmtDescriptor +{ + public byte* @name; + /// The number of components each pixel has, (1-4) + public byte @nb_components; + /// Amount to shift the luma width right to find the chroma width. For YV12 this is 1 for example. 
chroma_width = AV_CEIL_RSHIFT(luma_width, log2_chroma_w) The note above is needed to ensure rounding up. This value only refers to the chroma components. + public byte @log2_chroma_w; + /// Amount to shift the luma height right to find the chroma height. For YV12 this is 1 for example. chroma_height= AV_CEIL_RSHIFT(luma_height, log2_chroma_h) The note above is needed to ensure rounding up. This value only refers to the chroma components. + public byte @log2_chroma_h; + /// Combination of AV_PIX_FMT_FLAG_... flags. + public ulong @flags; + /// Parameters that describe how pixels are packed. If the format has 1 or 2 components, then luma is 0. If the format has 3 or 4 components: if the RGB flag is set then 0 is red, 1 is green and 2 is blue; otherwise 0 is luma, 1 is chroma-U and 2 is chroma-V. + public AVComponentDescriptor_array4 @comp; + /// Alternative comma-separated names. + public byte* @alias; +} + +/// This structure contains the data a format has to probe a file. +public unsafe partial struct AVProbeData +{ + public byte* @filename; + /// Buffer must have AVPROBE_PADDING_SIZE of extra allocated bytes filled with zero. + public byte* @buf; + /// Size of buf except extra allocated bytes + public int @buf_size; + /// mime_type, when known. + public byte* @mime_type; +} + +/// This structure supplies correlation between a packet timestamp and a wall clock production time. The definition follows the Producer Reference Time ('prft') as defined in ISO/IEC 14496-12 +public unsafe partial struct AVProducerReferenceTime +{ + /// A UTC timestamp, in microseconds, since Unix epoch (e.g, av_gettime()). + public long @wallclock; + public int @flags; +} + +/// AVProfile. +public unsafe partial struct AVProfile +{ + public int @profile; + /// short name for the profile + public byte* @name; +} + +/// New fields can be added to the end with minor version bumps. Removal, reordering and changes to existing fields require a major version bump. 
sizeof(AVProgram) must not be used outside libav*. +public unsafe partial struct AVProgram +{ + public int @id; + public int @flags; + /// selects which program to discard and which to feed to the caller + public AVDiscard @discard; + public uint* @stream_index; + public uint @nb_stream_indexes; + public AVDictionary* @metadata; + public int @program_num; + public int @pmt_pid; + public int @pcr_pid; + public int @pmt_version; + /// *************************************************************** All fields below this line are not part of the public API. They may not be used outside of libavformat and can be changed and removed at will. New public fields should be added right above. **************************************************************** + public long @start_time; + public long @end_time; + /// reference dts for wrap detection + public long @pts_wrap_reference; + /// behavior on wrap detection + public int @pts_wrap_behavior; +} + +/// Rational number (pair of numerator and denominator). +public unsafe partial struct AVRational +{ + /// Numerator + public int @num; + /// Denominator + public int @den; +} + +/// Structure describing a single Region Of Interest. +public unsafe partial struct AVRegionOfInterest +{ + /// Must be set to the size of this data structure (that is, sizeof(AVRegionOfInterest)). + public uint @self_size; + /// Distance in pixels from the top edge of the frame to the top and bottom edges and from the left edge of the frame to the left and right edges of the rectangle defining this region of interest. + public int @top; + public int @bottom; + public int @left; + public int @right; + /// Quantisation offset. + public AVRational @qoffset; +} + +/// Stream structure. New fields can be added to the end with minor version bumps. Removal, reordering and changes to existing fields require a major version bump. sizeof(AVStream) must not be used outside libav*. 
+public unsafe partial struct AVStream +{ + /// stream index in AVFormatContext + public int @index; + /// Format-specific stream ID. decoding: set by libavformat encoding: set by the user, replaced by libavformat if left unset + public int @id; + public void* @priv_data; + /// This is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented. + public AVRational @time_base; + /// Decoding: pts of the first frame of the stream in presentation order, in stream time base. Only set this if you are absolutely 100% sure that the value you set it to really is the pts of the first frame. This may be undefined (AV_NOPTS_VALUE). + public long @start_time; + /// Decoding: duration of the stream, in stream time base. If a source file does not specify a duration, but does specify a bitrate, this value will be estimated from bitrate and file size. + public long @duration; + /// number of frames in this stream if known or 0 + public long @nb_frames; + /// Stream disposition - a combination of AV_DISPOSITION_* flags. - demuxing: set by libavformat when creating the stream or in avformat_find_stream_info(). - muxing: may be set by the caller before avformat_write_header(). + public int @disposition; + /// Selects which packets can be discarded at will and do not need to be demuxed. + public AVDiscard @discard; + /// sample aspect ratio (0 if unknown) - encoding: Set by user. - decoding: Set by libavformat. + public AVRational @sample_aspect_ratio; + public AVDictionary* @metadata; + /// Average framerate + public AVRational @avg_frame_rate; + /// For streams with AV_DISPOSITION_ATTACHED_PIC disposition, this packet will contain the attached picture. + public AVPacket @attached_pic; + /// An array of side data that applies to the whole stream (i.e. the container does not allow it to change between packets). + public AVPacketSideData* @side_data; + /// The number of elements in the AVStream.side_data array. 
+ public int @nb_side_data; + /// Flags indicating events happening on the stream, a combination of AVSTREAM_EVENT_FLAG_*. + public int @event_flags; + /// Real base framerate of the stream. This is the lowest framerate with which all timestamps can be represented accurately (it is the least common multiple of all framerates in the stream). Note, this value is just a guess! For example, if the time base is 1/90000 and all frames have either approximately 3600 or 1800 timer ticks, then r_frame_rate will be 50/1. + public AVRational @r_frame_rate; + /// Codec parameters associated with this stream. Allocated and freed by libavformat in avformat_new_stream() and avformat_free_context() respectively. + public AVCodecParameters* @codecpar; + /// Number of bits in timestamps. Used for wrapping control. + public int @pts_wrap_bits; +} + +public unsafe partial struct AVSubtitle +{ + public ushort @format; + public uint @start_display_time; + public uint @end_display_time; + public uint @num_rects; + public AVSubtitleRect** @rects; + /// Same as packet pts, in AV_TIME_BASE + public long @pts; +} + +public unsafe partial struct AVSubtitleRect +{ + /// top left corner of pict, undefined when pict is not set + public int @x; + /// top left corner of pict, undefined when pict is not set + public int @y; + /// width of pict, undefined when pict is not set + public int @w; + /// height of pict, undefined when pict is not set + public int @h; + /// number of colors in pict, undefined when pict is not set + public int @nb_colors; + /// data+linesize for the bitmap of this subtitle. Can be set for text/ass as well once they are rendered. + public byte_ptrArray4 @data; + public int_array4 @linesize; + public AVSubtitleType @type; + /// 0 terminated plain UTF-8 text + public byte* @text; + /// 0 terminated ASS/SSA compatible event line. The presentation of this is unaffected by the other values in this struct. 
+ public byte* @ass; + public int @flags; +} + +public unsafe partial struct AVTimecode +{ + /// timecode frame start (first base frame number) + public int @start; + /// flags such as drop frame, +24 hours support, ... + public uint @flags; + /// frame rate in rational form + public AVRational @rate; + /// frame per second; must be consistent with the rate field + public uint @fps; +} + +public unsafe partial struct D3D11_VIDEO_DECODER_CONFIG +{ + public _GUID @guidConfigBitstreamEncryption; + public _GUID @guidConfigMBcontrolEncryption; + public _GUID @guidConfigResidDiffEncryption; + public uint @ConfigBitstreamRaw; + public uint @ConfigMBcontrolRasterOrder; + public uint @ConfigResidDiffHost; + public uint @ConfigSpatialResid8; + public uint @ConfigResid8Subtraction; + public uint @ConfigSpatialHost8or9Clipping; + public uint @ConfigSpatialResidInterleaved; + public uint @ConfigIntraResidUnsigned; + public uint @ConfigResidDiffAccelerator; + public uint @ConfigHostInverseScan; + public uint @ConfigSpecificIDCT; + public uint @Config4GroupedCoefs; + public ushort @ConfigMinRenderTargetBuffCount; + public ushort @ConfigDecoderSpecific; +} + +public unsafe partial struct ID3D11Device +{ + public ID3D11DeviceVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11DeviceContext +{ + public ID3D11DeviceContextVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11DeviceContextVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetDevice; + public void* @GetPrivateData; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; + public void* @VSSetConstantBuffers; + public void* @PSSetShaderResources; + public void* @PSSetShader; + public void* @PSSetSamplers; + public void* @VSSetShader; + public void* @DrawIndexed; + public void* @Draw; + public void* @Map; + public void* @Unmap; + public void* @PSSetConstantBuffers; + public void* @IASetInputLayout; + public void* @IASetVertexBuffers; + public 
void* @IASetIndexBuffer; + public void* @DrawIndexedInstanced; + public void* @DrawInstanced; + public void* @GSSetConstantBuffers; + public void* @GSSetShader; + public void* @IASetPrimitiveTopology; + public void* @VSSetShaderResources; + public void* @VSSetSamplers; + public void* @Begin; + public void* @End; + public void* @GetData; + public void* @SetPredication; + public void* @GSSetShaderResources; + public void* @GSSetSamplers; + public void* @OMSetRenderTargets; + public void* @OMSetRenderTargetsAndUnorderedAccessViews; + public void* @OMSetBlendState; + public void* @OMSetDepthStencilState; + public void* @SOSetTargets; + public void* @DrawAuto; + public void* @DrawIndexedInstancedIndirect; + public void* @DrawInstancedIndirect; + public void* @Dispatch; + public void* @DispatchIndirect; + public void* @RSSetState; + public void* @RSSetViewports; + public void* @RSSetScissorRects; + public void* @CopySubresourceRegion; + public void* @CopyResource; + public void* @UpdateSubresource; + public void* @CopyStructureCount; + public void* @ClearRenderTargetView; + public void* @ClearUnorderedAccessViewUint; + public void* @ClearUnorderedAccessViewFloat; + public void* @ClearDepthStencilView; + public void* @GenerateMips; + public void* @SetResourceMinLOD; + public void* @GetResourceMinLOD; + public void* @ResolveSubresource; + public void* @ExecuteCommandList; + public void* @HSSetShaderResources; + public void* @HSSetShader; + public void* @HSSetSamplers; + public void* @HSSetConstantBuffers; + public void* @DSSetShaderResources; + public void* @DSSetShader; + public void* @DSSetSamplers; + public void* @DSSetConstantBuffers; + public void* @CSSetShaderResources; + public void* @CSSetUnorderedAccessViews; + public void* @CSSetShader; + public void* @CSSetSamplers; + public void* @CSSetConstantBuffers; + public void* @VSGetConstantBuffers; + public void* @PSGetShaderResources; + public void* @PSGetShader; + public void* @PSGetSamplers; + public void* 
@VSGetShader; + public void* @PSGetConstantBuffers; + public void* @IAGetInputLayout; + public void* @IAGetVertexBuffers; + public void* @IAGetIndexBuffer; + public void* @GSGetConstantBuffers; + public void* @GSGetShader; + public void* @IAGetPrimitiveTopology; + public void* @VSGetShaderResources; + public void* @VSGetSamplers; + public void* @GetPredication; + public void* @GSGetShaderResources; + public void* @GSGetSamplers; + public void* @OMGetRenderTargets; + public void* @OMGetRenderTargetsAndUnorderedAccessViews; + public void* @OMGetBlendState; + public void* @OMGetDepthStencilState; + public void* @SOGetTargets; + public void* @RSGetState; + public void* @RSGetViewports; + public void* @RSGetScissorRects; + public void* @HSGetShaderResources; + public void* @HSGetShader; + public void* @HSGetSamplers; + public void* @HSGetConstantBuffers; + public void* @DSGetShaderResources; + public void* @DSGetShader; + public void* @DSGetSamplers; + public void* @DSGetConstantBuffers; + public void* @CSGetShaderResources; + public void* @CSGetUnorderedAccessViews; + public void* @CSGetShader; + public void* @CSGetSamplers; + public void* @CSGetConstantBuffers; + public void* @ClearState; + public void* @Flush; + public void* @GetType; + public void* @GetContextFlags; + public void* @FinishCommandList; +} + +public unsafe partial struct ID3D11DeviceVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @CreateBuffer; + public void* @CreateTexture1D; + public void* @CreateTexture2D; + public void* @CreateTexture3D; + public void* @CreateShaderResourceView; + public void* @CreateUnorderedAccessView; + public void* @CreateRenderTargetView; + public void* @CreateDepthStencilView; + public void* @CreateInputLayout; + public void* @CreateVertexShader; + public void* @CreateGeometryShader; + public void* @CreateGeometryShaderWithStreamOutput; + public void* @CreatePixelShader; + public void* @CreateHullShader; + public void* 
@CreateDomainShader; + public void* @CreateComputeShader; + public void* @CreateClassLinkage; + public void* @CreateBlendState; + public void* @CreateDepthStencilState; + public void* @CreateRasterizerState; + public void* @CreateSamplerState; + public void* @CreateQuery; + public void* @CreatePredicate; + public void* @CreateCounter; + public void* @CreateDeferredContext; + public void* @OpenSharedResource; + public void* @CheckFormatSupport; + public void* @CheckMultisampleQualityLevels; + public void* @CheckCounterInfo; + public void* @CheckCounter; + public void* @CheckFeatureSupport; + public void* @GetPrivateData; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; + public void* @GetFeatureLevel; + public void* @GetCreationFlags; + public void* @GetDeviceRemovedReason; + public void* @GetImmediateContext; + public void* @SetExceptionMode; + public void* @GetExceptionMode; +} + +public unsafe partial struct ID3D11Texture2D +{ + public ID3D11Texture2DVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11Texture2DVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetDevice; + public void* @GetPrivateData; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; + public void* @GetType; + public void* @SetEvictionPriority; + public void* @GetEvictionPriority; + public void* @GetDesc; +} + +public unsafe partial struct ID3D11VideoContext +{ + public ID3D11VideoContextVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11VideoContextVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetDevice; + public void* @GetPrivateData; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; + public void* @GetDecoderBuffer; + public void* @ReleaseDecoderBuffer; + public void* @DecoderBeginFrame; + public void* @DecoderEndFrame; + public void* @SubmitDecoderBuffers; + public void* @DecoderExtension; + public 
void* @VideoProcessorSetOutputTargetRect; + public void* @VideoProcessorSetOutputBackgroundColor; + public void* @VideoProcessorSetOutputColorSpace; + public void* @VideoProcessorSetOutputAlphaFillMode; + public void* @VideoProcessorSetOutputConstriction; + public void* @VideoProcessorSetOutputStereoMode; + public void* @VideoProcessorSetOutputExtension; + public void* @VideoProcessorGetOutputTargetRect; + public void* @VideoProcessorGetOutputBackgroundColor; + public void* @VideoProcessorGetOutputColorSpace; + public void* @VideoProcessorGetOutputAlphaFillMode; + public void* @VideoProcessorGetOutputConstriction; + public void* @VideoProcessorGetOutputStereoMode; + public void* @VideoProcessorGetOutputExtension; + public void* @VideoProcessorSetStreamFrameFormat; + public void* @VideoProcessorSetStreamColorSpace; + public void* @VideoProcessorSetStreamOutputRate; + public void* @VideoProcessorSetStreamSourceRect; + public void* @VideoProcessorSetStreamDestRect; + public void* @VideoProcessorSetStreamAlpha; + public void* @VideoProcessorSetStreamPalette; + public void* @VideoProcessorSetStreamPixelAspectRatio; + public void* @VideoProcessorSetStreamLumaKey; + public void* @VideoProcessorSetStreamStereoFormat; + public void* @VideoProcessorSetStreamAutoProcessingMode; + public void* @VideoProcessorSetStreamFilter; + public void* @VideoProcessorSetStreamExtension; + public void* @VideoProcessorGetStreamFrameFormat; + public void* @VideoProcessorGetStreamColorSpace; + public void* @VideoProcessorGetStreamOutputRate; + public void* @VideoProcessorGetStreamSourceRect; + public void* @VideoProcessorGetStreamDestRect; + public void* @VideoProcessorGetStreamAlpha; + public void* @VideoProcessorGetStreamPalette; + public void* @VideoProcessorGetStreamPixelAspectRatio; + public void* @VideoProcessorGetStreamLumaKey; + public void* @VideoProcessorGetStreamStereoFormat; + public void* @VideoProcessorGetStreamAutoProcessingMode; + public void* @VideoProcessorGetStreamFilter; + 
public void* @VideoProcessorGetStreamExtension; + public void* @VideoProcessorBlt; + public void* @NegotiateCryptoSessionKeyExchange; + public void* @EncryptionBlt; + public void* @DecryptionBlt; + public void* @StartSessionKeyRefresh; + public void* @FinishSessionKeyRefresh; + public void* @GetEncryptionBltKey; + public void* @NegotiateAuthenticatedChannelKeyExchange; + public void* @QueryAuthenticatedChannel; + public void* @ConfigureAuthenticatedChannel; + public void* @VideoProcessorSetStreamRotation; + public void* @VideoProcessorGetStreamRotation; +} + +public unsafe partial struct ID3D11VideoDecoder +{ + public ID3D11VideoDecoderVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11VideoDecoderOutputView +{ + public ID3D11VideoDecoderOutputViewVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11VideoDecoderOutputViewVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetDevice; + public void* @GetPrivateData; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; + public void* @GetResource; + public void* @GetDesc; +} + +public unsafe partial struct ID3D11VideoDecoderVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetDevice; + public void* @GetPrivateData; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; + public void* @GetCreationParameters; + public void* @GetDriverHandle; +} + +public unsafe partial struct ID3D11VideoDevice +{ + public ID3D11VideoDeviceVtbl* @lpVtbl; +} + +public unsafe partial struct ID3D11VideoDeviceVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @CreateVideoDecoder; + public void* @CreateVideoProcessor; + public void* @CreateAuthenticatedChannel; + public void* @CreateCryptoSession; + public void* @CreateVideoDecoderOutputView; + public void* @CreateVideoProcessorInputView; + public void* @CreateVideoProcessorOutputView; + 
public void* @CreateVideoProcessorEnumerator; + public void* @GetVideoDecoderProfileCount; + public void* @GetVideoDecoderProfile; + public void* @CheckVideoDecoderFormat; + public void* @GetVideoDecoderConfigCount; + public void* @GetVideoDecoderConfig; + public void* @GetContentProtectionCaps; + public void* @CheckCryptoKeyExchange; + public void* @SetPrivateData; + public void* @SetPrivateDataInterface; +} + +public unsafe partial struct IDirect3DDeviceManager9 +{ + public IDirect3DDeviceManager9Vtbl* @lpVtbl; +} + +public unsafe partial struct IDirect3DDeviceManager9Vtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @ResetDevice; + public void* @OpenDeviceHandle; + public void* @CloseDeviceHandle; + public void* @TestDevice; + public void* @LockDevice; + public void* @UnlockDevice; + public void* @GetVideoService; +} + +public unsafe partial struct IDirect3DSurface9 +{ + public IDirect3DSurface9Vtbl* @lpVtbl; +} + +public unsafe partial struct IDirect3DSurface9Vtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetDevice; + public void* @SetPrivateData; + public void* @GetPrivateData; + public void* @FreePrivateData; + public void* @SetPriority; + public void* @GetPriority; + public void* @PreLoad; + public void* @GetType; + public void* @GetContainer; + public void* @GetDesc; + public void* @LockRect; + public void* @UnlockRect; + public void* @GetDC; + public void* @ReleaseDC; +} + +public unsafe partial struct IDirectXVideoDecoder +{ + public IDirectXVideoDecoderVtbl* @lpVtbl; +} + +public unsafe partial struct IDirectXVideoDecoderVtbl +{ + public void* @QueryInterface; + public void* @AddRef; + public void* @Release; + public void* @GetVideoDecoderService; + public void* @GetCreationParameters; + public void* @GetBuffer; + public void* @ReleaseBuffer; + public void* @BeginFrame; + public void* @EndFrame; + public void* @Execute; +} + +public unsafe 
partial struct RcOverride +{ + public int @start_frame; + public int @end_frame; + public int @qscale; + public float @quality_factor; +} + +public unsafe partial struct SwsFilter +{ + public SwsVector* @lumH; + public SwsVector* @lumV; + public SwsVector* @chrH; + public SwsVector* @chrV; +} + +public unsafe partial struct SwsVector +{ + /// pointer to the list of coefficients + public double* @coeff; + /// number of coefficients in the vector + public int @length; +} + +/// Context for an Audio FIFO Buffer. +/// This struct is incomplete. +public unsafe partial struct AVAudioFifo +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVBPrint +{ +} + +/// Structure for chain/list of bitstream filters. Empty list can be allocated by av_bsf_list_alloc(). +/// This struct is incomplete. +public unsafe partial struct AVBSFList +{ +} + +/// A reference counted buffer type. It is opaque and is meant to be used through references (AVBufferRef). +/// This struct is incomplete. +public unsafe partial struct AVBuffer +{ +} + +/// The buffer pool. This structure is opaque and not meant to be accessed directly. It is allocated with av_buffer_pool_init() and freed with av_buffer_pool_uninit(). +/// This struct is incomplete. +public unsafe partial struct AVBufferPool +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVCodecInternal +{ +} + +/// ********************************************** +/// This struct is incomplete. +public unsafe partial struct AVCodecTag +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVDictionary +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVFilterChannelLayouts +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVFilterCommand +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVFilterFormats +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVFilterGraphInternal +{ +} + +/// This struct is incomplete. 
+public unsafe partial struct AVFilterInternal +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVFilterPad +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVHWDeviceInternal +{ +} + +/// This struct is incomplete. +public unsafe partial struct AVHWFramesInternal +{ +} + +/// Low-complexity tree container +/// This struct is incomplete. +public unsafe partial struct AVTreeNode +{ +} + +/// The libswresample context. Unlike libavcodec and libavformat, this structure is opaque. This means that if you would like to set options, you must use the avoptions API and cannot directly set values to members of the structure. +/// This struct is incomplete. +public unsafe partial struct SwrContext +{ +} + +/// This struct is incomplete. +public unsafe partial struct SwsContext +{ +} + +/// This struct is incomplete. +public unsafe partial struct URLContext +{ +} + diff --git a/FFmpeg.AutoGen/generated/ffmpeg.functions.facade.g.cs b/FFmpeg.AutoGen/generated/ffmpeg.functions.facade.g.cs new file mode 100644 index 00000000..f68f29ca --- /dev/null +++ b/FFmpeg.AutoGen/generated/ffmpeg.functions.facade.g.cs @@ -0,0 +1,3463 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen; + +public static unsafe partial class ffmpeg +{ + /// Create an AVABufferSinkParams structure. + [Obsolete()] + public static AVABufferSinkParams* av_abuffersink_params_alloc() => vectors.av_abuffersink_params_alloc(); + + /// Add an index entry into a sorted list. Update the entry if the list already contains it. + /// timestamp in the time base of the given stream + public static int av_add_index_entry(AVStream* @st, long @pos, long @timestamp, int @size, int @distance, int @flags) => vectors.av_add_index_entry(@st, @pos, @timestamp, @size, @distance, @flags); + + /// Add two rationals. 
+ /// First rational + /// Second rational + /// b+c + public static AVRational av_add_q(AVRational @b, AVRational @c) => vectors.av_add_q(@b, @c); + + /// Add a value to a timestamp. + /// Input timestamp time base + /// Input timestamp + /// Time base of `inc` + /// Value to be added + public static long av_add_stable(AVRational @ts_tb, long @ts, AVRational @inc_tb, long @inc) => vectors.av_add_stable(@ts_tb, @ts, @inc_tb, @inc); + + /// Read data and append it to the current content of the AVPacket. If pkt->size is 0 this is identical to av_get_packet. Note that this uses av_grow_packet and thus involves a realloc which is inefficient. Thus this function should only be used when there is no reasonable way to know (an upper bound of) the final size. + /// associated IO context + /// packet + /// amount of data to read + /// >0 (read size) if OK, AVERROR_xxx otherwise, previous data will not be lost even if an error occurs. + public static int av_append_packet(AVIOContext* @s, AVPacket* @pkt, int @size) => vectors.av_append_packet(@s, @pkt, @size); + + /// Allocate an AVAudioFifo. + /// sample format + /// number of channels + /// initial allocation size, in samples + /// newly allocated AVAudioFifo, or NULL on error + public static AVAudioFifo* av_audio_fifo_alloc(AVSampleFormat @sample_fmt, int @channels, int @nb_samples) => vectors.av_audio_fifo_alloc(@sample_fmt, @channels, @nb_samples); + + /// Drain data from an AVAudioFifo. + /// AVAudioFifo to drain + /// number of samples to drain + /// 0 if OK, or negative AVERROR code on failure + public static int av_audio_fifo_drain(AVAudioFifo* @af, int @nb_samples) => vectors.av_audio_fifo_drain(@af, @nb_samples); + + /// Free an AVAudioFifo. + /// AVAudioFifo to free + public static void av_audio_fifo_free(AVAudioFifo* @af) => vectors.av_audio_fifo_free(@af); + + /// Peek data from an AVAudioFifo. 
+ /// AVAudioFifo to read from + /// audio data plane pointers + /// number of samples to peek + /// number of samples actually peek, or negative AVERROR code on failure. The number of samples actually peek will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. + public static int av_audio_fifo_peek(AVAudioFifo* @af, void** @data, int @nb_samples) => vectors.av_audio_fifo_peek(@af, @data, @nb_samples); + + /// Peek data from an AVAudioFifo. + /// AVAudioFifo to read from + /// audio data plane pointers + /// number of samples to peek + /// offset from current read position + /// number of samples actually peek, or negative AVERROR code on failure. The number of samples actually peek will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. + public static int av_audio_fifo_peek_at(AVAudioFifo* @af, void** @data, int @nb_samples, int @offset) => vectors.av_audio_fifo_peek_at(@af, @data, @nb_samples, @offset); + + /// Read data from an AVAudioFifo. + /// AVAudioFifo to read from + /// audio data plane pointers + /// number of samples to read + /// number of samples actually read, or negative AVERROR code on failure. The number of samples actually read will not be greater than nb_samples, and will only be less than nb_samples if av_audio_fifo_size is less than nb_samples. + public static int av_audio_fifo_read(AVAudioFifo* @af, void** @data, int @nb_samples) => vectors.av_audio_fifo_read(@af, @data, @nb_samples); + + /// Reallocate an AVAudioFifo. + /// AVAudioFifo to reallocate + /// new allocation size, in samples + /// 0 if OK, or negative AVERROR code on failure + public static int av_audio_fifo_realloc(AVAudioFifo* @af, int @nb_samples) => vectors.av_audio_fifo_realloc(@af, @nb_samples); + + /// Reset the AVAudioFifo buffer. 
+ /// AVAudioFifo to reset + public static void av_audio_fifo_reset(AVAudioFifo* @af) => vectors.av_audio_fifo_reset(@af); + + /// Get the current number of samples in the AVAudioFifo available for reading. + /// the AVAudioFifo to query + /// number of samples available for reading + public static int av_audio_fifo_size(AVAudioFifo* @af) => vectors.av_audio_fifo_size(@af); + + /// Get the current number of samples in the AVAudioFifo available for writing. + /// the AVAudioFifo to query + /// number of samples available for writing + public static int av_audio_fifo_space(AVAudioFifo* @af) => vectors.av_audio_fifo_space(@af); + + /// Write data to an AVAudioFifo. + /// AVAudioFifo to write to + /// audio data plane pointers + /// number of samples to write + /// number of samples actually written, or negative AVERROR code on failure. If successful, the number of samples actually written will always be nb_samples. + public static int av_audio_fifo_write(AVAudioFifo* @af, void** @data, int @nb_samples) => vectors.av_audio_fifo_write(@af, @data, @nb_samples); + + /// Append a description of a channel layout to a bprint buffer. + [Obsolete("use av_channel_layout_describe()")] + public static void av_bprint_channel_layout(AVBPrint* @bp, int @nb_channels, ulong @channel_layout) => vectors.av_bprint_channel_layout(@bp, @nb_channels, @channel_layout); + + /// Allocate a context for a given bitstream filter. The caller must fill in the context parameters as described in the documentation and then call av_bsf_init() before sending any data to the filter. + /// the filter for which to allocate an instance. + /// a pointer into which the pointer to the newly-allocated context will be written. It must be freed with av_bsf_free() after the filtering is done. 
+ /// 0 on success, a negative AVERROR code on failure + public static int av_bsf_alloc(AVBitStreamFilter* @filter, AVBSFContext** @ctx) => vectors.av_bsf_alloc(@filter, @ctx); + + /// Reset the internal bitstream filter state. Should be called e.g. when seeking. + public static void av_bsf_flush(AVBSFContext* @ctx) => vectors.av_bsf_flush(@ctx); + + /// Free a bitstream filter context and everything associated with it; write NULL into the supplied pointer. + public static void av_bsf_free(AVBSFContext** @ctx) => vectors.av_bsf_free(@ctx); + + /// Returns a bitstream filter with the specified name or NULL if no such bitstream filter exists. + /// a bitstream filter with the specified name or NULL if no such bitstream filter exists. + public static AVBitStreamFilter* av_bsf_get_by_name(string @name) => vectors.av_bsf_get_by_name(@name); + + /// Get the AVClass for AVBSFContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + public static AVClass* av_bsf_get_class() => vectors.av_bsf_get_class(); + + /// Get null/pass-through bitstream filter. + /// Pointer to be set to new instance of pass-through bitstream filter + public static int av_bsf_get_null_filter(AVBSFContext** @bsf) => vectors.av_bsf_get_null_filter(@bsf); + + /// Prepare the filter for use, after all the parameters and options have been set. + public static int av_bsf_init(AVBSFContext* @ctx) => vectors.av_bsf_init(@ctx); + + /// Iterate over all registered bitstream filters. + /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. + /// the next registered bitstream filter or NULL when the iteration is finished + public static AVBitStreamFilter* av_bsf_iterate(void** @opaque) => vectors.av_bsf_iterate(@opaque); + + /// Allocate empty list of bitstream filters. The list must be later freed by av_bsf_list_free() or finalized by av_bsf_list_finalize(). 
+ /// Pointer to on success, NULL in case of failure + public static AVBSFList* av_bsf_list_alloc() => vectors.av_bsf_list_alloc(); + + /// Append bitstream filter to the list of bitstream filters. + /// List to append to + /// Filter context to be appended + /// >=0 on success, negative AVERROR in case of failure + public static int av_bsf_list_append(AVBSFList* @lst, AVBSFContext* @bsf) => vectors.av_bsf_list_append(@lst, @bsf); + + /// Construct new bitstream filter context given its name and options and append it to the list of bitstream filters. + /// List to append to + /// Name of the bitstream filter + /// Options for the bitstream filter, can be set to NULL + /// >=0 on success, negative AVERROR in case of failure + public static int av_bsf_list_append2(AVBSFList* @lst, string @bsf_name, AVDictionary** @options) => vectors.av_bsf_list_append2(@lst, @bsf_name, @options); + + /// Finalize list of bitstream filters. + /// Filter list structure to be transformed + /// Pointer to be set to newly created structure representing the chain of bitstream filters + /// >=0 on success, negative AVERROR in case of failure + public static int av_bsf_list_finalize(AVBSFList** @lst, AVBSFContext** @bsf) => vectors.av_bsf_list_finalize(@lst, @bsf); + + /// Free list of bitstream filters. + /// Pointer to pointer returned by av_bsf_list_alloc() + public static void av_bsf_list_free(AVBSFList** @lst) => vectors.av_bsf_list_free(@lst); + + /// Parse string describing list of bitstream filters and create single AVBSFContext describing the whole chain of bitstream filters. Resulting AVBSFContext can be treated as any other AVBSFContext freshly allocated by av_bsf_alloc(). 
+ /// String describing chain of bitstream filters in format `bsf1[=opt1=val1:opt2=val2][,bsf2]` + /// Pointer to be set to newly created structure representing the chain of bitstream filters + /// >=0 on success, negative AVERROR in case of failure + public static int av_bsf_list_parse_str(string @str, AVBSFContext** @bsf) => vectors.av_bsf_list_parse_str(@str, @bsf); + + /// Retrieve a filtered packet. + /// this struct will be filled with the contents of the filtered packet. It is owned by the caller and must be freed using av_packet_unref() when it is no longer needed. This parameter should be "clean" (i.e. freshly allocated with av_packet_alloc() or unreffed with av_packet_unref()) when this function is called. If this function returns successfully, the contents of pkt will be completely overwritten by the returned data. On failure, pkt is not touched. + /// - 0 on success. - AVERROR(EAGAIN) if more packets need to be sent to the filter (using av_bsf_send_packet()) to get more output. - AVERROR_EOF if there will be no further output from the filter. - Another negative AVERROR value if an error occurs. + public static int av_bsf_receive_packet(AVBSFContext* @ctx, AVPacket* @pkt) => vectors.av_bsf_receive_packet(@ctx, @pkt); + + /// Submit a packet for filtering. + /// the packet to filter. The bitstream filter will take ownership of the packet and reset the contents of pkt. pkt is not touched if an error occurs. If pkt is empty (i.e. NULL, or pkt->data is NULL and pkt->side_data_elems zero), it signals the end of the stream (i.e. no more non-empty packets will be sent; sending more empty packets does nothing) and will cause the filter to output any packets it may have buffered internally. + /// - 0 on success. - AVERROR(EAGAIN) if packets need to be retrieved from the filter (using av_bsf_receive_packet()) before new input can be consumed. - Another negative AVERROR value if an error occurs. 
+ public static int av_bsf_send_packet(AVBSFContext* @ctx, AVPacket* @pkt) => vectors.av_bsf_send_packet(@ctx, @pkt); + + /// Allocate an AVBuffer of the given size using av_malloc(). + /// an AVBufferRef of given size or NULL when out of memory + public static AVBufferRef* av_buffer_alloc(ulong @size) => vectors.av_buffer_alloc(@size); + + /// Same as av_buffer_alloc(), except the returned buffer will be initialized to zero. + public static AVBufferRef* av_buffer_allocz(ulong @size) => vectors.av_buffer_allocz(@size); + + /// Create an AVBuffer from an existing array. + /// data array + /// size of data in bytes + /// a callback for freeing this buffer's data + /// parameter to be got for processing or passed to free + /// a combination of AV_BUFFER_FLAG_* + /// an AVBufferRef referring to data on success, NULL on failure. + public static AVBufferRef* av_buffer_create(byte* @data, ulong @size, av_buffer_create_free_func @free, void* @opaque, int @flags) => vectors.av_buffer_create(@data, @size, @free, @opaque, @flags); + + /// Default free callback, which calls av_free() on the buffer data. This function is meant to be passed to av_buffer_create(), not called directly. + public static void av_buffer_default_free(void* @opaque, byte* @data) => vectors.av_buffer_default_free(@opaque, @data); + + /// Returns the opaque parameter set by av_buffer_create. + /// the opaque parameter set by av_buffer_create. + public static void* av_buffer_get_opaque(AVBufferRef* @buf) => vectors.av_buffer_get_opaque(@buf); + + public static int av_buffer_get_ref_count(AVBufferRef* @buf) => vectors.av_buffer_get_ref_count(@buf); + + /// Returns 1 if the caller may write to the data referred to by buf (which is true if and only if buf is the only reference to the underlying AVBuffer). Return 0 otherwise. A positive answer is valid until av_buffer_ref() is called on buf. 
+ /// 1 if the caller may write to the data referred to by buf (which is true if and only if buf is the only reference to the underlying AVBuffer). Return 0 otherwise. A positive answer is valid until av_buffer_ref() is called on buf. + public static int av_buffer_is_writable(AVBufferRef* @buf) => vectors.av_buffer_is_writable(@buf); + + /// Create a writable reference from a given buffer reference, avoiding data copy if possible. + /// buffer reference to make writable. On success, buf is either left untouched, or it is unreferenced and a new writable AVBufferRef is written in its place. On failure, buf is left untouched. + /// 0 on success, a negative AVERROR on failure. + public static int av_buffer_make_writable(AVBufferRef** @buf) => vectors.av_buffer_make_writable(@buf); + + /// Query the original opaque parameter of an allocated buffer in the pool. + /// a buffer reference to a buffer returned by av_buffer_pool_get. + /// the opaque parameter set by the buffer allocator function of the buffer pool. + public static void* av_buffer_pool_buffer_get_opaque(AVBufferRef* @ref) => vectors.av_buffer_pool_buffer_get_opaque(@ref); + + /// Allocate a new AVBuffer, reusing an old buffer from the pool when available. This function may be called simultaneously from multiple threads. + /// a reference to the new buffer on success, NULL on error. + public static AVBufferRef* av_buffer_pool_get(AVBufferPool* @pool) => vectors.av_buffer_pool_get(@pool); + + /// Allocate and initialize a buffer pool. + /// size of each buffer in this pool + /// a function that will be used to allocate new buffers when the pool is empty. May be NULL, then the default allocator will be used (av_buffer_alloc()). + /// newly created buffer pool on success, NULL on error. + public static AVBufferPool* av_buffer_pool_init(ulong @size, av_buffer_pool_init_alloc_func @alloc) => vectors.av_buffer_pool_init(@size, @alloc); + + /// Allocate and initialize a buffer pool with a more complex allocator. 
+ /// size of each buffer in this pool + /// arbitrary user data used by the allocator + /// a function that will be used to allocate new buffers when the pool is empty. May be NULL, then the default allocator will be used (av_buffer_alloc()). + /// a function that will be called immediately before the pool is freed. I.e. after av_buffer_pool_uninit() is called by the caller and all the frames are returned to the pool and freed. It is intended to uninitialize the user opaque data. May be NULL. + /// newly created buffer pool on success, NULL on error. + public static AVBufferPool* av_buffer_pool_init2(ulong @size, void* @opaque, av_buffer_pool_init2_alloc_func @alloc, av_buffer_pool_init2_pool_free_func @pool_free) => vectors.av_buffer_pool_init2(@size, @opaque, @alloc, @pool_free); + + /// Mark the pool as being available for freeing. It will actually be freed only once all the allocated buffers associated with the pool are released. Thus it is safe to call this function while some of the allocated buffers are still in use. + /// pointer to the pool to be freed. It will be set to NULL. + public static void av_buffer_pool_uninit(AVBufferPool** @pool) => vectors.av_buffer_pool_uninit(@pool); + + /// Reallocate a given buffer. + /// a buffer reference to reallocate. On success, buf will be unreferenced and a new reference with the required size will be written in its place. On failure buf will be left untouched. *buf may be NULL, then a new buffer is allocated. + /// required new buffer size. + /// 0 on success, a negative AVERROR on failure. + public static int av_buffer_realloc(AVBufferRef** @buf, ulong @size) => vectors.av_buffer_realloc(@buf, @size); + + /// Create a new reference to an AVBuffer. + /// a new AVBufferRef referring to the same AVBuffer as buf or NULL on failure. + public static AVBufferRef* av_buffer_ref(AVBufferRef* @buf) => vectors.av_buffer_ref(@buf); + + /// Ensure dst refers to the same data as src. 
+ /// Pointer to either a valid buffer reference or NULL. On success, this will point to a buffer reference equivalent to src. On failure, dst will be left untouched. + /// A buffer reference to replace dst with. May be NULL, then this function is equivalent to av_buffer_unref(dst). + /// 0 on success AVERROR(ENOMEM) on memory allocation failure. + public static int av_buffer_replace(AVBufferRef** @dst, AVBufferRef* @src) => vectors.av_buffer_replace(@dst, @src); + + /// Free a given reference and automatically free the buffer if there are no more references to it. + /// the reference to be freed. The pointer is set to NULL on return. + public static void av_buffer_unref(AVBufferRef** @buf) => vectors.av_buffer_unref(@buf); + + public static int av_buffersink_get_ch_layout(AVFilterContext* @ctx, AVChannelLayout* @ch_layout) => vectors.av_buffersink_get_ch_layout(@ctx, @ch_layout); + + [Obsolete()] + public static ulong av_buffersink_get_channel_layout(AVFilterContext* @ctx) => vectors.av_buffersink_get_channel_layout(@ctx); + + public static int av_buffersink_get_channels(AVFilterContext* @ctx) => vectors.av_buffersink_get_channels(@ctx); + + public static int av_buffersink_get_format(AVFilterContext* @ctx) => vectors.av_buffersink_get_format(@ctx); + + /// Get a frame with filtered data from sink and put it in frame. + /// pointer to a context of a buffersink or abuffersink AVFilter. + /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() + /// - >= 0 if a frame was successfully returned. - AVERROR(EAGAIN) if no frames are available at this point; more input frames must be added to the filtergraph to get more output. - AVERROR_EOF if there will be no more output frames on this sink. - A different negative AVERROR code in other failure cases. 
+ public static int av_buffersink_get_frame(AVFilterContext* @ctx, AVFrame* @frame) => vectors.av_buffersink_get_frame(@ctx, @frame); + + /// Get a frame with filtered data from sink and put it in frame. + /// pointer to a buffersink or abuffersink filter context. + /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() + /// a combination of AV_BUFFERSINK_FLAG_* flags + /// >= 0 in for success, a negative AVERROR code for failure. + public static int av_buffersink_get_frame_flags(AVFilterContext* @ctx, AVFrame* @frame, int @flags) => vectors.av_buffersink_get_frame_flags(@ctx, @frame, @flags); + + public static AVRational av_buffersink_get_frame_rate(AVFilterContext* @ctx) => vectors.av_buffersink_get_frame_rate(@ctx); + + public static int av_buffersink_get_h(AVFilterContext* @ctx) => vectors.av_buffersink_get_h(@ctx); + + public static AVBufferRef* av_buffersink_get_hw_frames_ctx(AVFilterContext* @ctx) => vectors.av_buffersink_get_hw_frames_ctx(@ctx); + + public static AVRational av_buffersink_get_sample_aspect_ratio(AVFilterContext* @ctx) => vectors.av_buffersink_get_sample_aspect_ratio(@ctx); + + public static int av_buffersink_get_sample_rate(AVFilterContext* @ctx) => vectors.av_buffersink_get_sample_rate(@ctx); + + /// Same as av_buffersink_get_frame(), but with the ability to specify the number of samples read. This function is less efficient than av_buffersink_get_frame(), because it copies the data around. + /// pointer to a context of the abuffersink AVFilter. + /// pointer to an allocated frame that will be filled with data. The data must be freed using av_frame_unref() / av_frame_free() frame will contain exactly nb_samples audio samples, except at the end of stream, when it can contain less than nb_samples. + /// The return codes have the same meaning as for av_buffersink_get_frame(). 
+ public static int av_buffersink_get_samples(AVFilterContext* @ctx, AVFrame* @frame, int @nb_samples) => vectors.av_buffersink_get_samples(@ctx, @frame, @nb_samples); + + public static AVRational av_buffersink_get_time_base(AVFilterContext* @ctx) => vectors.av_buffersink_get_time_base(@ctx); + + /// Get the properties of the stream @{ + public static AVMediaType av_buffersink_get_type(AVFilterContext* @ctx) => vectors.av_buffersink_get_type(@ctx); + + public static int av_buffersink_get_w(AVFilterContext* @ctx) => vectors.av_buffersink_get_w(@ctx); + + /// Create an AVBufferSinkParams structure. + [Obsolete()] + public static AVBufferSinkParams* av_buffersink_params_alloc() => vectors.av_buffersink_params_alloc(); + + /// Set the frame size for an audio buffer sink. + public static void av_buffersink_set_frame_size(AVFilterContext* @ctx, uint @frame_size) => vectors.av_buffersink_set_frame_size(@ctx, @frame_size); + + /// Add a frame to the buffer source. + /// an instance of the buffersrc filter + /// frame to be added. If the frame is reference counted, this function will take ownership of the reference(s) and reset the frame. Otherwise the frame data will be copied. If this function returns an error, the input frame is not touched. + /// 0 on success, a negative AVERROR on error. + public static int av_buffersrc_add_frame(AVFilterContext* @ctx, AVFrame* @frame) => vectors.av_buffersrc_add_frame(@ctx, @frame); + + /// Add a frame to the buffer source. + /// pointer to a buffer source context + /// a frame, or NULL to mark EOF + /// a combination of AV_BUFFERSRC_FLAG_* + /// >= 0 in case of success, a negative AVERROR code in case of failure + public static int av_buffersrc_add_frame_flags(AVFilterContext* @buffer_src, AVFrame* @frame, int @flags) => vectors.av_buffersrc_add_frame_flags(@buffer_src, @frame, @flags); + + /// Close the buffer source after EOF. 
+ public static int av_buffersrc_close(AVFilterContext* @ctx, long @pts, uint @flags) => vectors.av_buffersrc_close(@ctx, @pts, @flags); + + /// Get the number of failed requests. + public static uint av_buffersrc_get_nb_failed_requests(AVFilterContext* @buffer_src) => vectors.av_buffersrc_get_nb_failed_requests(@buffer_src); + + /// Allocate a new AVBufferSrcParameters instance. It should be freed by the caller with av_free(). + public static AVBufferSrcParameters* av_buffersrc_parameters_alloc() => vectors.av_buffersrc_parameters_alloc(); + + /// Initialize the buffersrc or abuffersrc filter with the provided parameters. This function may be called multiple times, the later calls override the previous ones. Some of the parameters may also be set through AVOptions, then whatever method is used last takes precedence. + /// an instance of the buffersrc or abuffersrc filter + /// the stream parameters. The frames later passed to this filter must conform to those parameters. All the allocated fields in param remain owned by the caller, libavfilter will make internal copies or references when necessary. + /// 0 on success, a negative AVERROR code on failure. + public static int av_buffersrc_parameters_set(AVFilterContext* @ctx, AVBufferSrcParameters* @param) => vectors.av_buffersrc_parameters_set(@ctx, @param); + + /// Add a frame to the buffer source. + /// an instance of the buffersrc filter + /// frame to be added. If the frame is reference counted, this function will make a new reference to it. Otherwise the frame data will be copied. + /// 0 on success, a negative AVERROR on error + public static int av_buffersrc_write_frame(AVFilterContext* @ctx, AVFrame* @frame) => vectors.av_buffersrc_write_frame(@ctx, @frame); + + /// Allocate a memory block for an array with av_mallocz(). 
+ /// Number of elements + /// Size of the single element + /// Pointer to the allocated block, or `NULL` if the block cannot be allocated + public static void* av_calloc(ulong @nmemb, ulong @size) => vectors.av_calloc(@nmemb, @size); + + /// Get a human readable string describing a given channel. + /// pre-allocated buffer where to put the generated string + /// size in bytes of the buffer. + /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. + public static int av_channel_description(byte* @buf, ulong @buf_size, AVChannel @channel) => vectors.av_channel_description(@buf, @buf_size, @channel); + + /// bprint variant of av_channel_description(). + public static void av_channel_description_bprint(AVBPrint* @bp, AVChannel @channel_id) => vectors.av_channel_description_bprint(@bp, @channel_id); + + /// This is the inverse function of av_channel_name(). + /// the channel with the given name AV_CHAN_NONE when name does not identify a known channel + public static AVChannel av_channel_from_string(string @name) => vectors.av_channel_from_string(@name); + + /// Get the channel with the given index in a channel layout. + /// input channel layout + /// channel with the index idx in channel_layout on success or AV_CHAN_NONE on failure (if idx is not valid or the channel order is unspecified) + public static AVChannel av_channel_layout_channel_from_index(AVChannelLayout* @channel_layout, uint @idx) => vectors.av_channel_layout_channel_from_index(@channel_layout, @idx); + + /// Get a channel described by the given string. 
+ /// input channel layout + /// a channel described by the given string in channel_layout on success or AV_CHAN_NONE on failure (if the string is not valid or the channel order is unspecified) + public static AVChannel av_channel_layout_channel_from_string(AVChannelLayout* @channel_layout, string @name) => vectors.av_channel_layout_channel_from_string(@channel_layout, @name); + + /// Check whether a channel layout is valid, i.e. can possibly describe audio data. + /// input channel layout + /// 1 if channel_layout is valid, 0 otherwise. + public static int av_channel_layout_check(AVChannelLayout* @channel_layout) => vectors.av_channel_layout_check(@channel_layout); + + /// Check whether two channel layouts are semantically the same, i.e. the same channels are present on the same positions in both. + /// input channel layout + /// input channel layout + /// 0 if chl and chl1 are equal, 1 if they are not equal. A negative AVERROR code if one or both are invalid. + public static int av_channel_layout_compare(AVChannelLayout* @chl, AVChannelLayout* @chl1) => vectors.av_channel_layout_compare(@chl, @chl1); + + /// Make a copy of a channel layout. This differs from just assigning src to dst in that it allocates and copies the map for AV_CHANNEL_ORDER_CUSTOM. + /// destination channel layout + /// source channel layout + /// 0 on success, a negative AVERROR on error. + public static int av_channel_layout_copy(AVChannelLayout* @dst, AVChannelLayout* @src) => vectors.av_channel_layout_copy(@dst, @src); + + /// Get the default channel layout for a given number of channels. + /// number of channels + public static void av_channel_layout_default(AVChannelLayout* @ch_layout, int @nb_channels) => vectors.av_channel_layout_default(@ch_layout, @nb_channels); + + /// Get a human-readable string describing the channel layout properties. 
The string will be in the same format that is accepted by av_channel_layout_from_string(), allowing to rebuild the same channel layout, except for opaque pointers. + /// channel layout to be described + /// pre-allocated buffer where to put the generated string + /// size in bytes of the buffer. + /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. + public static int av_channel_layout_describe(AVChannelLayout* @channel_layout, byte* @buf, ulong @buf_size) => vectors.av_channel_layout_describe(@channel_layout, @buf, @buf_size); + + /// bprint variant of av_channel_layout_describe(). + /// 0 on success, or a negative AVERROR value on failure. + public static int av_channel_layout_describe_bprint(AVChannelLayout* @channel_layout, AVBPrint* @bp) => vectors.av_channel_layout_describe_bprint(@channel_layout, @bp); + + /// Get the channel with the given index in channel_layout. + [Obsolete("use av_channel_layout_channel_from_index()")] + public static ulong av_channel_layout_extract_channel(ulong @channel_layout, int @index) => vectors.av_channel_layout_extract_channel(@channel_layout, @index); + + /// Initialize a native channel layout from a bitmask indicating which channels are present. + /// the layout structure to be initialized + /// bitmask describing the channel layout + /// 0 on success AVERROR(EINVAL) for invalid mask values + public static int av_channel_layout_from_mask(AVChannelLayout* @channel_layout, ulong @mask) => vectors.av_channel_layout_from_mask(@channel_layout, @mask); + + /// Initialize a channel layout from a given string description. The input string can be represented by: - the formal channel layout name (returned by av_channel_layout_describe()) - single or multiple channel names (returned by av_channel_name(), eg. "FL", or concatenated with "+", each optionally containing a custom name after a "", eg. 
"FL+FR+LFE") - a decimal or hexadecimal value of a native channel layout (eg. "4" or "0x4") - the number of channels with default layout (eg. "4c") - the number of unordered channels (eg. "4C" or "4 channels") - the ambisonic order followed by optional non-diegetic channels (eg. "ambisonic 2+stereo") + /// input channel layout + /// string describing the channel layout + /// 0 channel layout was detected, AVERROR_INVALIDATATA otherwise + public static int av_channel_layout_from_string(AVChannelLayout* @channel_layout, string @str) => vectors.av_channel_layout_from_string(@channel_layout, @str); + + /// Get the index of a given channel in a channel layout. In case multiple channels are found, only the first match will be returned. + /// input channel layout + /// index of channel in channel_layout on success or a negative number if channel is not present in channel_layout. + public static int av_channel_layout_index_from_channel(AVChannelLayout* @channel_layout, AVChannel @channel) => vectors.av_channel_layout_index_from_channel(@channel_layout, @channel); + + /// Get the index in a channel layout of a channel described by the given string. In case multiple channels are found, only the first match will be returned. + /// input channel layout + /// a channel index described by the given string, or a negative AVERROR value. + public static int av_channel_layout_index_from_string(AVChannelLayout* @channel_layout, string @name) => vectors.av_channel_layout_index_from_string(@channel_layout, @name); + + /// Iterate over all standard channel layouts. + /// a pointer where libavutil will store the iteration state. Must point to NULL to start the iteration. + /// the standard channel layout or NULL when the iteration is finished + public static AVChannelLayout* av_channel_layout_standard(void** @opaque) => vectors.av_channel_layout_standard(@opaque); + + /// Find out what channels from a given set are present in a channel layout, without regard for their positions. 
+ /// input channel layout + /// a combination of AV_CH_* representing a set of channels + /// a bitfield representing all the channels from mask that are present in channel_layout + public static ulong av_channel_layout_subset(AVChannelLayout* @channel_layout, ulong @mask) => vectors.av_channel_layout_subset(@channel_layout, @mask); + + /// Free any allocated data in the channel layout and reset the channel count to 0. + /// the layout structure to be uninitialized + public static void av_channel_layout_uninit(AVChannelLayout* @channel_layout) => vectors.av_channel_layout_uninit(@channel_layout); + + /// Get a human readable string in an abbreviated form describing a given channel. This is the inverse function of av_channel_from_string(). + /// pre-allocated buffer where to put the generated string + /// size in bytes of the buffer. + /// amount of bytes needed to hold the output string, or a negative AVERROR on failure. If the returned value is bigger than buf_size, then the string was truncated. + public static int av_channel_name(byte* @buf, ulong @buf_size, AVChannel @channel) => vectors.av_channel_name(@buf, @buf_size, @channel); + + /// bprint variant of av_channel_name(). + public static void av_channel_name_bprint(AVBPrint* @bp, AVChannel @channel_id) => vectors.av_channel_name_bprint(@bp, @channel_id); + + /// Returns the AVChromaLocation value for name or an AVError if not found. + /// the AVChromaLocation value for name or an AVError if not found. + public static int av_chroma_location_from_name(string @name) => vectors.av_chroma_location_from_name(@name); + + /// Returns the name for provided chroma location or NULL if unknown. + /// the name for provided chroma location or NULL if unknown. + public static string av_chroma_location_name(AVChromaLocation @location) => vectors.av_chroma_location_name(@location); + + /// Get the AVCodecID for the given codec tag tag. If no codec id is found returns AV_CODEC_ID_NONE. 
+ /// list of supported codec_id-codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + /// codec tag to match to a codec ID + public static AVCodecID av_codec_get_id(AVCodecTag** @tags, uint @tag) => vectors.av_codec_get_id(@tags, @tag); + + /// Get the codec tag for the given codec id id. If no codec tag is found returns 0. + /// list of supported codec_id-codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + /// codec ID to match to a codec tag + public static uint av_codec_get_tag(AVCodecTag** @tags, AVCodecID @id) => vectors.av_codec_get_tag(@tags, @id); + + /// Get the codec tag for the given codec id. + /// list of supported codec_id - codec_tag pairs, as stored in AVInputFormat.codec_tag and AVOutputFormat.codec_tag + /// codec id that should be searched for in the list + /// A pointer to the found tag + /// 0 if id was not found in tags, > 0 if it was found + public static int av_codec_get_tag2(AVCodecTag** @tags, AVCodecID @id, uint* @tag) => vectors.av_codec_get_tag2(@tags, @id, @tag); + + /// Returns a non-zero number if codec is a decoder, zero otherwise + /// a non-zero number if codec is a decoder, zero otherwise + public static int av_codec_is_decoder(AVCodec* @codec) => vectors.av_codec_is_decoder(@codec); + + /// Returns a non-zero number if codec is an encoder, zero otherwise + /// a non-zero number if codec is an encoder, zero otherwise + public static int av_codec_is_encoder(AVCodec* @codec) => vectors.av_codec_is_encoder(@codec); + + /// Iterate over all registered codecs. + /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. + /// the next registered codec or NULL when the iteration is finished + public static AVCodec* av_codec_iterate(void** @opaque) => vectors.av_codec_iterate(@opaque); + + /// Returns the AVColorPrimaries value for name or an AVError if not found. 
+ /// the AVColorPrimaries value for name or an AVError if not found. + public static int av_color_primaries_from_name(string @name) => vectors.av_color_primaries_from_name(@name); + + /// Returns the name for provided color primaries or NULL if unknown. + /// the name for provided color primaries or NULL if unknown. + public static string av_color_primaries_name(AVColorPrimaries @primaries) => vectors.av_color_primaries_name(@primaries); + + /// Returns the AVColorRange value for name or an AVError if not found. + /// the AVColorRange value for name or an AVError if not found. + public static int av_color_range_from_name(string @name) => vectors.av_color_range_from_name(@name); + + /// Returns the name for provided color range or NULL if unknown. + /// the name for provided color range or NULL if unknown. + public static string av_color_range_name(AVColorRange @range) => vectors.av_color_range_name(@range); + + /// Returns the AVColorSpace value for name or an AVError if not found. + /// the AVColorSpace value for name or an AVError if not found. + public static int av_color_space_from_name(string @name) => vectors.av_color_space_from_name(@name); + + /// Returns the name for provided color space or NULL if unknown. + /// the name for provided color space or NULL if unknown. + public static string av_color_space_name(AVColorSpace @space) => vectors.av_color_space_name(@space); + + /// Returns the AVColorTransferCharacteristic value for name or an AVError if not found. + /// the AVColorTransferCharacteristic value for name or an AVError if not found. + public static int av_color_transfer_from_name(string @name) => vectors.av_color_transfer_from_name(@name); + + /// Returns the name for provided color transfer or NULL if unknown. + /// the name for provided color transfer or NULL if unknown. 
+ public static string av_color_transfer_name(AVColorTransferCharacteristic @transfer) => vectors.av_color_transfer_name(@transfer); + + /// Compare the remainders of two integer operands divided by a common divisor. + /// Divisor; must be a power of 2 + /// - a negative value if `a % mod < b % mod` - a positive value if `a % mod > b % mod` - zero if `a % mod == b % mod` + public static long av_compare_mod(ulong @a, ulong @b, ulong @mod) => vectors.av_compare_mod(@a, @b, @mod); + + /// Compare two timestamps each in its own time base. + /// One of the following values: - -1 if `ts_a` is before `ts_b` - 1 if `ts_a` is after `ts_b` - 0 if they represent the same position + public static int av_compare_ts(long @ts_a, AVRational @tb_a, long @ts_b, AVRational @tb_b) => vectors.av_compare_ts(@ts_a, @tb_a, @ts_b, @tb_b); + + /// Allocate an AVContentLightMetadata structure and set its fields to default values. The resulting struct can be freed using av_freep(). + /// An AVContentLightMetadata filled with default values or NULL on failure. + public static AVContentLightMetadata* av_content_light_metadata_alloc(ulong* @size) => vectors.av_content_light_metadata_alloc(@size); + + /// Allocate a complete AVContentLightMetadata and add it to the frame. + /// The frame which side data is added to. + /// The AVContentLightMetadata structure to be filled by caller. + public static AVContentLightMetadata* av_content_light_metadata_create_side_data(AVFrame* @frame) => vectors.av_content_light_metadata_create_side_data(@frame); + + /// Allocate a CPB properties structure and initialize its fields to default values. + /// if non-NULL, the size of the allocated struct will be written here. This is useful for embedding it in side data. + /// the newly allocated struct or NULL on failure + public static AVCPBProperties* av_cpb_properties_alloc(ulong* @size) => vectors.av_cpb_properties_alloc(@size); + + /// Returns the number of logical CPU cores present. 
+ /// the number of logical CPU cores present. + public static int av_cpu_count() => vectors.av_cpu_count(); + + /// Overrides cpu count detection and forces the specified count. Count < 1 disables forcing of specific count. + public static void av_cpu_force_count(int @count) => vectors.av_cpu_force_count(@count); + + /// Get the maximum data alignment that may be required by FFmpeg. + public static ulong av_cpu_max_align() => vectors.av_cpu_max_align(); + + /// Convert a double precision floating point number to a rational. + /// `double` to convert + /// Maximum allowed numerator and denominator + /// `d` in AVRational form + public static AVRational av_d2q(double @d, int @max) => vectors.av_d2q(@d, @max); + + /// Allocate an AVD3D11VAContext. + /// Newly-allocated AVD3D11VAContext or NULL on failure. + public static AVD3D11VAContext* av_d3d11va_alloc_context() => vectors.av_d3d11va_alloc_context(); + + public static AVClassCategory av_default_get_category(void* @ptr) => vectors.av_default_get_category(@ptr); + + /// Return the context name + /// The AVClass context + /// The AVClass class_name + public static string av_default_item_name(void* @ctx) => vectors.av_default_item_name(@ctx); + + /// Iterate over all registered demuxers. + /// a pointer where libavformat will store the iteration state. Must point to NULL to start the iteration. + /// the next registered demuxer or NULL when the iteration is finished + public static AVInputFormat* av_demuxer_iterate(void** @opaque) => vectors.av_demuxer_iterate(@opaque); + + /// Copy entries from one AVDictionary struct into another. + /// pointer to a pointer to a AVDictionary struct. If *dst is NULL, this function will allocate a struct for you and put it in *dst + /// pointer to source AVDictionary struct + /// flags to use when setting entries in *dst + /// 0 on success, negative AVERROR code on failure. If dst was allocated by this function, callers should free the associated memory. 
+ public static int av_dict_copy(AVDictionary** @dst, AVDictionary* @src, int @flags) => vectors.av_dict_copy(@dst, @src, @flags); + + /// Get number of entries in dictionary. + /// dictionary + /// number of entries in dictionary + public static int av_dict_count(AVDictionary* @m) => vectors.av_dict_count(@m); + + /// Free all the memory allocated for an AVDictionary struct and all keys and values. + public static void av_dict_free(AVDictionary** @m) => vectors.av_dict_free(@m); + + /// Get a dictionary entry with matching key. + /// matching key + /// Set to the previous matching element to find the next. If set to NULL the first matching element is returned. + /// a collection of AV_DICT_* flags controlling how the entry is retrieved + /// found entry or NULL in case no matching entry was found in the dictionary + public static AVDictionaryEntry* av_dict_get(AVDictionary* @m, string @key, AVDictionaryEntry* @prev, int @flags) => vectors.av_dict_get(@m, @key, @prev, @flags); + + /// Get dictionary entries as a string. + /// dictionary + /// Pointer to buffer that will be allocated with string containing entries. Buffer must be freed by the caller when it is no longer needed. + /// character used to separate key from value + /// character used to separate two pairs from each other + /// >= 0 on success, negative on error + public static int av_dict_get_string(AVDictionary* @m, byte** @buffer, byte @key_val_sep, byte @pairs_sep) => vectors.av_dict_get_string(@m, @buffer, @key_val_sep, @pairs_sep); + + /// Parse the key/value pairs list and add the parsed entries to a dictionary. + /// a 0-terminated list of characters used to separate key from value + /// a 0-terminated list of characters used to separate two pairs from each other + /// flags to use when adding to dictionary. AV_DICT_DONT_STRDUP_KEY and AV_DICT_DONT_STRDUP_VAL are ignored since the key/value tokens will always be duplicated. 
+ /// 0 on success, negative AVERROR code on failure + public static int av_dict_parse_string(AVDictionary** @pm, string @str, string @key_val_sep, string @pairs_sep, int @flags) => vectors.av_dict_parse_string(@pm, @str, @key_val_sep, @pairs_sep, @flags); + + /// Set the given entry in *pm, overwriting an existing entry. + /// pointer to a pointer to a dictionary struct. If *pm is NULL a dictionary struct is allocated and put in *pm. + /// entry key to add to *pm (will either be av_strduped or added as a new key depending on flags) + /// entry value to add to *pm (will be av_strduped or added as a new key depending on flags). Passing a NULL value will cause an existing entry to be deleted. + /// >= 0 on success otherwise an error code < 0 + public static int av_dict_set(AVDictionary** @pm, string @key, string @value, int @flags) => vectors.av_dict_set(@pm, @key, @value, @flags); + + /// Convenience wrapper for av_dict_set that converts the value to a string and stores it. + public static int av_dict_set_int(AVDictionary** @pm, string @key, long @value, int @flags) => vectors.av_dict_set_int(@pm, @key, @value, @flags); + + /// Returns The AV_DISPOSITION_* flag corresponding to disp or a negative error code if disp does not correspond to a known stream disposition. + /// The AV_DISPOSITION_* flag corresponding to disp or a negative error code if disp does not correspond to a known stream disposition. + public static int av_disposition_from_string(string @disp) => vectors.av_disposition_from_string(@disp); + + /// Returns The string description corresponding to the lowest set bit in disposition. NULL when the lowest set bit does not correspond to a known disposition or when disposition is 0. + /// a combination of AV_DISPOSITION_* values + /// The string description corresponding to the lowest set bit in disposition. NULL when the lowest set bit does not correspond to a known disposition or when disposition is 0. 
+ public static string av_disposition_to_string(int @disposition) => vectors.av_disposition_to_string(@disposition); + + /// Divide one rational by another. + /// First rational + /// Second rational + /// b/c + public static AVRational av_div_q(AVRational @b, AVRational @c) => vectors.av_div_q(@b, @c); + + /// Print detailed information about the input or output format, such as duration, bitrate, streams, container, programs, metadata, side data, codec and time base. + /// the context to analyze + /// index of the stream to dump information about + /// the URL to print, such as source or destination file + /// Select whether the specified context is an input(0) or output(1) + public static void av_dump_format(AVFormatContext* @ic, int @index, string @url, int @is_output) => vectors.av_dump_format(@ic, @index, @url, @is_output); + + /// Allocate an AVDynamicHDRPlus structure and set its fields to default values. The resulting struct can be freed using av_freep(). + /// An AVDynamicHDRPlus filled with default values or NULL on failure. + public static AVDynamicHDRPlus* av_dynamic_hdr_plus_alloc(ulong* @size) => vectors.av_dynamic_hdr_plus_alloc(@size); + + /// Allocate a complete AVDynamicHDRPlus and add it to the frame. + /// The frame which side data is added to. + /// The AVDynamicHDRPlus structure to be filled by caller or NULL on failure. + public static AVDynamicHDRPlus* av_dynamic_hdr_plus_create_side_data(AVFrame* @frame) => vectors.av_dynamic_hdr_plus_create_side_data(@frame); + + /// Add the pointer to an element to a dynamic array. + /// Pointer to the array to grow + /// Pointer to the number of elements in the array + /// Element to add + public static void av_dynarray_add(void* @tab_ptr, int* @nb_ptr, void* @elem) => vectors.av_dynarray_add(@tab_ptr, @nb_ptr, @elem); + + /// Add an element to a dynamic array. 
+ /// >=0 on success, negative otherwise + public static int av_dynarray_add_nofree(void* @tab_ptr, int* @nb_ptr, void* @elem) => vectors.av_dynarray_add_nofree(@tab_ptr, @nb_ptr, @elem); + + /// Add an element of size `elem_size` to a dynamic array. + /// Pointer to the array to grow + /// Pointer to the number of elements in the array + /// Size in bytes of an element in the array + /// Pointer to the data of the element to add. If `NULL`, the space of the newly added element is allocated but left uninitialized. + /// Pointer to the data of the element to copy in the newly allocated space + public static void* av_dynarray2_add(void** @tab_ptr, int* @nb_ptr, ulong @elem_size, byte* @elem_data) => vectors.av_dynarray2_add(@tab_ptr, @nb_ptr, @elem_size, @elem_data); + + /// Allocate a buffer, reusing the given one if large enough. + /// Pointer to pointer to an already allocated buffer. `*ptr` will be overwritten with pointer to new buffer on success or `NULL` on failure + /// Pointer to the size of buffer `*ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure. + /// Desired minimal size of buffer `*ptr` + public static void av_fast_malloc(void* @ptr, uint* @size, ulong @min_size) => vectors.av_fast_malloc(@ptr, @size, @min_size); + + /// Allocate and clear a buffer, reusing the given one if large enough. + /// Pointer to pointer to an already allocated buffer. `*ptr` will be overwritten with pointer to new buffer on success or `NULL` on failure + /// Pointer to the size of buffer `*ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure. + /// Desired minimal size of buffer `*ptr` + public static void av_fast_mallocz(void* @ptr, uint* @size, ulong @min_size) => vectors.av_fast_mallocz(@ptr, @size, @min_size); + + /// Same behaviour av_fast_malloc but the buffer has additional AV_INPUT_BUFFER_PADDING_SIZE at the end which will always be 0. 
+ public static void av_fast_padded_malloc(void* @ptr, uint* @size, ulong @min_size) => vectors.av_fast_padded_malloc(@ptr, @size, @min_size); + + /// Same behaviour av_fast_padded_malloc except that buffer will always be 0-initialized after call. + public static void av_fast_padded_mallocz(void* @ptr, uint* @size, ulong @min_size) => vectors.av_fast_padded_mallocz(@ptr, @size, @min_size); + + /// Reallocate the given buffer if it is not large enough, otherwise do nothing. + /// Already allocated buffer, or `NULL` + /// Pointer to the size of buffer `ptr`. `*size` is updated to the new allocated size, in particular 0 in case of failure. + /// Desired minimal size of buffer `ptr` + /// `ptr` if the buffer is large enough, a pointer to newly reallocated buffer if the buffer was not large enough, or `NULL` in case of error + public static void* av_fast_realloc(void* @ptr, uint* @size, ulong @min_size) => vectors.av_fast_realloc(@ptr, @size, @min_size); + + /// Read the file with name filename, and put its content in a newly allocated buffer or map it with mmap() when available. In case of success set *bufptr to the read or mmapped buffer, and *size to the size in bytes of the buffer in *bufptr. Unlike mmap this function succeeds with zero sized files, in this case *bufptr will be set to NULL and *size will be set to 0. The returned buffer must be released with av_file_unmap(). + /// loglevel offset used for logging + /// context used for logging + /// a non negative number in case of success, a negative value corresponding to an AVERROR error code in case of failure + public static int av_file_map(string @filename, byte** @bufptr, ulong* @size, int @log_offset, void* @log_ctx) => vectors.av_file_map(@filename, @bufptr, @size, @log_offset, @log_ctx); + + /// Unmap or free the buffer bufptr created by av_file_map(). 
+ /// size in bytes of bufptr, must be the same as returned by av_file_map() + public static void av_file_unmap(byte* @bufptr, ulong @size) => vectors.av_file_unmap(@bufptr, @size); + + /// Check whether filename actually is a numbered sequence generator. + /// possible numbered sequence string + /// 1 if a valid numbered sequence string, 0 otherwise + public static int av_filename_number_test(string @filename) => vectors.av_filename_number_test(@filename); + + /// Iterate over all registered filters. + /// a pointer where libavfilter will store the iteration state. Must point to NULL to start the iteration. + /// the next registered filter or NULL when the iteration is finished + public static AVFilter* av_filter_iterate(void** @opaque) => vectors.av_filter_iterate(@opaque); + + /// Compute what kind of losses will occur when converting from one specific pixel format to another. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. These losses can involve loss of chroma, but also loss of resolution, loss of color depth, loss due to the color space conversion, loss of the alpha bits or loss due to color quantization. av_get_pix_fmt_loss() informs you about the various types of losses which will occur when converting from one pixel format to another. + /// source pixel format + /// Whether the source pixel format alpha channel is used. + /// Combination of flags informing you what kind of losses will occur (maximum loss for an invalid dst_pix_fmt). + public static AVPixelFormat av_find_best_pix_fmt_of_2(AVPixelFormat @dst_pix_fmt1, AVPixelFormat @dst_pix_fmt2, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr) => vectors.av_find_best_pix_fmt_of_2(@dst_pix_fmt1, @dst_pix_fmt2, @src_pix_fmt, @has_alpha, @loss_ptr); + + /// Find the "best" stream in the file. 
The best stream is determined according to various heuristics as the most likely to be what the user expects. If the decoder parameter is non-NULL, av_find_best_stream will find the default decoder for the stream's codec; streams for which no decoder can be found are ignored. + /// media file handle + /// stream type: video, audio, subtitles, etc. + /// user-requested stream number, or -1 for automatic selection + /// try to find a stream related (eg. in the same program) to this one, or -1 if none + /// if non-NULL, returns the decoder for the selected stream + /// flags; none are currently defined + /// the non-negative stream number in case of success, AVERROR_STREAM_NOT_FOUND if no stream with the requested type could be found, AVERROR_DECODER_NOT_FOUND if streams were found but no decoder + public static int av_find_best_stream(AVFormatContext* @ic, AVMediaType @type, int @wanted_stream_nb, int @related_stream, AVCodec** @decoder_ret, int @flags) => vectors.av_find_best_stream(@ic, @type, @wanted_stream_nb, @related_stream, @decoder_ret, @flags); + + public static int av_find_default_stream_index(AVFormatContext* @s) => vectors.av_find_default_stream_index(@s); + + /// Find AVInputFormat based on the short name of the input format. + public static AVInputFormat* av_find_input_format(string @short_name) => vectors.av_find_input_format(@short_name); + + /// Find the value in a list of rationals nearest a given reference rational. + /// Reference rational + /// Array of rationals terminated by `{0, 0}` + /// Index of the nearest value found in the array + public static int av_find_nearest_q_idx(AVRational @q, AVRational* @q_list) => vectors.av_find_nearest_q_idx(@q, @q_list); + + /// Find the programs which belong to a given stream. 
+ /// media file handle + /// the last found program, the search will start after this program, or from the beginning if it is NULL + /// stream index + /// the next program which belongs to s, NULL if no program is found or the last program is not among the programs of ic. + public static AVProgram* av_find_program_from_stream(AVFormatContext* @ic, AVProgram* @last, int @s) => vectors.av_find_program_from_stream(@ic, @last, @s); + + /// Returns the method used to set ctx->duration. + /// AVFMT_DURATION_FROM_PTS, AVFMT_DURATION_FROM_STREAM, or AVFMT_DURATION_FROM_BITRATE. + public static AVDurationEstimationMethod av_fmt_ctx_get_duration_estimation_method(AVFormatContext* @ctx) => vectors.av_fmt_ctx_get_duration_estimation_method(@ctx); + + /// Open a file using a UTF-8 filename. The API of this function matches POSIX fopen(), errors are returned through errno. + [Obsolete("Avoid using it, as on Windows, the FILE* allocated by this function may be allocated with a different CRT than the caller who uses the FILE*. No replacement provided in public API.")] + public static _iobuf* av_fopen_utf8(string @path, string @mode) => vectors.av_fopen_utf8(@path, @mode); + + /// Disables cpu detection and forces the specified flags. -1 is a special case that disables forcing of specific flags. + public static void av_force_cpu_flags(int @flags) => vectors.av_force_cpu_flags(@flags); + + /// This function will cause global side data to be injected in the next packet of each stream as well as after any subsequent seek. + public static void av_format_inject_global_side_data(AVFormatContext* @s) => vectors.av_format_inject_global_side_data(@s); + + /// Fill the provided buffer with a string containing a FourCC (four-character code) representation. 
+ /// a buffer with size in bytes of at least AV_FOURCC_MAX_STRING_SIZE + /// the fourcc to represent + /// the buffer in input + public static byte* av_fourcc_make_string(byte* @buf, uint @fourcc) => vectors.av_fourcc_make_string(@buf, @fourcc); + + /// Allocate an AVFrame and set its fields to default values. The resulting struct must be freed using av_frame_free(). + /// An AVFrame filled with default values or NULL on failure. + public static AVFrame* av_frame_alloc() => vectors.av_frame_alloc(); + + /// Crop the given video AVFrame according to its crop_left/crop_top/crop_right/ crop_bottom fields. If cropping is successful, the function will adjust the data pointers and the width/height fields, and set the crop fields to 0. + /// the frame which should be cropped + /// Some combination of AV_FRAME_CROP_* flags, or 0. + /// >= 0 on success, a negative AVERROR on error. If the cropping fields were invalid, AVERROR(ERANGE) is returned, and nothing is changed. + public static int av_frame_apply_cropping(AVFrame* @frame, int @flags) => vectors.av_frame_apply_cropping(@frame, @flags); + + /// Create a new frame that references the same data as src. + /// newly created AVFrame on success, NULL on error. + public static AVFrame* av_frame_clone(AVFrame* @src) => vectors.av_frame_clone(@src); + + /// Copy the frame data from src to dst. + /// >= 0 on success, a negative AVERROR on error. + public static int av_frame_copy(AVFrame* @dst, AVFrame* @src) => vectors.av_frame_copy(@dst, @src); + + /// Copy only "metadata" fields from src to dst. + public static int av_frame_copy_props(AVFrame* @dst, AVFrame* @src) => vectors.av_frame_copy_props(@dst, @src); + + /// Free the frame and any dynamically allocated objects in it, e.g. extended_data. If the frame is reference counted, it will be unreferenced first. + /// frame to be freed. The pointer will be set to NULL. 
+ public static void av_frame_free(AVFrame** @frame) => vectors.av_frame_free(@frame); + + /// Allocate new buffer(s) for audio or video data. + /// frame in which to store the new buffers. + /// Required buffer size alignment. If equal to 0, alignment will be chosen automatically for the current CPU. It is highly recommended to pass 0 here unless you know what you are doing. + /// 0 on success, a negative AVERROR on error. + public static int av_frame_get_buffer(AVFrame* @frame, int @align) => vectors.av_frame_get_buffer(@frame, @align); + + /// Get the buffer reference a given data plane is stored in. + /// index of the data plane of interest in frame->extended_data. + /// the buffer reference that contains the plane or NULL if the input frame is not valid. + public static AVBufferRef* av_frame_get_plane_buffer(AVFrame* @frame, int @plane) => vectors.av_frame_get_plane_buffer(@frame, @plane); + + /// Returns a pointer to the side data of a given type on success, NULL if there is no side data with such type in this frame. + /// a pointer to the side data of a given type on success, NULL if there is no side data with such type in this frame. + public static AVFrameSideData* av_frame_get_side_data(AVFrame* @frame, AVFrameSideDataType @type) => vectors.av_frame_get_side_data(@frame, @type); + + /// Check if the frame data is writable. + /// A positive value if the frame data is writable (which is true if and only if each of the underlying buffers has only one reference, namely the one stored in this frame). Return 0 otherwise. + public static int av_frame_is_writable(AVFrame* @frame) => vectors.av_frame_is_writable(@frame); + + /// Ensure that the frame data is writable, avoiding data copy if possible. + /// 0 on success, a negative AVERROR on error. + public static int av_frame_make_writable(AVFrame* @frame) => vectors.av_frame_make_writable(@frame); + + /// Move everything contained in src to dst and reset src. 
+ public static void av_frame_move_ref(AVFrame* @dst, AVFrame* @src) => vectors.av_frame_move_ref(@dst, @src); + + /// Add a new side data to a frame. + /// a frame to which the side data should be added + /// type of the added side data + /// size of the side data + /// newly added side data on success, NULL on error + public static AVFrameSideData* av_frame_new_side_data(AVFrame* @frame, AVFrameSideDataType @type, ulong @size) => vectors.av_frame_new_side_data(@frame, @type, @size); + + /// Add a new side data to a frame from an existing AVBufferRef + /// a frame to which the side data should be added + /// the type of the added side data + /// an AVBufferRef to add as side data. The ownership of the reference is transferred to the frame. + /// newly added side data on success, NULL on error. On failure the frame is unchanged and the AVBufferRef remains owned by the caller. + public static AVFrameSideData* av_frame_new_side_data_from_buf(AVFrame* @frame, AVFrameSideDataType @type, AVBufferRef* @buf) => vectors.av_frame_new_side_data_from_buf(@frame, @type, @buf); + + /// Set up a new reference to the data described by the source frame. + /// 0 on success, a negative AVERROR on error + public static int av_frame_ref(AVFrame* @dst, AVFrame* @src) => vectors.av_frame_ref(@dst, @src); + + /// Remove and free all side data instances of the given type. + public static void av_frame_remove_side_data(AVFrame* @frame, AVFrameSideDataType @type) => vectors.av_frame_remove_side_data(@frame, @type); + + /// Returns a string identifying the side data type + /// a string identifying the side data type + public static string av_frame_side_data_name(AVFrameSideDataType @type) => vectors.av_frame_side_data_name(@type); + + /// Unreference all the buffers referenced by frame and reset the frame fields. 
+ public static void av_frame_unref(AVFrame* @frame) => vectors.av_frame_unref(@frame); + + /// Free a memory block which has been allocated with a function of av_malloc() or av_realloc() family. + /// Pointer to the memory block which should be freed. + public static void av_free(void* @ptr) => vectors.av_free(@ptr); + + /// Free a memory block which has been allocated with a function of av_malloc() or av_realloc() family, and set the pointer pointing to it to `NULL`. + /// Pointer to the pointer to the memory block which should be freed + public static void av_freep(void* @ptr) => vectors.av_freep(@ptr); + + /// Compute the greatest common divisor of two integer operands. + /// GCD of a and b up to sign; if a >= 0 and b >= 0, return value is >= 0; if a == 0 and b == 0, returns 0. + public static long av_gcd(long @a, long @b) => vectors.av_gcd(@a, @b); + + /// Return the best rational so that a and b are multiple of it. If the resulting denominator is larger than max_den, return def. + public static AVRational av_gcd_q(AVRational @a, AVRational @b, int @max_den, AVRational @def) => vectors.av_gcd_q(@a, @b, @max_den, @def); + + /// Return the planar<->packed alternative form of the given sample format, or AV_SAMPLE_FMT_NONE on error. If the passed sample_fmt is already in the requested planar/packed format, the format returned is the same as the input. + public static AVSampleFormat av_get_alt_sample_fmt(AVSampleFormat @sample_fmt, int @planar) => vectors.av_get_alt_sample_fmt(@sample_fmt, @planar); + + /// Return audio frame duration. + /// codec context + /// size of the frame, or 0 if unknown + /// frame duration, in samples, if known. 0 if not able to determine. + public static int av_get_audio_frame_duration(AVCodecContext* @avctx, int @frame_bytes) => vectors.av_get_audio_frame_duration(@avctx, @frame_bytes); + + /// This function is the same as av_get_audio_frame_duration(), except it works with AVCodecParameters instead of an AVCodecContext. 
+ public static int av_get_audio_frame_duration2(AVCodecParameters* @par, int @frame_bytes) => vectors.av_get_audio_frame_duration2(@par, @frame_bytes); + + /// Return the number of bits per pixel used by the pixel format described by pixdesc. Note that this is not the same as the number of bits per sample. + public static int av_get_bits_per_pixel(AVPixFmtDescriptor* @pixdesc) => vectors.av_get_bits_per_pixel(@pixdesc); + + /// Return codec bits per sample. + /// the codec + /// Number of bits per sample or zero if unknown for the given codec. + public static int av_get_bits_per_sample(AVCodecID @codec_id) => vectors.av_get_bits_per_sample(@codec_id); + + /// Return number of bytes per sample. + /// the sample format + /// number of bytes per sample or zero if unknown for the given sample format + public static int av_get_bytes_per_sample(AVSampleFormat @sample_fmt) => vectors.av_get_bytes_per_sample(@sample_fmt); + + /// Get the description of a given channel. + /// a channel layout with a single channel + /// channel description on success, NULL on error + [Obsolete("use av_channel_description()")] + public static string av_get_channel_description(ulong @channel) => vectors.av_get_channel_description(@channel); + + /// Return a channel layout id that matches name, or 0 if no match is found. + [Obsolete("use av_channel_layout_from_string()")] + public static ulong av_get_channel_layout(string @name) => vectors.av_get_channel_layout(@name); + + /// Get the index of a channel in channel_layout. + /// a channel layout describing exactly one channel which must be present in channel_layout. + /// index of channel in channel_layout on success, a negative AVERROR on error. + [Obsolete("use av_channel_layout_index_from_channel()")] + public static int av_get_channel_layout_channel_index(ulong @channel_layout, ulong @channel) => vectors.av_get_channel_layout_channel_index(@channel_layout, @channel); + + /// Return the number of channels in the channel layout. 
+ [Obsolete("use AVChannelLayout.nb_channels")] + public static int av_get_channel_layout_nb_channels(ulong @channel_layout) => vectors.av_get_channel_layout_nb_channels(@channel_layout); + + /// Return a description of a channel layout. If nb_channels is <= 0, it is guessed from the channel_layout. + /// put here the string containing the channel layout + /// size in bytes of the buffer + [Obsolete("use av_channel_layout_describe()")] + public static void av_get_channel_layout_string(byte* @buf, int @buf_size, int @nb_channels, ulong @channel_layout) => vectors.av_get_channel_layout_string(@buf, @buf_size, @nb_channels, @channel_layout); + + /// Get the name of a given channel. + /// channel name on success, NULL on error. + [Obsolete("use av_channel_name()")] + public static string av_get_channel_name(ulong @channel) => vectors.av_get_channel_name(@channel); + + /// Get the name of a colorspace. + /// a static string identifying the colorspace; can be NULL. + [Obsolete("use av_color_space_name()")] + public static string av_get_colorspace_name(AVColorSpace @val) => vectors.av_get_colorspace_name(@val); + + /// Return the flags which specify extensions supported by the CPU. The returned value is affected by av_force_cpu_flags() if that was used before. So av_get_cpu_flags() can easily be used in an application to detect the enabled cpu flags. + public static int av_get_cpu_flags() => vectors.av_get_cpu_flags(); + + /// Return default channel layout for a given number of channels. + [Obsolete("use av_channel_layout_default()")] + public static long av_get_default_channel_layout(int @nb_channels) => vectors.av_get_default_channel_layout(@nb_channels); + + /// Return codec bits per sample. Only return non-zero if the bits per sample is exactly correct, not an approximation. + /// the codec + /// Number of bits per sample or zero if unknown for the given codec. 
+ public static int av_get_exact_bits_per_sample(AVCodecID @codec_id) => vectors.av_get_exact_bits_per_sample(@codec_id); + + /// Return a channel layout and the number of channels based on the specified name. + /// channel layout specification string + /// parsed channel layout (0 if unknown) + /// number of channels + /// 0 on success, AVERROR(EINVAL) if the parsing fails. + [Obsolete("use av_channel_layout_from_string()")] + public static int av_get_extended_channel_layout(string @name, ulong* @channel_layout, int* @nb_channels) => vectors.av_get_extended_channel_layout(@name, @channel_layout, @nb_channels); + + public static int av_get_frame_filename(byte* @buf, int @buf_size, string @path, int @number) => vectors.av_get_frame_filename(@buf, @buf_size, @path, @number); + + /// Return in 'buf' the path with '%d' replaced by a number. + /// destination buffer + /// destination buffer size + /// numbered sequence string + /// frame number + /// AV_FRAME_FILENAME_FLAGS_* + /// 0 if OK, -1 on format error + public static int av_get_frame_filename2(byte* @buf, int @buf_size, string @path, int @number, int @flags) => vectors.av_get_frame_filename2(@buf, @buf_size, @path, @number, @flags); + + /// Return a string describing the media_type enum, NULL if media_type is unknown. + public static string av_get_media_type_string(AVMediaType @media_type) => vectors.av_get_media_type_string(@media_type); + + /// Get timing information for the data currently output. The exact meaning of "currently output" depends on the format. It is mostly relevant for devices that have an internal buffer and/or work in real time. + /// media file handle + /// stream in the media file + /// DTS of the last packet output for the stream, in stream time_base units + /// absolute time when that packet was output, in microseconds + /// 0 if OK, AVERROR(ENOSYS) if the format does not support it Note: some formats or devices may not allow to measure dts and wall atomically. 
public static int av_get_output_timestamp(AVFormatContext* @s, int @stream, long* @dts, long* @wall) => vectors.av_get_output_timestamp(@s, @stream, @dts, @wall);

/// <summary>Get the packed alternative form of the given sample format.</summary>
/// <returns>the packed alternative form of the given sample format or AV_SAMPLE_FMT_NONE on error.</returns>
public static AVSampleFormat av_get_packed_sample_fmt(AVSampleFormat @sample_fmt) => vectors.av_get_packed_sample_fmt(@sample_fmt);

/// <summary>Allocate and read the payload of a packet and initialize its fields with default values.</summary>
/// <param name="s">associated IO context</param>
/// <param name="pkt">packet</param>
/// <param name="size">desired payload size</param>
/// <returns>&gt;0 (read size) if OK, AVERROR_xxx otherwise</returns>
public static int av_get_packet(AVIOContext* @s, AVPacket* @pkt, int @size) => vectors.av_get_packet(@s, @pkt, @size);

/// <summary>Return the number of bits per pixel for the pixel format described by pixdesc, including any padding or unused bits.</summary>
public static int av_get_padded_bits_per_pixel(AVPixFmtDescriptor* @pixdesc) => vectors.av_get_padded_bits_per_pixel(@pixdesc);

/// <summary>Return the PCM codec associated with a sample format.</summary>
/// <param name="be">endianness, 0 for little, 1 for big, -1 (or anything else) for native</param>
/// <returns>AV_CODEC_ID_PCM_* or AV_CODEC_ID_NONE</returns>
public static AVCodecID av_get_pcm_codec(AVSampleFormat @fmt, int @be) => vectors.av_get_pcm_codec(@fmt, @be);

/// <summary>Return a single letter to describe the given picture type pict_type.</summary>
/// <param name="pict_type">the picture type</param>
/// <returns>a single character representing the picture type, '?' if pict_type is unknown</returns>
public static byte av_get_picture_type_char(AVPictureType @pict_type) => vectors.av_get_picture_type_char(@pict_type);

/// <summary>Return the pixel format corresponding to name.</summary>
public static AVPixelFormat av_get_pix_fmt(string @name) => vectors.av_get_pix_fmt(@name);

/// <summary>Compute what kind of losses will occur when converting from one specific pixel format to another. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. These losses can involve loss of chroma, but also loss of resolution, loss of color depth, loss due to the color space conversion, loss of the alpha bits or loss due to color quantization. av_get_fix_fmt_loss() informs you about the various types of losses which will occur when converting from one pixel format to another.</summary>
/// <param name="dst_pix_fmt">destination pixel format</param>
/// <param name="src_pix_fmt">source pixel format</param>
/// <param name="has_alpha">Whether the source pixel format alpha channel is used.</param>
/// <returns>Combination of flags informing you what kind of losses will occur (maximum loss for an invalid dst_pix_fmt).</returns>
public static int av_get_pix_fmt_loss(AVPixelFormat @dst_pix_fmt, AVPixelFormat @src_pix_fmt, int @has_alpha) => vectors.av_get_pix_fmt_loss(@dst_pix_fmt, @src_pix_fmt, @has_alpha);

/// <summary>Return the short name for a pixel format, NULL in case pix_fmt is unknown.</summary>
public static string av_get_pix_fmt_name(AVPixelFormat @pix_fmt) => vectors.av_get_pix_fmt_name(@pix_fmt);

/// <summary>Print in buf the string corresponding to the pixel format with number pix_fmt, or a header if pix_fmt is negative.</summary>
/// <param name="buf">the buffer where to write the string</param>
/// <param name="buf_size">the size of buf</param>
/// <param name="pix_fmt">the number of the pixel format to print the corresponding info string, or a negative value to print the corresponding header.</param>
public static byte* av_get_pix_fmt_string(byte* @buf, int @buf_size, AVPixelFormat @pix_fmt) => vectors.av_get_pix_fmt_string(@buf, @buf_size, @pix_fmt);

/// <summary>Get the planar alternative form of the given sample format.</summary>
/// <returns>the planar alternative form of the given sample format or AV_SAMPLE_FMT_NONE on error.</returns>
public static AVSampleFormat av_get_planar_sample_fmt(AVSampleFormat @sample_fmt) => vectors.av_get_planar_sample_fmt(@sample_fmt);

/// <summary>Return a name for the specified profile, if available.</summary>
/// <param name="codec">the codec that is searched for the given profile</param>
/// <param name="profile">the profile value for which a name is requested</param>
/// <returns>A name for the profile if found, NULL otherwise.</returns>
public static string av_get_profile_name(AVCodec* @codec, int @profile) => vectors.av_get_profile_name(@codec, @profile);

/// <summary>Return a sample format corresponding to name, or AV_SAMPLE_FMT_NONE on error.</summary>
public static AVSampleFormat av_get_sample_fmt(string @name) => vectors.av_get_sample_fmt(@name);

/// <summary>Return the name of sample_fmt, or NULL if sample_fmt is not recognized.</summary>
public static string av_get_sample_fmt_name(AVSampleFormat @sample_fmt) => vectors.av_get_sample_fmt_name(@sample_fmt);

/// <summary>Generate a string corresponding to the sample format with sample_fmt, or a header if sample_fmt is negative.</summary>
/// <param name="buf">the buffer where to write the string</param>
/// <param name="buf_size">the size of buf</param>
/// <param name="sample_fmt">the number of the sample format to print the corresponding info string, or a negative value to print the corresponding header.</param>
/// <returns>the pointer to the filled buffer or NULL if sample_fmt is unknown or in case of other errors</returns>
public static byte* av_get_sample_fmt_string(byte* @buf, int @buf_size, AVSampleFormat @sample_fmt) => vectors.av_get_sample_fmt_string(@buf, @buf_size, @sample_fmt);

/// <summary>Get the value and name of a standard channel layout.</summary>
/// <param name="index">index in an internal list, starting at 0</param>
/// <param name="layout">channel layout mask</param>
/// <param name="name">name of the layout</param>
/// <returns>0 if the layout exists, &lt; 0 if index is beyond the limits</returns>
[Obsolete("use av_channel_layout_standard()")]
public static int av_get_standard_channel_layout(uint @index, ulong* @layout, byte** @name) => vectors.av_get_standard_channel_layout(@index, @layout, @name);

/// <summary>Return the fractional representation of the internal time base.</summary>
public static AVRational av_get_time_base_q() => vectors.av_get_time_base_q();

/// <summary>Get the current time in microseconds.</summary>
public static long av_gettime() => vectors.av_gettime();

/// <summary>Get the current time in microseconds since some unspecified starting point. On platforms that support it, the time comes from a monotonic clock This property makes this time source ideal for measuring relative time. The returned values may not be monotonic on platforms where a monotonic clock is not available.</summary>
public static long av_gettime_relative() => vectors.av_gettime_relative();

/// <summary>Indicates with a boolean result if the av_gettime_relative() time source is monotonic.</summary>
public static int av_gettime_relative_is_monotonic() => vectors.av_gettime_relative_is_monotonic();

/// <summary>Increase packet size, correctly zeroing padding</summary>
/// <param name="pkt">packet</param>
/// <param name="grow_by">number of bytes by which to increase the size of the packet</param>
public static int av_grow_packet(AVPacket* @pkt, int @grow_by) => vectors.av_grow_packet(@pkt, @grow_by);

/// <summary>Guess the codec ID based upon muxer and filename.</summary>
public static AVCodecID av_guess_codec(AVOutputFormat* @fmt, string @short_name, string @filename, string @mime_type, AVMediaType @type) => vectors.av_guess_codec(@fmt, @short_name, @filename, @mime_type, @type);

/// <summary>Return the output format in the list of registered output formats which best matches the provided parameters, or return NULL if there is no match.</summary>
/// <param name="short_name">if non-NULL checks if short_name matches with the names of the registered formats</param>
/// <param name="filename">if non-NULL checks if filename terminates with the extensions of the registered formats</param>
/// <param name="mime_type">if non-NULL checks if mime_type matches with the MIME type of the registered formats</param>
public static AVOutputFormat* av_guess_format(string @short_name, string @filename, string @mime_type) => vectors.av_guess_format(@short_name, @filename, @mime_type);

/// <summary>Guess the frame rate, based on both the container and codec information.</summary>
/// <param name="ctx">the format context which the stream is part of</param>
/// <param name="stream">the stream which the frame is part of</param>
/// <param name="frame">the frame for which the frame rate should be determined, may be NULL</param>
/// <returns>the guessed (valid) frame rate, 0/1 if no idea</returns>
public static AVRational av_guess_frame_rate(AVFormatContext* @ctx, AVStream* @stream, AVFrame* @frame) => vectors.av_guess_frame_rate(@ctx, @stream, @frame);

/// <summary>Guess the sample aspect ratio of a frame, based on both the stream and the frame aspect ratio.</summary>
/// <param name="format">the format context which the stream is part of</param>
/// <param name="stream">the stream which the frame is part of</param>
/// <param name="frame">the frame with the aspect ratio to be determined</param>
/// <returns>the guessed (valid) sample_aspect_ratio, 0/1 if no idea</returns>
public static AVRational av_guess_sample_aspect_ratio(AVFormatContext* @format, AVStream* @stream, AVFrame* @frame) => vectors.av_guess_sample_aspect_ratio(@format, @stream, @frame);

/// <summary>Send a nice hexadecimal dump of a buffer to the specified file stream.</summary>
/// <param name="f">The file stream pointer where the dump should be sent to.</param>
/// <param name="buf">buffer</param>
/// <param name="size">buffer size</param>
public static void av_hex_dump(_iobuf* @f, byte* @buf, int @size) => vectors.av_hex_dump(@f, @buf, @size);

/// <summary>Send a nice hexadecimal dump of a buffer to the log.</summary>
/// <param name="avcl">A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct.</param>
/// <param name="level">The importance level of the message, lower values signifying higher importance.</param>
/// <param name="buf">buffer</param>
/// <param name="size">buffer size</param>
public static void av_hex_dump_log(void* @avcl, int @level, byte* @buf, int @size) => vectors.av_hex_dump_log(@avcl, @level, @buf, @size);

/// <summary>Allocate an AVHWDeviceContext for a given hardware type.</summary>
/// <param name="type">the type of the hardware device to allocate.</param>
/// <returns>a reference to the newly created AVHWDeviceContext on success or NULL on failure.</returns>
public static AVBufferRef* av_hwdevice_ctx_alloc(AVHWDeviceType @type) => vectors.av_hwdevice_ctx_alloc(@type);

/// <summary>Open a device of the specified type and create an AVHWDeviceContext for it.</summary>
/// <param name="device_ctx">On success, a reference to the newly-created device context will be written here. The reference is owned by the caller and must be released with av_buffer_unref() when no longer needed. On failure, NULL will be written to this pointer.</param>
/// <param name="type">The type of the device to create.</param>
/// <param name="device">A type-specific string identifying the device to open.</param>
/// <param name="opts">A dictionary of additional (type-specific) options to use in opening the device. The dictionary remains owned by the caller.</param>
/// <param name="flags">currently unused</param>
/// <returns>0 on success, a negative AVERROR code on failure.</returns>
public static int av_hwdevice_ctx_create(AVBufferRef** @device_ctx, AVHWDeviceType @type, string @device, AVDictionary* @opts, int @flags) => vectors.av_hwdevice_ctx_create(@device_ctx, @type, @device, @opts, @flags);

/// <summary>Create a new device of the specified type from an existing device.</summary>
/// <param name="dst_ctx">On success, a reference to the newly-created AVHWDeviceContext.</param>
/// <param name="type">The type of the new device to create.</param>
/// <param name="src_ctx">A reference to an existing AVHWDeviceContext which will be used to create the new device.</param>
/// <param name="flags">Currently unused; should be set to zero.</param>
/// <returns>Zero on success, a negative AVERROR code on failure.</returns>
public static int av_hwdevice_ctx_create_derived(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, int @flags) => vectors.av_hwdevice_ctx_create_derived(@dst_ctx, @type, @src_ctx, @flags);

/// <summary>Create a new device of the specified type from an existing device.</summary>
/// <param name="dst_ctx">On success, a reference to the newly-created AVHWDeviceContext.</param>
/// <param name="type">The type of the new device to create.</param>
/// <param name="src_ctx">A reference to an existing AVHWDeviceContext which will be used to create the new device.</param>
/// <param name="options">Options for the new device to create, same format as in av_hwdevice_ctx_create.</param>
/// <param name="flags">Currently unused; should be set to zero.</param>
/// <returns>Zero on success, a negative AVERROR code on failure.</returns>
public static int av_hwdevice_ctx_create_derived_opts(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, AVDictionary* @options, int @flags) => vectors.av_hwdevice_ctx_create_derived_opts(@dst_ctx, @type, @src_ctx, @options, @flags);

/// <summary>Finalize the device context before use. This function must be called after the context is filled with all the required information and before it is used in any way.</summary>
/// <param name="ref">a reference to the AVHWDeviceContext</param>
/// <returns>0 on success, a negative AVERROR code on failure</returns>
public static int av_hwdevice_ctx_init(AVBufferRef* @ref) => vectors.av_hwdevice_ctx_init(@ref);

/// <summary>Look up an AVHWDeviceType by name.</summary>
/// <param name="name">String name of the device type (case-insensitive).</param>
/// <returns>The type from enum AVHWDeviceType, or AV_HWDEVICE_TYPE_NONE if not found.</returns>
public static AVHWDeviceType av_hwdevice_find_type_by_name(string @name) => vectors.av_hwdevice_find_type_by_name(@name);

/// <summary>Get the constraints on HW frames given a device and the HW-specific configuration to be used with that device. If no HW-specific configuration is provided, returns the maximum possible capabilities of the device.</summary>
/// <param name="ref">a reference to the associated AVHWDeviceContext.</param>
/// <param name="hwconfig">a filled HW-specific configuration structure, or NULL to return the maximum possible capabilities of the device.</param>
/// <returns>AVHWFramesConstraints structure describing the constraints on the device, or NULL if not available.</returns>
public static AVHWFramesConstraints* av_hwdevice_get_hwframe_constraints(AVBufferRef* @ref, void* @hwconfig) => vectors.av_hwdevice_get_hwframe_constraints(@ref, @hwconfig);

/// <summary>Get the string name of an AVHWDeviceType.</summary>
/// <param name="type">Type from enum AVHWDeviceType.</param>
/// <returns>Pointer to a static string containing the name, or NULL if the type is not valid.</returns>
public static string av_hwdevice_get_type_name(AVHWDeviceType @type) => vectors.av_hwdevice_get_type_name(@type);

/// <summary>Allocate a HW-specific configuration structure for a given HW device. After use, the user must free all members as required by the specific hardware structure being used, then free the structure itself with av_free().</summary>
/// <param name="device_ctx">a reference to the associated AVHWDeviceContext.</param>
/// <returns>The newly created HW-specific configuration structure on success or NULL on failure.</returns>
public static void* av_hwdevice_hwconfig_alloc(AVBufferRef* @device_ctx) => vectors.av_hwdevice_hwconfig_alloc(@device_ctx);

/// <summary>Iterate over supported device types.</summary>
/// <returns>The next usable device type from enum AVHWDeviceType, or AV_HWDEVICE_TYPE_NONE if there are no more.</returns>
public static AVHWDeviceType av_hwdevice_iterate_types(AVHWDeviceType @prev) => vectors.av_hwdevice_iterate_types(@prev);

/// <summary>Free an AVHWFrameConstraints structure.</summary>
/// <param name="constraints">The (filled or unfilled) AVHWFrameConstraints structure.</param>
public static void av_hwframe_constraints_free(AVHWFramesConstraints** @constraints) => vectors.av_hwframe_constraints_free(@constraints);

/// <summary>Allocate an AVHWFramesContext tied to a given device context.</summary>
/// <param name="device_ctx">a reference to a AVHWDeviceContext. This function will make a new reference for internal use, the one passed to the function remains owned by the caller.</param>
/// <returns>a reference to the newly created AVHWFramesContext on success or NULL on failure.</returns>
public static AVBufferRef* av_hwframe_ctx_alloc(AVBufferRef* @device_ctx) => vectors.av_hwframe_ctx_alloc(@device_ctx);

/// <summary>Create and initialise an AVHWFramesContext as a mapping of another existing AVHWFramesContext on a different device.</summary>
/// <param name="derived_frame_ctx">On success, a reference to the newly created AVHWFramesContext.</param>
/// <param name="derived_device_ctx">A reference to the device to create the new AVHWFramesContext on.</param>
/// <param name="source_frame_ctx">A reference to an existing AVHWFramesContext which will be mapped to the derived context.</param>
/// <param name="flags">Some combination of AV_HWFRAME_MAP_* flags, defining the mapping parameters to apply to frames which are allocated in the derived device.</param>
/// <returns>Zero on success, negative AVERROR code on failure.</returns>
public static int av_hwframe_ctx_create_derived(AVBufferRef** @derived_frame_ctx, AVPixelFormat @format, AVBufferRef* @derived_device_ctx, AVBufferRef* @source_frame_ctx, int @flags) => vectors.av_hwframe_ctx_create_derived(@derived_frame_ctx, @format, @derived_device_ctx, @source_frame_ctx, @flags);

/// <summary>Finalize the context before use. This function must be called after the context is filled with all the required information and before it is attached to any frames.</summary>
/// <param name="ref">a reference to the AVHWFramesContext</param>
/// <returns>0 on success, a negative AVERROR code on failure</returns>
public static int av_hwframe_ctx_init(AVBufferRef* @ref) => vectors.av_hwframe_ctx_init(@ref);

/// <summary>Allocate a new frame attached to the given AVHWFramesContext.</summary>
/// <param name="hwframe_ctx">a reference to an AVHWFramesContext</param>
/// <param name="frame">an empty (freshly allocated or unreffed) frame to be filled with newly allocated buffers.</param>
/// <param name="flags">currently unused, should be set to zero</param>
/// <returns>0 on success, a negative AVERROR code on failure</returns>
public static int av_hwframe_get_buffer(AVBufferRef* @hwframe_ctx, AVFrame* @frame, int @flags) => vectors.av_hwframe_get_buffer(@hwframe_ctx, @frame, @flags);

/// <summary>Map a hardware frame.</summary>
/// <param name="dst">Destination frame, to contain the mapping.</param>
/// <param name="src">Source frame, to be mapped.</param>
/// <param name="flags">Some combination of AV_HWFRAME_MAP_* flags.</param>
/// <returns>Zero on success, negative AVERROR code on failure.</returns>
public static int av_hwframe_map(AVFrame* @dst, AVFrame* @src, int @flags) => vectors.av_hwframe_map(@dst, @src, @flags);

/// <summary>Copy data to or from a hw surface. At least one of dst/src must have an AVHWFramesContext attached.</summary>
/// <param name="dst">the destination frame. dst is not touched on failure.</param>
/// <param name="src">the source frame.</param>
/// <param name="flags">currently unused, should be set to zero</param>
/// <returns>0 on success, a negative AVERROR error code on failure.</returns>
public static int av_hwframe_transfer_data(AVFrame* @dst, AVFrame* @src, int @flags) => vectors.av_hwframe_transfer_data(@dst, @src, @flags);

/// <summary>Get a list of possible source or target formats usable in av_hwframe_transfer_data().</summary>
/// <param name="hwframe_ctx">the frame context to obtain the information for</param>
/// <param name="dir">the direction of the transfer</param>
/// <param name="formats">the pointer to the output format list will be written here. The list is terminated with AV_PIX_FMT_NONE and must be freed by the caller when no longer needed using av_free(). If this function returns successfully, the format list will have at least one item (not counting the terminator). On failure, the contents of this pointer are unspecified.</param>
/// <param name="flags">currently unused, should be set to zero</param>
/// <returns>0 on success, a negative AVERROR code on failure.</returns>
public static int av_hwframe_transfer_get_formats(AVBufferRef* @hwframe_ctx, AVHWFrameTransferDirection @dir, AVPixelFormat** @formats, int @flags) => vectors.av_hwframe_transfer_get_formats(@hwframe_ctx, @dir, @formats, @flags);

/// <summary>Allocate an image with size w and h and pixel format pix_fmt, and fill pointers and linesizes accordingly. The allocated image buffer has to be freed by using av_freep(&amp;pointers[0]).</summary>
/// <param name="align">the value to use for buffer size alignment</param>
/// <returns>the size in bytes required for the image buffer, a negative error code in case of failure</returns>
public static int av_image_alloc(ref byte_ptrArray4 @pointers, ref int_array4 @linesizes, int @w, int @h, AVPixelFormat @pix_fmt, int @align) => vectors.av_image_alloc(ref @pointers, ref @linesizes, @w, @h, @pix_fmt, @align);

/// <summary>Check if the given sample aspect ratio of an image is valid.</summary>
/// <param name="w">width of the image</param>
/// <param name="h">height of the image</param>
/// <param name="sar">sample aspect ratio of the image</param>
/// <returns>0 if valid, a negative AVERROR code otherwise</returns>
public static int av_image_check_sar(uint @w, uint @h, AVRational @sar) => vectors.av_image_check_sar(@w, @h, @sar);

/// <summary>Check if the given dimension of an image is valid, meaning that all bytes of the image can be addressed with a signed int.</summary>
/// <param name="w">the width of the picture</param>
/// <param name="h">the height of the picture</param>
/// <param name="log_offset">the offset to sum to the log level for logging with log_ctx</param>
/// <param name="log_ctx">the parent logging context, it may be NULL</param>
/// <returns>&gt;= 0 if valid, a negative error code otherwise</returns>
public static int av_image_check_size(uint @w, uint @h, int @log_offset, void* @log_ctx) => vectors.av_image_check_size(@w, @h, @log_offset, @log_ctx);

/// <summary>Check if the given dimension of an image is valid, meaning that all bytes of a plane of an image with the specified pix_fmt can be addressed with a signed int.</summary>
/// <param name="w">the width of the picture</param>
/// <param name="h">the height of the picture</param>
/// <param name="max_pixels">the maximum number of pixels the user wants to accept</param>
/// <param name="pix_fmt">the pixel format, can be AV_PIX_FMT_NONE if unknown.</param>
/// <param name="log_offset">the offset to sum to the log level for logging with log_ctx</param>
/// <param name="log_ctx">the parent logging context, it may be NULL</param>
/// <returns>&gt;= 0 if valid, a negative error code otherwise</returns>
public static int av_image_check_size2(uint @w, uint @h, long @max_pixels, AVPixelFormat @pix_fmt, int @log_offset, void* @log_ctx) => vectors.av_image_check_size2(@w, @h, @max_pixels, @pix_fmt, @log_offset, @log_ctx);

/// <summary>Copy image in src_data to dst_data.</summary>
/// <param name="dst_linesizes">linesizes for the image in dst_data</param>
/// <param name="src_linesizes">linesizes for the image in src_data</param>
public static void av_image_copy(ref byte_ptrArray4 @dst_data, ref int_array4 @dst_linesizes, in byte_ptrArray4 @src_data, in int_array4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height) => vectors.av_image_copy(ref @dst_data, ref @dst_linesizes, @src_data, @src_linesizes, @pix_fmt, @width, @height);

/// <summary>Copy image plane from src to dst. That is, copy "height" number of lines of "bytewidth" bytes each. The first byte of each successive line is separated by *_linesize bytes.</summary>
/// <param name="dst_linesize">linesize for the image plane in dst</param>
/// <param name="src_linesize">linesize for the image plane in src</param>
public static void av_image_copy_plane(byte* @dst, int @dst_linesize, byte* @src, int @src_linesize, int @bytewidth, int @height) => vectors.av_image_copy_plane(@dst, @dst_linesize, @src, @src_linesize, @bytewidth, @height);

/// <summary>Copy image data located in uncacheable (e.g. GPU mapped) memory. Where available, this function will use special functionality for reading from such memory, which may result in greatly improved performance compared to plain av_image_copy_plane().</summary>
public static void av_image_copy_plane_uc_from(byte* @dst, long @dst_linesize, byte* @src, long @src_linesize, long @bytewidth, int @height) => vectors.av_image_copy_plane_uc_from(@dst, @dst_linesize, @src, @src_linesize, @bytewidth, @height);

/// <summary>Copy image data from an image into a buffer.</summary>
/// <param name="dst">a buffer into which picture data will be copied</param>
/// <param name="dst_size">the size in bytes of dst</param>
/// <param name="src_data">pointers containing the source image data</param>
/// <param name="src_linesize">linesizes for the image in src_data</param>
/// <param name="pix_fmt">the pixel format of the source image</param>
/// <param name="width">the width of the source image in pixels</param>
/// <param name="height">the height of the source image in pixels</param>
/// <param name="align">the assumed linesize alignment for dst</param>
/// <returns>the number of bytes written to dst, or a negative value (error code) on error</returns>
public static int av_image_copy_to_buffer(byte* @dst, int @dst_size, in byte_ptrArray4 @src_data, in int_array4 @src_linesize, AVPixelFormat @pix_fmt, int @width, int @height, int @align) => vectors.av_image_copy_to_buffer(@dst, @dst_size, @src_data, @src_linesize, @pix_fmt, @width, @height, @align);

/// <summary>Copy image data located in uncacheable (e.g. GPU mapped) memory. Where available, this function will use special functionality for reading from such memory, which may result in greatly improved performance compared to plain av_image_copy().</summary>
public static void av_image_copy_uc_from(ref byte_ptrArray4 @dst_data, in long_array4 @dst_linesizes, in byte_ptrArray4 @src_data, in long_array4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height) => vectors.av_image_copy_uc_from(ref @dst_data, @dst_linesizes, @src_data, @src_linesizes, @pix_fmt, @width, @height);

/// <summary>Setup the data pointers and linesizes based on the specified image parameters and the provided array.</summary>
/// <param name="dst_data">data pointers to be filled in</param>
/// <param name="dst_linesize">linesizes for the image in dst_data to be filled in</param>
/// <param name="src">buffer which will contain or contains the actual image data, can be NULL</param>
/// <param name="pix_fmt">the pixel format of the image</param>
/// <param name="width">the width of the image in pixels</param>
/// <param name="height">the height of the image in pixels</param>
/// <param name="align">the value used in src for linesize alignment</param>
/// <returns>the size in bytes required for src, a negative error code in case of failure</returns>
public static int av_image_fill_arrays(ref byte_ptrArray4 @dst_data, ref int_array4 @dst_linesize, byte* @src, AVPixelFormat @pix_fmt, int @width, int @height, int @align) => vectors.av_image_fill_arrays(ref @dst_data, ref @dst_linesize, @src, @pix_fmt, @width, @height, @align);

/// <summary>Overwrite the image data with black. This is suitable for filling a sub-rectangle of an image, meaning the padding between the right most pixel and the left most pixel on the next line will not be overwritten. For some formats, the image size might be rounded up due to inherent alignment.</summary>
/// <param name="dst_data">data pointers to destination image</param>
/// <param name="dst_linesize">linesizes for the destination image</param>
/// <param name="pix_fmt">the pixel format of the image</param>
/// <param name="range">the color range of the image (important for colorspaces such as YUV)</param>
/// <param name="width">the width of the image in pixels</param>
/// <param name="height">the height of the image in pixels</param>
/// <returns>0 if the image data was cleared, a negative AVERROR code otherwise</returns>
public static int av_image_fill_black(ref byte_ptrArray4 @dst_data, in long_array4 @dst_linesize, AVPixelFormat @pix_fmt, AVColorRange @range, int @width, int @height) => vectors.av_image_fill_black(ref @dst_data, @dst_linesize, @pix_fmt, @range, @width, @height);

/// <summary>Fill plane linesizes for an image with pixel format pix_fmt and width width.</summary>
/// <param name="linesizes">array to be filled with the linesize for each plane</param>
/// <returns>&gt;= 0 in case of success, a negative error code otherwise</returns>
public static int av_image_fill_linesizes(ref int_array4 @linesizes, AVPixelFormat @pix_fmt, int @width) => vectors.av_image_fill_linesizes(ref @linesizes, @pix_fmt, @width);

/// <summary>Compute the max pixel step for each plane of an image with a format described by pixdesc.</summary>
/// <param name="max_pixsteps">an array which is filled with the max pixel step for each plane. Since a plane may contain different pixel components, the computed max_pixsteps[plane] is relative to the component in the plane with the max pixel step.</param>
/// <param name="max_pixstep_comps">an array which is filled with the component for each plane which has the max pixel step. May be NULL.</param>
public static void av_image_fill_max_pixsteps(ref int_array4 @max_pixsteps, ref int_array4 @max_pixstep_comps, AVPixFmtDescriptor* @pixdesc) => vectors.av_image_fill_max_pixsteps(ref @max_pixsteps, ref @max_pixstep_comps, @pixdesc);

/// <summary>Fill plane sizes for an image with pixel format pix_fmt and height height.</summary>
/// <param name="size">the array to be filled with the size of each image plane</param>
/// <param name="linesizes">the array containing the linesize for each plane, should be filled by av_image_fill_linesizes()</param>
/// <returns>&gt;= 0 in case of success, a negative error code otherwise</returns>
public static int av_image_fill_plane_sizes(ref ulong_array4 @size, AVPixelFormat @pix_fmt, int @height, in long_array4 @linesizes) => vectors.av_image_fill_plane_sizes(ref @size, @pix_fmt, @height, @linesizes);

/// <summary>Fill plane data pointers for an image with pixel format pix_fmt and height height.</summary>
/// <param name="data">pointers array to be filled with the pointer for each image plane</param>
/// <param name="ptr">the pointer to a buffer which will contain the image</param>
/// <param name="linesizes">the array containing the linesize for each plane, should be filled by av_image_fill_linesizes()</param>
/// <returns>the size in bytes required for the image buffer, a negative error code in case of failure</returns>
public static int av_image_fill_pointers(ref byte_ptrArray4 @data, AVPixelFormat @pix_fmt, int @height, byte* @ptr, in int_array4 @linesizes) => vectors.av_image_fill_pointers(ref @data, @pix_fmt, @height, @ptr, @linesizes);

/// <summary>Return the size in bytes of the amount of data required to store an image with the given parameters.</summary>
/// <param name="pix_fmt">the pixel format of the image</param>
/// <param name="width">the width of the image in pixels</param>
/// <param name="height">the height of the image in pixels</param>
/// <param name="align">the assumed linesize alignment</param>
/// <returns>the buffer size in bytes, a negative error code in case of failure</returns>
public static int av_image_get_buffer_size(AVPixelFormat @pix_fmt, int @width, int @height, int @align) => vectors.av_image_get_buffer_size(@pix_fmt, @width, @height, @align);

/// <summary>Compute the size of an image line with format pix_fmt and width width for the plane plane.</summary>
/// <returns>the computed size in bytes</returns>
public static int av_image_get_linesize(AVPixelFormat @pix_fmt, int @width, int @plane) => vectors.av_image_get_linesize(@pix_fmt, @width, @plane);

/// <summary>Get the index for a specific timestamp.</summary>
/// <param name="st">stream that the timestamp belongs to</param>
/// <param name="timestamp">timestamp to retrieve the index for</param>
/// <param name="flags">if AVSEEK_FLAG_BACKWARD then the returned index will correspond to the timestamp which is &lt;= the requested one, if backward is 0, then it will be &gt;= if AVSEEK_FLAG_ANY seek to any frame, only keyframes otherwise</param>
/// <returns>&lt; 0 if no such timestamp could be found</returns>
public static int av_index_search_timestamp(AVStream* @st, long @timestamp, int @flags) => vectors.av_index_search_timestamp(@st, @timestamp, @flags);

/// <summary>Initialize optional fields of a packet with default values.</summary>
/// <param name="pkt">packet</param>
[Obsolete("This function is deprecated. Once it's removed, sizeof(AVPacket) will not be a part of the ABI anymore.")]
public static void av_init_packet(AVPacket* @pkt) => vectors.av_init_packet(@pkt);

/// <summary>Audio input devices iterator.</summary>
public static AVInputFormat* av_input_audio_device_next(AVInputFormat* @d) => vectors.av_input_audio_device_next(@d);

/// <summary>Video input devices iterator.</summary>
public static AVInputFormat* av_input_video_device_next(AVInputFormat* @d) => vectors.av_input_video_device_next(@d);

/// <summary>Compute the length of an integer list.</summary>
/// <param name="elsize">size in bytes of each list element (only 1, 2, 4 or 8)</param>
/// <param name="list">pointer to the list</param>
/// <param name="term">list terminator (usually 0 or -1)</param>
/// <returns>length of the list, in elements, not counting the terminator</returns>
public static uint av_int_list_length_for_size(uint @elsize, void* @list, ulong @term) => vectors.av_int_list_length_for_size(@elsize, @list, @term);

/// <summary>Write a packet to an output media file ensuring correct interleaving.</summary>
/// <param name="s">media file handle</param>
/// <param name="pkt">The packet containing the data to be written. If the packet is reference-counted, this function will take ownership of this reference and unreference it later when it sees fit. If the packet is not reference-counted, libavformat will make a copy. The returned packet will be blank (as if returned from av_packet_alloc()), even on error. This parameter can be NULL (at any time, not just at the end), to flush the interleaving queues. Packet's "stream_index" field must be set to the index of the corresponding stream in "s-&gt;streams". The timestamps ( "pts", "dts") must be set to correct values in the stream's timebase (unless the output format is flagged with the AVFMT_NOTIMESTAMPS flag, then they can be set to AV_NOPTS_VALUE). The dts for subsequent packets in one stream must be strictly increasing (unless the output format is flagged with the AVFMT_TS_NONSTRICT, then they merely have to be nondecreasing). "duration" should also be set if known.</param>
/// <returns>0 on success, a negative AVERROR on error.</returns>
public static int av_interleaved_write_frame(AVFormatContext* @s, AVPacket* @pkt) => vectors.av_interleaved_write_frame(@s, @pkt);

/// <summary>Write an uncoded frame to an output media file.</summary>
/// <returns>&gt;=0 for success, a negative code on error</returns>
public static int av_interleaved_write_uncoded_frame(AVFormatContext* @s, int @stream_index, AVFrame* @frame) => vectors.av_interleaved_write_uncoded_frame(@s, @stream_index, @frame);

/// <summary>Send the specified message to the log if the level is less than or equal to the current av_log_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function.</summary>
/// <param name="avcl">A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct or NULL if general log.</param>
/// <param name="level">The importance level of the message expressed using a "Logging Constant".</param>
/// <param name="fmt">The format string (printf-compatible) that specifies how subsequent arguments are converted to output.</param>
public static void av_log(void* @avcl, int @level, string @fmt) => vectors.av_log(@avcl, @level, @fmt);

/// <summary>Default logging callback</summary>
/// <param name="avcl">A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct.</param>
/// <param name="level">The importance level of the message expressed using a "Logging Constant".</param>
/// <param name="fmt">The format string (printf-compatible) that specifies how subsequent arguments are converted to output.</param>
/// <param name="vl">The arguments referenced by the format string.</param>
public static void av_log_default_callback(void* @avcl, int @level, string @fmt, byte* @vl) => vectors.av_log_default_callback(@avcl, @level, @fmt, @vl);

/// <summary>Format a line of log the same way as the default callback.</summary>
/// <param name="line">buffer to receive the formatted line</param>
/// <param name="line_size">size of the buffer</param>
/// <param name="print_prefix">used to store whether the prefix must be printed; must point to a persistent integer initially set to 1</param>
public static void av_log_format_line(void* @ptr, int @level, string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix) => vectors.av_log_format_line(@ptr, @level, @fmt, @vl, @line, @line_size, @print_prefix);

/// <summary>Format a line of log the same way as the default callback.</summary>
/// <param name="line">buffer to receive the formatted line; may be NULL if line_size is 0</param>
/// <param name="line_size">size of the buffer; at most line_size-1 characters will be written to the buffer, plus one null terminator</param>
/// <param name="print_prefix">used to store whether the prefix must be printed; must point to a persistent integer initially set to 1</param>
/// <returns>Returns a negative value if an error occurred, otherwise returns the number of characters that would have been written for a sufficiently large buffer, not including the terminating null character. If the return value is not less than line_size, it means that the log message was truncated to fit the buffer.</returns>
public static int av_log_format_line2(void* @ptr, int @level, string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix) => vectors.av_log_format_line2(@ptr, @level, @fmt, @vl, @line, @line_size, @print_prefix);

public static int av_log_get_flags() => vectors.av_log_get_flags();

/// <summary>Get the current log level</summary>
/// <returns>Current log level</returns>
public static int av_log_get_level() => vectors.av_log_get_level();

/// <summary>Send the specified message to the log once with the initial_level and then with the subsequent_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function.</summary>
/// <param name="avcl">A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct or NULL if general log.</param>
/// <param name="initial_level">importance level of the message expressed using a "Logging Constant" for the first occurrence.</param>
+ /// importance level of the message expressed using a "Logging Constant" after the first occurrence. + /// a variable to keep track of whether a message has already been printed this must be initialized to 0 before the first use. The same state must not be accessed by 2 Threads simultaneously. + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. + public static void av_log_once(void* @avcl, int @initial_level, int @subsequent_level, int* @state, string @fmt) => vectors.av_log_once(@avcl, @initial_level, @subsequent_level, @state, @fmt); + + /// Set the logging callback + /// A logging function with a compatible signature. + public static void av_log_set_callback(av_log_set_callback_callback_func @callback) => vectors.av_log_set_callback(@callback); + + public static void av_log_set_flags(int @arg) => vectors.av_log_set_flags(@arg); + + /// Set the log level + /// Logging level + public static void av_log_set_level(int @level) => vectors.av_log_set_level(@level); + + public static int av_log2(uint @v) => vectors.av_log2(@v); + + public static int av_log2_16bit(uint @v) => vectors.av_log2_16bit(@v); + + /// Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU). + /// Size in bytes for the memory block to be allocated + /// Pointer to the allocated block, or `NULL` if the block cannot be allocated + public static void* av_malloc(ulong @size) => vectors.av_malloc(@size); + + /// Allocate a memory block for an array with av_malloc(). + /// Number of elements + /// Size of a single element + /// Pointer to the allocated block, or `NULL` if the block cannot be allocated + public static void* av_malloc_array(ulong @nmemb, ulong @size) => vectors.av_malloc_array(@nmemb, @size); + + /// Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU) and zero all the bytes of the block. 
+ /// Size in bytes for the memory block to be allocated + /// Pointer to the allocated block, or `NULL` if it cannot be allocated + public static void* av_mallocz(ulong @size) => vectors.av_mallocz(@size); + + [Obsolete("use av_calloc()")] + public static void* av_mallocz_array(ulong @nmemb, ulong @size) => vectors.av_mallocz_array(@nmemb, @size); + + /// Allocate an AVMasteringDisplayMetadata structure and set its fields to default values. The resulting struct can be freed using av_freep(). + /// An AVMasteringDisplayMetadata filled with default values or NULL on failure. + public static AVMasteringDisplayMetadata* av_mastering_display_metadata_alloc() => vectors.av_mastering_display_metadata_alloc(); + + /// Allocate a complete AVMasteringDisplayMetadata and add it to the frame. + /// The frame which side data is added to. + /// The AVMasteringDisplayMetadata structure to be filled by caller. + public static AVMasteringDisplayMetadata* av_mastering_display_metadata_create_side_data(AVFrame* @frame) => vectors.av_mastering_display_metadata_create_side_data(@frame); + + /// Return a positive value if the given filename has one of the given extensions, 0 otherwise. + /// file name to check against the given extensions + /// a comma-separated list of filename extensions + public static int av_match_ext(string @filename, string @extensions) => vectors.av_match_ext(@filename, @extensions); + + /// Set the maximum size that may be allocated in one block. + /// Value to be set as the new maximum size + public static void av_max_alloc(ulong @max) => vectors.av_max_alloc(@max); + + /// Overlapping memcpy() implementation. + /// Destination buffer + /// Number of bytes back to start copying (i.e. the initial size of the overlapping window); must be > 0 + /// Number of bytes to copy; must be >= 0 + public static void av_memcpy_backptr(byte* @dst, int @back, int @cnt) => vectors.av_memcpy_backptr(@dst, @back, @cnt); + + /// Duplicate a buffer with av_malloc(). 
+ /// Buffer to be duplicated + /// Size in bytes of the buffer copied + /// Pointer to a newly allocated buffer containing a copy of `p` or `NULL` if the buffer cannot be allocated + public static void* av_memdup(void* @p, ulong @size) => vectors.av_memdup(@p, @size); + + /// Multiply two rationals. + /// First rational + /// Second rational + /// b*c + public static AVRational av_mul_q(AVRational @b, AVRational @c) => vectors.av_mul_q(@b, @c); + + /// Iterate over all registered muxers. + /// a pointer where libavformat will store the iteration state. Must point to NULL to start the iteration. + /// the next registered muxer or NULL when the iteration is finished + public static AVOutputFormat* av_muxer_iterate(void** @opaque) => vectors.av_muxer_iterate(@opaque); + + /// Find which of the two rationals is closer to another rational. + /// Rational to be compared against + /// One of the following values: - 1 if `q1` is nearer to `q` than `q2` - -1 if `q2` is nearer to `q` than `q1` - 0 if they have the same distance + public static int av_nearer_q(AVRational @q, AVRational @q1, AVRational @q2) => vectors.av_nearer_q(@q, @q1, @q2); + + /// Allocate the payload of a packet and initialize its fields with default values. + /// packet + /// wanted payload size + /// 0 if OK, AVERROR_xxx otherwise + public static int av_new_packet(AVPacket* @pkt, int @size) => vectors.av_new_packet(@pkt, @size); + + public static AVProgram* av_new_program(AVFormatContext* @s, int @id) => vectors.av_new_program(@s, @id); + + /// Iterate over potential AVOptions-enabled children of parent. + /// a pointer where iteration state is stored. + /// AVClass corresponding to next potential child or NULL + public static AVClass* av_opt_child_class_iterate(AVClass* @parent, void** @iter) => vectors.av_opt_child_class_iterate(@parent, @iter); + + /// Iterate over AVOptions-enabled children of obj. 
+ /// result of a previous call to this function or NULL + /// next AVOptions-enabled child or NULL + public static void* av_opt_child_next(void* @obj, void* @prev) => vectors.av_opt_child_next(@obj, @prev); + + /// Copy options from src object into dest object. + /// Object to copy from + /// Object to copy into + /// 0 on success, negative on error + public static int av_opt_copy(void* @dest, void* @src) => vectors.av_opt_copy(@dest, @src); + + public static int av_opt_eval_double(void* @obj, AVOption* @o, string @val, double* @double_out) => vectors.av_opt_eval_double(@obj, @o, @val, @double_out); + + /// @{ This group of functions can be used to evaluate option strings and get numbers out of them. They do the same thing as av_opt_set(), except the result is written into the caller-supplied pointer. + /// a struct whose first element is a pointer to AVClass. + /// an option for which the string is to be evaluated. + /// string to be evaluated. + /// 0 on success, a negative number on failure. + public static int av_opt_eval_flags(void* @obj, AVOption* @o, string @val, int* @flags_out) => vectors.av_opt_eval_flags(@obj, @o, @val, @flags_out); + + public static int av_opt_eval_float(void* @obj, AVOption* @o, string @val, float* @float_out) => vectors.av_opt_eval_float(@obj, @o, @val, @float_out); + + public static int av_opt_eval_int(void* @obj, AVOption* @o, string @val, int* @int_out) => vectors.av_opt_eval_int(@obj, @o, @val, @int_out); + + public static int av_opt_eval_int64(void* @obj, AVOption* @o, string @val, long* @int64_out) => vectors.av_opt_eval_int64(@obj, @o, @val, @int64_out); + + public static int av_opt_eval_q(void* @obj, AVOption* @o, string @val, AVRational* @q_out) => vectors.av_opt_eval_q(@obj, @o, @val, @q_out); + + /// Look for an option in an object. Consider only options which have all the specified flags set. + /// A pointer to a struct whose first element is a pointer to an AVClass. 
Alternatively a double pointer to an AVClass, if AV_OPT_SEARCH_FAKE_OBJ search flag is set. + /// The name of the option to look for. + /// When searching for named constants, name of the unit it belongs to. + /// Find only options with all the specified flags set (AV_OPT_FLAG). + /// A combination of AV_OPT_SEARCH_*. + /// A pointer to the option found, or NULL if no option was found. + public static AVOption* av_opt_find(void* @obj, string @name, string @unit, int @opt_flags, int @search_flags) => vectors.av_opt_find(@obj, @name, @unit, @opt_flags, @search_flags); + + /// Look for an option in an object. Consider only options which have all the specified flags set. + /// A pointer to a struct whose first element is a pointer to an AVClass. Alternatively a double pointer to an AVClass, if AV_OPT_SEARCH_FAKE_OBJ search flag is set. + /// The name of the option to look for. + /// When searching for named constants, name of the unit it belongs to. + /// Find only options with all the specified flags set (AV_OPT_FLAG). + /// A combination of AV_OPT_SEARCH_*. + /// if non-NULL, an object to which the option belongs will be written here. It may be different from obj if AV_OPT_SEARCH_CHILDREN is present in search_flags. This parameter is ignored if search_flags contain AV_OPT_SEARCH_FAKE_OBJ. + /// A pointer to the option found, or NULL if no option was found. + public static AVOption* av_opt_find2(void* @obj, string @name, string @unit, int @opt_flags, int @search_flags, void** @target_obj) => vectors.av_opt_find2(@obj, @name, @unit, @opt_flags, @search_flags, @target_obj); + + /// Check whether a particular flag is set in a flags field. + /// the name of the flag field option + /// the name of the flag to check + /// non-zero if the flag is set, zero if the flag isn't set, isn't of the right type, or the flags field doesn't exist. 
+ public static int av_opt_flag_is_set(void* @obj, string @field_name, string @flag_name) => vectors.av_opt_flag_is_set(@obj, @field_name, @flag_name); + + /// Free all allocated objects in obj. + public static void av_opt_free(void* @obj) => vectors.av_opt_free(@obj); + + /// Free an AVOptionRanges struct and set it to NULL. + public static void av_opt_freep_ranges(AVOptionRanges** @ranges) => vectors.av_opt_freep_ranges(@ranges); + + /// @{ Those functions get a value of the option with the given name from an object. + /// a struct whose first element is a pointer to an AVClass. + /// name of the option to get. + /// flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN is passed here, then the option may be found in a child of obj. + /// value of the option will be written here + /// >=0 on success, a negative error code otherwise + public static int av_opt_get(void* @obj, string @name, int @search_flags, byte** @out_val) => vectors.av_opt_get(@obj, @name, @search_flags, @out_val); + + [Obsolete()] + public static int av_opt_get_channel_layout(void* @obj, string @name, int @search_flags, long* @ch_layout) => vectors.av_opt_get_channel_layout(@obj, @name, @search_flags, @ch_layout); + + public static int av_opt_get_chlayout(void* @obj, string @name, int @search_flags, AVChannelLayout* @layout) => vectors.av_opt_get_chlayout(@obj, @name, @search_flags, @layout); + + /// The returned dictionary is a copy of the actual value and must be freed with av_dict_free() by the caller + public static int av_opt_get_dict_val(void* @obj, string @name, int @search_flags, AVDictionary** @out_val) => vectors.av_opt_get_dict_val(@obj, @name, @search_flags, @out_val); + + public static int av_opt_get_double(void* @obj, string @name, int @search_flags, double* @out_val) => vectors.av_opt_get_double(@obj, @name, @search_flags, @out_val); + + public static int av_opt_get_image_size(void* @obj, string @name, int @search_flags, int* @w_out, int* @h_out) => 
vectors.av_opt_get_image_size(@obj, @name, @search_flags, @w_out, @h_out); + + public static int av_opt_get_int(void* @obj, string @name, int @search_flags, long* @out_val) => vectors.av_opt_get_int(@obj, @name, @search_flags, @out_val); + + /// Extract a key-value pair from the beginning of a string. + /// pointer to the options string, will be updated to point to the rest of the string (one of the pairs_sep or the final NUL) + /// a 0-terminated list of characters used to separate key from value, for example '=' + /// a 0-terminated list of characters used to separate two pairs from each other, for example ':' or ',' + /// flags; see the AV_OPT_FLAG_* values below + /// parsed key; must be freed using av_free() + /// parsed value; must be freed using av_free() + /// >=0 for success, or a negative value corresponding to an AVERROR code in case of error; in particular: AVERROR(EINVAL) if no key is present + public static int av_opt_get_key_value(byte** @ropts, string @key_val_sep, string @pairs_sep, uint @flags, byte** @rkey, byte** @rval) => vectors.av_opt_get_key_value(@ropts, @key_val_sep, @pairs_sep, @flags, @rkey, @rval); + + public static int av_opt_get_pixel_fmt(void* @obj, string @name, int @search_flags, AVPixelFormat* @out_fmt) => vectors.av_opt_get_pixel_fmt(@obj, @name, @search_flags, @out_fmt); + + public static int av_opt_get_q(void* @obj, string @name, int @search_flags, AVRational* @out_val) => vectors.av_opt_get_q(@obj, @name, @search_flags, @out_val); + + public static int av_opt_get_sample_fmt(void* @obj, string @name, int @search_flags, AVSampleFormat* @out_fmt) => vectors.av_opt_get_sample_fmt(@obj, @name, @search_flags, @out_fmt); + + public static int av_opt_get_video_rate(void* @obj, string @name, int @search_flags, AVRational* @out_val) => vectors.av_opt_get_video_rate(@obj, @name, @search_flags, @out_val); + + /// Check if given option is set to its default value. 
+ /// AVClass object to check option on + /// option to be checked + /// >0 when option is set to its default, 0 when option is not set its default, < 0 on error + public static int av_opt_is_set_to_default(void* @obj, AVOption* @o) => vectors.av_opt_is_set_to_default(@obj, @o); + + /// Check if given option is set to its default value. + /// AVClass object to check option on + /// option name + /// combination of AV_OPT_SEARCH_* + /// >0 when option is set to its default, 0 when option is not set its default, < 0 on error + public static int av_opt_is_set_to_default_by_name(void* @obj, string @name, int @search_flags) => vectors.av_opt_is_set_to_default_by_name(@obj, @name, @search_flags); + + /// Iterate over all AVOptions belonging to obj. + /// an AVOptions-enabled struct or a double pointer to an AVClass describing it. + /// result of the previous call to av_opt_next() on this object or NULL + /// next AVOption or NULL + public static AVOption* av_opt_next(void* @obj, AVOption* @prev) => vectors.av_opt_next(@obj, @prev); + + /// @} + public static void* av_opt_ptr(AVClass* @avclass, void* @obj, string @name) => vectors.av_opt_ptr(@avclass, @obj, @name); + + /// Get a list of allowed ranges for the given option. + /// is a bitmask of flags, undefined flags should not be set and should be ignored AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to a AVClass instead of a full instance AV_OPT_MULTI_COMPONENT_RANGE indicates that function may return more than one component, + /// number of compontents returned on success, a negative errro code otherwise + public static int av_opt_query_ranges(AVOptionRanges** @p0, void* @obj, string @key, int @flags) => vectors.av_opt_query_ranges(@p0, @obj, @key, @flags); + + /// Get a default list of allowed ranges for the given option. 
+ /// is a bitmask of flags, undefined flags should not be set and should be ignored AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to a AVClass instead of a full instance AV_OPT_MULTI_COMPONENT_RANGE indicates that function may return more than one component, + /// number of components returned on success, a negative error code otherwise + public static int av_opt_query_ranges_default(AVOptionRanges** @p0, void* @obj, string @key, int @flags) => vectors.av_opt_query_ranges_default(@p0, @obj, @key, @flags); + + /// Serialize object's options. + /// AVClass object to serialize + /// serialize options with all the specified flags set (AV_OPT_FLAG) + /// combination of AV_OPT_SERIALIZE_* flags + /// Pointer to buffer that will be allocated with string containing serialized options. Buffer must be freed by the caller when it is no longer needed. + /// character used to separate key from value + /// character used to separate two pairs from each other + /// >= 0 on success, negative on error + public static int av_opt_serialize(void* @obj, int @opt_flags, int @flags, byte** @buffer, byte @key_val_sep, byte @pairs_sep) => vectors.av_opt_serialize(@obj, @opt_flags, @flags, @buffer, @key_val_sep, @pairs_sep); + + /// @{ Those functions set the field of obj with the given name to value. + /// A struct whose first element is a pointer to an AVClass. + /// the name of the field to set + /// The value to set. In case of av_opt_set() if the field is not of a string type, then the given string is parsed. SI postfixes and some named scalars are supported. If the field is of a numeric type, it has to be a numeric or named scalar. Behavior with more than one scalar and +- infix operators is undefined. If the field is of a flags type, it has to be a sequence of numeric scalars or named flags separated by '+' or '-'. Prefixing a flag with '+' causes it to be set without affecting the other flags; similarly, '-' unsets a flag. 
If the field is of a dictionary type, it has to be a ':' separated list of key=value parameters. Values containing ':' special characters must be escaped. + /// flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN is passed here, then the option may be set on a child of obj. + /// 0 if the value has been set, or an AVERROR code in case of error: AVERROR_OPTION_NOT_FOUND if no matching option exists AVERROR(ERANGE) if the value is out of range AVERROR(EINVAL) if the value is not valid + public static int av_opt_set(void* @obj, string @name, string @val, int @search_flags) => vectors.av_opt_set(@obj, @name, @val, @search_flags); + + public static int av_opt_set_bin(void* @obj, string @name, byte* @val, int @size, int @search_flags) => vectors.av_opt_set_bin(@obj, @name, @val, @size, @search_flags); + + [Obsolete()] + public static int av_opt_set_channel_layout(void* @obj, string @name, long @ch_layout, int @search_flags) => vectors.av_opt_set_channel_layout(@obj, @name, @ch_layout, @search_flags); + + public static int av_opt_set_chlayout(void* @obj, string @name, AVChannelLayout* @layout, int @search_flags) => vectors.av_opt_set_chlayout(@obj, @name, @layout, @search_flags); + + /// Set the values of all AVOption fields to their default values. + /// an AVOption-enabled struct (its first member must be a pointer to AVClass) + public static void av_opt_set_defaults(void* @s) => vectors.av_opt_set_defaults(@s); + + /// Set the values of all AVOption fields to their default values. Only these AVOption fields for which (opt->flags & mask) == flags will have their default applied to s. + /// an AVOption-enabled struct (its first member must be a pointer to AVClass) + /// combination of AV_OPT_FLAG_* + /// combination of AV_OPT_FLAG_* + public static void av_opt_set_defaults2(void* @s, int @mask, int @flags) => vectors.av_opt_set_defaults2(@s, @mask, @flags); + + /// Set all the options from a given dictionary on an object. 
+ /// a struct whose first element is a pointer to AVClass + /// options to process. This dictionary will be freed and replaced by a new one containing all options not found in obj. Of course this new dictionary needs to be freed by caller with av_dict_free(). + /// 0 on success, a negative AVERROR if some option was found in obj, but could not be set. + public static int av_opt_set_dict(void* @obj, AVDictionary** @options) => vectors.av_opt_set_dict(@obj, @options); + + public static int av_opt_set_dict_val(void* @obj, string @name, AVDictionary* @val, int @search_flags) => vectors.av_opt_set_dict_val(@obj, @name, @val, @search_flags); + + /// Set all the options from a given dictionary on an object. + /// a struct whose first element is a pointer to AVClass + /// options to process. This dictionary will be freed and replaced by a new one containing all options not found in obj. Of course this new dictionary needs to be freed by caller with av_dict_free(). + /// A combination of AV_OPT_SEARCH_*. + /// 0 on success, a negative AVERROR if some option was found in obj, but could not be set. + public static int av_opt_set_dict2(void* @obj, AVDictionary** @options, int @search_flags) => vectors.av_opt_set_dict2(@obj, @options, @search_flags); + + public static int av_opt_set_double(void* @obj, string @name, double @val, int @search_flags) => vectors.av_opt_set_double(@obj, @name, @val, @search_flags); + + /// Parse the key-value pairs list in opts. For each key=value pair found, set the value of the corresponding option in ctx. 
+ /// the AVClass object to set options on + /// the options string, key-value pairs separated by a delimiter + /// a NULL-terminated array of options names for shorthand notation: if the first field in opts has no key part, the key is taken from the first element of shorthand; then again for the second, etc., until either opts is finished, shorthand is finished or a named option is found; after that, all options must be named + /// a 0-terminated list of characters used to separate key from value, for example '=' + /// a 0-terminated list of characters used to separate two pairs from each other, for example ':' or ',' + /// the number of successfully set key=value pairs, or a negative value corresponding to an AVERROR code in case of error: AVERROR(EINVAL) if opts cannot be parsed, the error code issued by av_set_string3() if a key/value pair cannot be set + public static int av_opt_set_from_string(void* @ctx, string @opts, byte** @shorthand, string @key_val_sep, string @pairs_sep) => vectors.av_opt_set_from_string(@ctx, @opts, @shorthand, @key_val_sep, @pairs_sep); + + public static int av_opt_set_image_size(void* @obj, string @name, int @w, int @h, int @search_flags) => vectors.av_opt_set_image_size(@obj, @name, @w, @h, @search_flags); + + public static int av_opt_set_int(void* @obj, string @name, long @val, int @search_flags) => vectors.av_opt_set_int(@obj, @name, @val, @search_flags); + + public static int av_opt_set_pixel_fmt(void* @obj, string @name, AVPixelFormat @fmt, int @search_flags) => vectors.av_opt_set_pixel_fmt(@obj, @name, @fmt, @search_flags); + + public static int av_opt_set_q(void* @obj, string @name, AVRational @val, int @search_flags) => vectors.av_opt_set_q(@obj, @name, @val, @search_flags); + + public static int av_opt_set_sample_fmt(void* @obj, string @name, AVSampleFormat @fmt, int @search_flags) => vectors.av_opt_set_sample_fmt(@obj, @name, @fmt, @search_flags); + + public static int av_opt_set_video_rate(void* @obj, string @name, 
AVRational @val, int @search_flags) => vectors.av_opt_set_video_rate(@obj, @name, @val, @search_flags); + + /// Show the obj options. + /// log context to use for showing the options + /// requested flags for the options to show. Show only the options for which it is opt->flags & req_flags. + /// rejected flags for the options to show. Show only the options for which it is !(opt->flags & req_flags). + public static int av_opt_show2(void* @obj, void* @av_log_obj, int @req_flags, int @rej_flags) => vectors.av_opt_show2(@obj, @av_log_obj, @req_flags, @rej_flags); + + /// Audio output devices iterator. + public static AVOutputFormat* av_output_audio_device_next(AVOutputFormat* @d) => vectors.av_output_audio_device_next(@d); + + /// Video output devices iterator. + public static AVOutputFormat* av_output_video_device_next(AVOutputFormat* @d) => vectors.av_output_video_device_next(@d); + + /// Wrap an existing array as a packet side data. + /// packet + /// side information type + /// the side data array. It must be allocated with the av_malloc() family of functions. The ownership of the data is transferred to pkt. + /// side information size + /// a non-negative number on success, a negative AVERROR code on failure. On failure, the packet is unchanged and the data remains owned by the caller. + public static int av_packet_add_side_data(AVPacket* @pkt, AVPacketSideDataType @type, byte* @data, ulong @size) => vectors.av_packet_add_side_data(@pkt, @type, @data, @size); + + /// Allocate an AVPacket and set its fields to default values. The resulting struct must be freed using av_packet_free(). + /// An AVPacket filled with default values or NULL on failure. + public static AVPacket* av_packet_alloc() => vectors.av_packet_alloc(); + + /// Create a new packet that references the same data as src. + /// newly created AVPacket on success, NULL on error. 
+ public static AVPacket* av_packet_clone(AVPacket* @src) => vectors.av_packet_clone(@src); + + /// Copy only "properties" fields from src to dst. + /// Destination packet + /// Source packet + /// 0 on success AVERROR on failure. + public static int av_packet_copy_props(AVPacket* @dst, AVPacket* @src) => vectors.av_packet_copy_props(@dst, @src); + + /// Free the packet, if the packet is reference counted, it will be unreferenced first. + /// packet to be freed. The pointer will be set to NULL. + public static void av_packet_free(AVPacket** @pkt) => vectors.av_packet_free(@pkt); + + /// Convenience function to free all the side data stored. All the other fields stay untouched. + /// packet + public static void av_packet_free_side_data(AVPacket* @pkt) => vectors.av_packet_free_side_data(@pkt); + + /// Initialize a reference-counted packet from av_malloc()ed data. + /// packet to be initialized. This function will set the data, size, and buf fields, all others are left untouched. + /// Data allocated by av_malloc() to be used as packet data. If this function returns successfully, the data is owned by the underlying AVBuffer. The caller may not access the data through other means. + /// size of data in bytes, without the padding. I.e. the full buffer size is assumed to be size + AV_INPUT_BUFFER_PADDING_SIZE. + /// 0 on success, a negative AVERROR on error + public static int av_packet_from_data(AVPacket* @pkt, byte* @data, int @size) => vectors.av_packet_from_data(@pkt, @data, @size); + + /// Get side information from packet. + /// packet + /// desired side information type + /// If supplied, *size will be set to the size of the side data or to zero if the desired side data is not present. 
+ /// pointer to data if present or NULL otherwise + public static byte* av_packet_get_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong* @size) => vectors.av_packet_get_side_data(@pkt, @type, @size); + + /// Ensure the data described by a given packet is reference counted. + /// packet whose data should be made reference counted. + /// 0 on success, a negative AVERROR on error. On failure, the packet is unchanged. + public static int av_packet_make_refcounted(AVPacket* @pkt) => vectors.av_packet_make_refcounted(@pkt); + + /// Create a writable reference for the data described by a given packet, avoiding data copy if possible. + /// Packet whose data should be made writable. + /// 0 on success, a negative AVERROR on failure. On failure, the packet is unchanged. + public static int av_packet_make_writable(AVPacket* @pkt) => vectors.av_packet_make_writable(@pkt); + + /// Move every field in src to dst and reset src. + /// Destination packet + /// Source packet, will be reset + public static void av_packet_move_ref(AVPacket* @dst, AVPacket* @src) => vectors.av_packet_move_ref(@dst, @src); + + /// Allocate new information of a packet. + /// packet + /// side information type + /// side information size + /// pointer to fresh allocated data or NULL otherwise + public static byte* av_packet_new_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size) => vectors.av_packet_new_side_data(@pkt, @type, @size); + + /// Pack a dictionary for use in side_data. + /// The dictionary to pack. + /// pointer to store the size of the returned data + /// pointer to data if successful, NULL otherwise + public static byte* av_packet_pack_dictionary(AVDictionary* @dict, ulong* @size) => vectors.av_packet_pack_dictionary(@dict, @size); + + /// Setup a new reference to the data described by a given packet + /// Destination packet. Will be completely overwritten. + /// Source packet + /// 0 on success, a negative AVERROR on error. 
On error, dst will be blank (as if returned by av_packet_alloc()). + public static int av_packet_ref(AVPacket* @dst, AVPacket* @src) => vectors.av_packet_ref(@dst, @src); + + /// Convert valid timing fields (timestamps / durations) in a packet from one timebase to another. Timestamps with unknown values (AV_NOPTS_VALUE) will be ignored. + /// packet on which the conversion will be performed + /// source timebase, in which the timing fields in pkt are expressed + /// destination timebase, to which the timing fields will be converted + public static void av_packet_rescale_ts(AVPacket* @pkt, AVRational @tb_src, AVRational @tb_dst) => vectors.av_packet_rescale_ts(@pkt, @tb_src, @tb_dst); + + /// Shrink the already allocated side data buffer + /// packet + /// side information type + /// new side information size + /// 0 on success, < 0 on failure + public static int av_packet_shrink_side_data(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size) => vectors.av_packet_shrink_side_data(@pkt, @type, @size); + + public static string av_packet_side_data_name(AVPacketSideDataType @type) => vectors.av_packet_side_data_name(@type); + + /// Unpack a dictionary from side_data. + /// data from side_data + /// size of the data + /// the metadata storage dictionary + /// 0 on success, < 0 on failure + public static int av_packet_unpack_dictionary(byte* @data, ulong @size, AVDictionary** @dict) => vectors.av_packet_unpack_dictionary(@data, @size, @dict); + + /// Wipe the packet. + /// The packet to be unreferenced. + public static void av_packet_unref(AVPacket* @pkt) => vectors.av_packet_unref(@pkt); + + /// Parse CPU caps from a string and update the given AV_CPU_* flags based on that. + /// negative on error. 
+ public static int av_parse_cpu_caps(uint* @flags, string @s) => vectors.av_parse_cpu_caps(@flags, @s); + + public static void av_parser_close(AVCodecParserContext* @s) => vectors.av_parser_close(@s); + + public static AVCodecParserContext* av_parser_init(int @codec_id) => vectors.av_parser_init(@codec_id); + + /// Iterate over all registered codec parsers. + /// a pointer where libavcodec will store the iteration state. Must point to NULL to start the iteration. + /// the next registered codec parser or NULL when the iteration is finished + public static AVCodecParser* av_parser_iterate(void** @opaque) => vectors.av_parser_iterate(@opaque); + + /// Parse a packet. + /// parser context. + /// codec context. + /// set to pointer to parsed buffer or NULL if not yet finished. + /// set to size of parsed buffer or zero if not yet finished. + /// input buffer. + /// buffer size in bytes without the padding. I.e. the full buffer size is assumed to be buf_size + AV_INPUT_BUFFER_PADDING_SIZE. To signal EOF, this should be 0 (so that the last frame can be output). + /// input presentation timestamp. + /// input decoding timestamp. + /// input byte position in stream. + /// the number of bytes of the input bitstream used. + public static int av_parser_parse2(AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size, long @pts, long @dts, long @pos) => vectors.av_parser_parse2(@s, @avctx, @poutbuf, @poutbuf_size, @buf, @buf_size, @pts, @dts, @pos); + + /// Returns number of planes in pix_fmt, a negative AVERROR if pix_fmt is not a valid pixel format. + /// number of planes in pix_fmt, a negative AVERROR if pix_fmt is not a valid pixel format. + public static int av_pix_fmt_count_planes(AVPixelFormat @pix_fmt) => vectors.av_pix_fmt_count_planes(@pix_fmt); + + /// Returns a pixel format descriptor for provided pixel format or NULL if this pixel format is unknown. 
+ /// a pixel format descriptor for provided pixel format or NULL if this pixel format is unknown. + public static AVPixFmtDescriptor* av_pix_fmt_desc_get(AVPixelFormat @pix_fmt) => vectors.av_pix_fmt_desc_get(@pix_fmt); + + /// Returns an AVPixelFormat id described by desc, or AV_PIX_FMT_NONE if desc is not a valid pointer to a pixel format descriptor. + /// an AVPixelFormat id described by desc, or AV_PIX_FMT_NONE if desc is not a valid pointer to a pixel format descriptor. + public static AVPixelFormat av_pix_fmt_desc_get_id(AVPixFmtDescriptor* @desc) => vectors.av_pix_fmt_desc_get_id(@desc); + + /// Iterate over all pixel format descriptors known to libavutil. + /// previous descriptor. NULL to get the first descriptor. + /// next descriptor or NULL after the last descriptor + public static AVPixFmtDescriptor* av_pix_fmt_desc_next(AVPixFmtDescriptor* @prev) => vectors.av_pix_fmt_desc_next(@prev); + + /// Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor. + /// the pixel format + /// store log2_chroma_w (horizontal/width shift) + /// store log2_chroma_h (vertical/height shift) + /// 0 on success, AVERROR(ENOSYS) on invalid or unknown pixel format + public static int av_pix_fmt_get_chroma_sub_sample(AVPixelFormat @pix_fmt, int* @h_shift, int* @v_shift) => vectors.av_pix_fmt_get_chroma_sub_sample(@pix_fmt, @h_shift, @v_shift); + + /// Utility function to swap the endianness of a pixel format. + /// the pixel format + /// pixel format with swapped endianness if it exists, otherwise AV_PIX_FMT_NONE + public static AVPixelFormat av_pix_fmt_swap_endianness(AVPixelFormat @pix_fmt) => vectors.av_pix_fmt_swap_endianness(@pix_fmt); + + /// Send a nice dump of a packet to the log. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message, lower values signifying higher importance. 
+ /// packet to dump + /// True if the payload must be displayed, too. + /// AVStream that the packet belongs to + public static void av_pkt_dump_log2(void* @avcl, int @level, AVPacket* @pkt, int @dump_payload, AVStream* @st) => vectors.av_pkt_dump_log2(@avcl, @level, @pkt, @dump_payload, @st); + + /// Send a nice dump of a packet to the specified file stream. + /// The file stream pointer where the dump should be sent to. + /// packet to dump + /// True if the payload must be displayed, too. + /// AVStream that the packet belongs to + public static void av_pkt_dump2(_iobuf* @f, AVPacket* @pkt, int @dump_payload, AVStream* @st) => vectors.av_pkt_dump2(@f, @pkt, @dump_payload, @st); + + /// Like av_probe_input_buffer2() but returns 0 on success + public static int av_probe_input_buffer(AVIOContext* @pb, AVInputFormat** @fmt, string @url, void* @logctx, uint @offset, uint @max_probe_size) => vectors.av_probe_input_buffer(@pb, @fmt, @url, @logctx, @offset, @max_probe_size); + + /// Probe a bytestream to determine the input format. Each time a probe returns with a score that is too low, the probe buffer size is increased and another attempt is made. When the maximum probe size is reached, the input format with the highest score is returned. + /// the bytestream to probe + /// the input format is put here + /// the url of the stream + /// the log context + /// the offset within the bytestream to probe from + /// the maximum probe buffer size (zero for default) + /// the score in case of success, a negative value corresponding to an the maximal score is AVPROBE_SCORE_MAX AVERROR code otherwise + public static int av_probe_input_buffer2(AVIOContext* @pb, AVInputFormat** @fmt, string @url, void* @logctx, uint @offset, uint @max_probe_size) => vectors.av_probe_input_buffer2(@pb, @fmt, @url, @logctx, @offset, @max_probe_size); + + /// Guess the file format. 
+ /// data to be probed + /// Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed. + public static AVInputFormat* av_probe_input_format(AVProbeData* @pd, int @is_opened) => vectors.av_probe_input_format(@pd, @is_opened); + + /// Guess the file format. + /// data to be probed + /// Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed. + /// A probe score larger that this is required to accept a detection, the variable is set to the actual detection score afterwards. If the score is < = AVPROBE_SCORE_MAX / 4 it is recommended to retry with a larger probe buffer. + public static AVInputFormat* av_probe_input_format2(AVProbeData* @pd, int @is_opened, int* @score_max) => vectors.av_probe_input_format2(@pd, @is_opened, @score_max); + + /// Guess the file format. + /// Whether the file is already opened; determines whether demuxers with or without AVFMT_NOFILE are probed. + /// The score of the best detection. + public static AVInputFormat* av_probe_input_format3(AVProbeData* @pd, int @is_opened, int* @score_ret) => vectors.av_probe_input_format3(@pd, @is_opened, @score_ret); + + public static void av_program_add_stream_index(AVFormatContext* @ac, int @progid, uint @idx) => vectors.av_program_add_stream_index(@ac, @progid, @idx); + + /// Convert an AVRational to a IEEE 32-bit `float` expressed in fixed-point format. + /// Rational to be converted + /// Equivalent floating-point value, expressed as an unsigned 32-bit integer. + public static uint av_q2intfloat(AVRational @q) => vectors.av_q2intfloat(@q); + + /// Return the next frame of a stream. This function returns what is stored in the file, and does not validate that what is there are valid frames for the decoder. It will split what is stored in the file into frames and return one for each call. It will not omit invalid data between valid frames so as to give the decoder the maximum information possible for decoding. 
+ /// 0 if OK, < 0 on error or end of file. On error, pkt will be blank (as if it came from av_packet_alloc()). + public static int av_read_frame(AVFormatContext* @s, AVPacket* @pkt) => vectors.av_read_frame(@s, @pkt); + + public static void av_read_image_line(ushort* @dst, in byte_ptrArray4 @data, in int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component) => vectors.av_read_image_line(@dst, @data, @linesize, @desc, @x, @y, @c, @w, @read_pal_component); + + /// Read a line from an image, and write the values of the pixel format component c to dst. + /// the array containing the pointers to the planes of the image + /// the array containing the linesizes of the image + /// the pixel format descriptor for the image + /// the horizontal coordinate of the first pixel to read + /// the vertical coordinate of the first pixel to read + /// the width of the line to read, that is the number of values to write to dst + /// if not zero and the format is a paletted format writes the values corresponding to the palette component c in data[1] to dst, rather than the palette indexes in data[0]. The behavior is undefined if the format is not paletted. + /// size of elements in dst array (2 or 4 byte) + public static void av_read_image_line2(void* @dst, in byte_ptrArray4 @data, in int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component, int @dst_element_size) => vectors.av_read_image_line2(@dst, @data, @linesize, @desc, @x, @y, @c, @w, @read_pal_component, @dst_element_size); + + /// Pause a network-based stream (e.g. RTSP stream). + public static int av_read_pause(AVFormatContext* @s) => vectors.av_read_pause(@s); + + /// Start playing a network-based stream (e.g. RTSP stream) at the current position. + public static int av_read_play(AVFormatContext* @s) => vectors.av_read_play(@s); + + /// Allocate, reallocate, or free a block of memory. 
+ /// Pointer to a memory block already allocated with av_realloc() or `NULL` + /// Size in bytes of the memory block to be allocated or reallocated + /// Pointer to a newly-reallocated block or `NULL` if the block cannot be reallocated + public static void* av_realloc(void* @ptr, ulong @size) => vectors.av_realloc(@ptr, @size); + + /// Allocate, reallocate, or free an array. + /// Pointer to a memory block already allocated with av_realloc() or `NULL` + /// Number of elements in the array + /// Size of the single element of the array + /// Pointer to a newly-reallocated block or NULL if the block cannot be reallocated + public static void* av_realloc_array(void* @ptr, ulong @nmemb, ulong @size) => vectors.av_realloc_array(@ptr, @nmemb, @size); + + /// Allocate, reallocate, or free a block of memory. + public static void* av_realloc_f(void* @ptr, ulong @nelem, ulong @elsize) => vectors.av_realloc_f(@ptr, @nelem, @elsize); + + /// Allocate, reallocate, or free a block of memory through a pointer to a pointer. + /// Pointer to a pointer to a memory block already allocated with av_realloc(), or a pointer to `NULL`. The pointer is updated on success, or freed on failure. + /// Size in bytes for the memory block to be allocated or reallocated + /// Zero on success, an AVERROR error code on failure + public static int av_reallocp(void* @ptr, ulong @size) => vectors.av_reallocp(@ptr, @size); + + /// Allocate, reallocate an array through a pointer to a pointer. + /// Pointer to a pointer to a memory block already allocated with av_realloc(), or a pointer to `NULL`. The pointer is updated on success, or freed on failure. + /// Number of elements + /// Size of the single element + /// Zero on success, an AVERROR error code on failure + public static int av_reallocp_array(void* @ptr, ulong @nmemb, ulong @size) => vectors.av_reallocp_array(@ptr, @nmemb, @size); + + /// Reduce a fraction. 
+ /// Destination numerator + /// Destination denominator + /// Source numerator + /// Source denominator + /// Maximum allowed values for `dst_num` & `dst_den` + /// 1 if the operation is exact, 0 otherwise + public static int av_reduce(int* @dst_num, int* @dst_den, long @num, long @den, long @max) => vectors.av_reduce(@dst_num, @dst_den, @num, @den, @max); + + /// Rescale a 64-bit integer with rounding to nearest. + public static long av_rescale(long @a, long @b, long @c) => vectors.av_rescale(@a, @b, @c); + + /// Rescale a timestamp while preserving known durations. + /// Input time base + /// Input timestamp + /// Duration time base; typically this is finer-grained (greater) than `in_tb` and `out_tb` + /// Duration till the next call to this function (i.e. duration of the current packet/frame) + /// Pointer to a timestamp expressed in terms of `fs_tb`, acting as a state variable + /// Output timebase + /// Timestamp expressed in terms of `out_tb` + public static long av_rescale_delta(AVRational @in_tb, long @in_ts, AVRational @fs_tb, int @duration, long* @last, AVRational @out_tb) => vectors.av_rescale_delta(@in_tb, @in_ts, @fs_tb, @duration, @last, @out_tb); + + /// Rescale a 64-bit integer by 2 rational numbers. + public static long av_rescale_q(long @a, AVRational @bq, AVRational @cq) => vectors.av_rescale_q(@a, @bq, @cq); + + /// Rescale a 64-bit integer by 2 rational numbers with specified rounding. + public static long av_rescale_q_rnd(long @a, AVRational @bq, AVRational @cq, AVRounding @rnd) => vectors.av_rescale_q_rnd(@a, @bq, @cq, @rnd); + + /// Rescale a 64-bit integer with specified rounding. + public static long av_rescale_rnd(long @a, long @b, long @c, AVRounding @rnd) => vectors.av_rescale_rnd(@a, @b, @c, @rnd); + + /// Check if the sample format is planar. 
+ /// the sample format to inspect + /// 1 if the sample format is planar, 0 if it is interleaved + public static int av_sample_fmt_is_planar(AVSampleFormat @sample_fmt) => vectors.av_sample_fmt_is_planar(@sample_fmt); + + /// Allocate a samples buffer for nb_samples samples, and fill data pointers and linesize accordingly. The allocated samples buffer can be freed by using av_freep(&audio_data[0]) Allocated data will be initialized to silence. + /// array to be filled with the pointer for each channel + /// aligned size for audio buffer(s), may be NULL + /// number of audio channels + /// number of samples per channel + /// buffer size alignment (0 = default, 1 = no alignment) + /// >=0 on success or a negative error code on failure + public static int av_samples_alloc(byte** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => vectors.av_samples_alloc(@audio_data, @linesize, @nb_channels, @nb_samples, @sample_fmt, @align); + + /// Allocate a data pointers array, samples buffer for nb_samples samples, and fill data pointers and linesize accordingly. + public static int av_samples_alloc_array_and_samples(byte*** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => vectors.av_samples_alloc_array_and_samples(@audio_data, @linesize, @nb_channels, @nb_samples, @sample_fmt, @align); + + /// Copy samples from src to dst. 
+ /// destination array of pointers to data planes + /// source array of pointers to data planes + /// offset in samples at which the data will be written to dst + /// offset in samples at which the data will be read from src + /// number of samples to be copied + /// number of audio channels + /// audio sample format + public static int av_samples_copy(byte** @dst, byte** @src, int @dst_offset, int @src_offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt) => vectors.av_samples_copy(@dst, @src, @dst_offset, @src_offset, @nb_samples, @nb_channels, @sample_fmt); + + /// Fill plane data pointers and linesize for samples with sample format sample_fmt. + /// array to be filled with the pointer for each channel + /// calculated linesize, may be NULL + /// the pointer to a buffer containing the samples + /// the number of channels + /// the number of samples in a single channel + /// the sample format + /// buffer size alignment (0 = default, 1 = no alignment) + /// minimum size in bytes required for the buffer on success, or a negative error code on failure + public static int av_samples_fill_arrays(byte** @audio_data, int* @linesize, byte* @buf, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => vectors.av_samples_fill_arrays(@audio_data, @linesize, @buf, @nb_channels, @nb_samples, @sample_fmt, @align); + + /// Get the required buffer size for the given audio parameters. + /// calculated linesize, may be NULL + /// the number of channels + /// the number of samples in a single channel + /// the sample format + /// buffer size alignment (0 = default, 1 = no alignment) + /// required buffer size, or negative error code on failure + public static int av_samples_get_buffer_size(int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align) => vectors.av_samples_get_buffer_size(@linesize, @nb_channels, @nb_samples, @sample_fmt, @align); + + /// Fill an audio buffer with silence. 
+ /// array of pointers to data planes + /// offset in samples at which to start filling + /// number of samples to fill + /// number of audio channels + /// audio sample format + public static int av_samples_set_silence(byte** @audio_data, int @offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt) => vectors.av_samples_set_silence(@audio_data, @offset, @nb_samples, @nb_channels, @sample_fmt); + + /// Generate an SDP for an RTP session. + /// array of AVFormatContexts describing the RTP streams. If the array is composed by only one context, such context can contain multiple AVStreams (one AVStream per RTP stream). Otherwise, all the contexts in the array (an AVCodecContext per RTP stream) must contain only one AVStream. + /// number of AVCodecContexts contained in ac + /// buffer where the SDP will be stored (must be allocated by the caller) + /// the size of the buffer + /// 0 if OK, AVERROR_xxx on error + public static int av_sdp_create(AVFormatContext** @ac, int @n_files, byte* @buf, int @size) => vectors.av_sdp_create(@ac, @n_files, @buf, @size); + + /// Seek to the keyframe at timestamp. 'timestamp' in 'stream_index'. + /// media file handle + /// If stream_index is (-1), a default stream is selected, and timestamp is automatically converted from AV_TIME_BASE units to the stream specific time_base. + /// Timestamp in AVStream.time_base units or, if no stream is specified, in AV_TIME_BASE units. + /// flags which select direction and seeking mode + /// >= 0 on success + public static int av_seek_frame(AVFormatContext* @s, int @stream_index, long @timestamp, int @flags) => vectors.av_seek_frame(@s, @stream_index, @timestamp, @flags); + + /// Parse the key/value pairs list in opts. For each key/value pair found, stores the value in the field in ctx that is named like the key. ctx must be an AVClass context, storing is done using AVOptions. 
+ /// options string to parse, may be NULL + /// a 0-terminated list of characters used to separate key from value + /// a 0-terminated list of characters used to separate two pairs from each other + /// the number of successfully set key/value pairs, or a negative value corresponding to an AVERROR code in case of error: AVERROR(EINVAL) if opts cannot be parsed, the error code issued by av_opt_set() if a key/value pair cannot be set + public static int av_set_options_string(void* @ctx, string @opts, string @key_val_sep, string @pairs_sep) => vectors.av_set_options_string(@ctx, @opts, @key_val_sep, @pairs_sep); + + /// Reduce packet size, correctly zeroing padding + /// packet + /// new size + public static void av_shrink_packet(AVPacket* @pkt, int @size) => vectors.av_shrink_packet(@pkt, @size); + + /// Multiply two `size_t` values checking for overflow. + /// Pointer to the result of the operation + /// 0 on success, AVERROR(EINVAL) on overflow + public static int av_size_mult(ulong @a, ulong @b, ulong* @r) => vectors.av_size_mult(@a, @b, @r); + + /// Duplicate a string. + /// String to be duplicated + /// Pointer to a newly-allocated string containing a copy of `s` or `NULL` if the string cannot be allocated + public static byte* av_strdup(string @s) => vectors.av_strdup(@s); + + /// Wrap an existing array as stream side data. + /// stream + /// side information type + /// the side data array. It must be allocated with the av_malloc() family of functions. The ownership of the data is transferred to st. + /// side information size + /// zero on success, a negative AVERROR code on failure. On failure, the stream is unchanged and the data remains owned by the caller. + public static int av_stream_add_side_data(AVStream* @st, AVPacketSideDataType @type, byte* @data, ulong @size) => vectors.av_stream_add_side_data(@st, @type, @data, @size); + + /// Get the AVClass for AVStream. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. 
+ public static AVClass* av_stream_get_class() => vectors.av_stream_get_class(); + + /// Get the internal codec timebase from a stream. + /// input stream to extract the timebase from + public static AVRational av_stream_get_codec_timebase(AVStream* @st) => vectors.av_stream_get_codec_timebase(@st); + + /// Returns the pts of the last muxed packet + its duration + public static long av_stream_get_end_pts(AVStream* @st) => vectors.av_stream_get_end_pts(@st); + + public static AVCodecParserContext* av_stream_get_parser(AVStream* @s) => vectors.av_stream_get_parser(@s); + + /// Get side information from stream. + /// stream + /// desired side information type + /// If supplied, *size will be set to the size of the side data or to zero if the desired side data is not present. + /// pointer to data if present or NULL otherwise + public static byte* av_stream_get_side_data(AVStream* @stream, AVPacketSideDataType @type, ulong* @size) => vectors.av_stream_get_side_data(@stream, @type, @size); + + /// Allocate new information from stream. + /// stream + /// desired side information type + /// side information size + /// pointer to fresh allocated data or NULL otherwise + public static byte* av_stream_new_side_data(AVStream* @stream, AVPacketSideDataType @type, ulong @size) => vectors.av_stream_new_side_data(@stream, @type, @size); + + /// Put a description of the AVERROR code errnum in errbuf. In case of failure the global variable errno is set to indicate the error. Even in case of failure av_strerror() will print a generic error message indicating the errnum provided to errbuf. + /// error code to describe + /// buffer to which description is written + /// the size in bytes of errbuf + /// 0 on success, a negative value if a description for errnum cannot be found + public static int av_strerror(int @errnum, byte* @errbuf, ulong @errbuf_size) => vectors.av_strerror(@errnum, @errbuf, @errbuf_size); + + /// Duplicate a substring of a string. 
+ /// String to be duplicated + /// Maximum length of the resulting string (not counting the terminating byte) + /// Pointer to a newly-allocated string containing a substring of `s` or `NULL` if the string cannot be allocated + public static byte* av_strndup(string @s, ulong @len) => vectors.av_strndup(@s, @len); + + /// Subtract one rational from another. + /// First rational + /// Second rational + /// b-c + public static AVRational av_sub_q(AVRational @b, AVRational @c) => vectors.av_sub_q(@b, @c); + + /// Wrapper to work around the lack of mkstemp() on mingw. Also, tries to create file in /tmp first, if possible. *prefix can be a character constant; *filename will be allocated internally. + /// file descriptor of opened file (or negative value corresponding to an AVERROR code on error) and opened file name in **filename. + [Obsolete("as fd numbers cannot be passed saftely between libs on some platforms")] + public static int av_tempfile(string @prefix, byte** @filename, int @log_offset, void* @log_ctx) => vectors.av_tempfile(@prefix, @filename, @log_offset, @log_ctx); + + /// Adjust frame number for NTSC drop frame time code. + /// frame number to adjust + /// frame per second, multiples of 30 + /// adjusted frame number + public static int av_timecode_adjust_ntsc_framenum2(int @framenum, int @fps) => vectors.av_timecode_adjust_ntsc_framenum2(@framenum, @fps); + + /// Check if the timecode feature is available for the given frame rate + /// 0 if supported, < 0 otherwise + public static int av_timecode_check_frame_rate(AVRational @rate) => vectors.av_timecode_check_frame_rate(@rate); + + /// Convert sei info to SMPTE 12M binary representation. 
+ /// frame rate in rational form + /// drop flag + /// hour + /// minute + /// second + /// frame number + /// the SMPTE binary representation + public static uint av_timecode_get_smpte(AVRational @rate, int @drop, int @hh, int @mm, int @ss, int @ff) => vectors.av_timecode_get_smpte(@rate, @drop, @hh, @mm, @ss, @ff); + + /// Convert frame number to SMPTE 12M binary representation. + /// timecode data correctly initialized + /// frame number + /// the SMPTE binary representation + public static uint av_timecode_get_smpte_from_framenum(AVTimecode* @tc, int @framenum) => vectors.av_timecode_get_smpte_from_framenum(@tc, @framenum); + + /// Init a timecode struct with the passed parameters. + /// pointer to an allocated AVTimecode + /// frame rate in rational form + /// miscellaneous flags such as drop frame, +24 hours, ... (see AVTimecodeFlag) + /// the first frame number + /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log) + /// 0 on success, AVERROR otherwise + public static int av_timecode_init(AVTimecode* @tc, AVRational @rate, int @flags, int @frame_start, void* @log_ctx) => vectors.av_timecode_init(@tc, @rate, @flags, @frame_start, @log_ctx); + + /// Init a timecode struct from the passed timecode components. + /// pointer to an allocated AVTimecode + /// frame rate in rational form + /// miscellaneous flags such as drop frame, +24 hours, ... (see AVTimecodeFlag) + /// hours + /// minutes + /// seconds + /// frames + /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log) + /// 0 on success, AVERROR otherwise + public static int av_timecode_init_from_components(AVTimecode* @tc, AVRational @rate, int @flags, int @hh, int @mm, int @ss, int @ff, void* @log_ctx) => vectors.av_timecode_init_from_components(@tc, @rate, @flags, @hh, @mm, @ss, @ff, @log_ctx); + + /// Parse timecode representation (hh:mm:ss[:;.]ff). 
+ /// pointer to an allocated AVTimecode + /// frame rate in rational form + /// timecode string which will determine the frame start + /// a pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct (used for av_log). + /// 0 on success, AVERROR otherwise + public static int av_timecode_init_from_string(AVTimecode* @tc, AVRational @rate, string @str, void* @log_ctx) => vectors.av_timecode_init_from_string(@tc, @rate, @str, @log_ctx); + + /// Get the timecode string from the 25-bit timecode format (MPEG GOP format). + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// the 25-bits timecode + /// the buf parameter + public static byte* av_timecode_make_mpeg_tc_string(byte* @buf, uint @tc25bit) => vectors.av_timecode_make_mpeg_tc_string(@buf, @tc25bit); + + /// Get the timecode string from the SMPTE timecode format. + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// the 32-bit SMPTE timecode + /// prevent the use of a drop flag when it is known the DF bit is arbitrary + /// the buf parameter + public static byte* av_timecode_make_smpte_tc_string(byte* @buf, uint @tcsmpte, int @prevent_df) => vectors.av_timecode_make_smpte_tc_string(@buf, @tcsmpte, @prevent_df); + + /// Get the timecode string from the SMPTE timecode format. + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// frame rate of the timecode + /// the 32-bit SMPTE timecode + /// prevent the use of a drop flag when it is known the DF bit is arbitrary + /// prevent the use of a field flag when it is known the field bit is arbitrary (e.g. because it is used as PC flag) + /// the buf parameter + public static byte* av_timecode_make_smpte_tc_string2(byte* @buf, AVRational @rate, uint @tcsmpte, int @prevent_df, int @skip_field) => vectors.av_timecode_make_smpte_tc_string2(@buf, @rate, @tcsmpte, @prevent_df, @skip_field); + + /// Load timecode string in buf. 
+ /// timecode data correctly initialized + /// destination buffer, must be at least AV_TIMECODE_STR_SIZE long + /// frame number + /// the buf parameter + public static byte* av_timecode_make_string(AVTimecode* @tc, byte* @buf, int @framenum) => vectors.av_timecode_make_string(@tc, @buf, @framenum); + + public static void av_tree_destroy(AVTreeNode* @t) => vectors.av_tree_destroy(@t); + + /// Apply enu(opaque, &elem) to all the elements in the tree in a given range. + /// a comparison function that returns < 0 for an element below the range, > 0 for an element above the range and == 0 for an element inside the range + public static void av_tree_enumerate(AVTreeNode* @t, void* @opaque, av_tree_enumerate_cmp_func @cmp, av_tree_enumerate_enu_func @enu) => vectors.av_tree_enumerate(@t, @opaque, @cmp, @enu); + + /// Find an element. + /// a pointer to the root node of the tree + /// compare function used to compare elements in the tree, API identical to that of Standard C's qsort It is guaranteed that the first and only the first argument to cmp() will be the key parameter to av_tree_find(), thus it could if the user wants, be a different type (like an opaque context). + /// If next is not NULL, then next[0] will contain the previous element and next[1] the next element. If either does not exist, then the corresponding entry in next is unchanged. + /// An element with cmp(key, elem) == 0 or NULL if no such element exists in the tree. + public static void* av_tree_find(AVTreeNode* @root, void* @key, av_tree_find_cmp_func @cmp, ref void_ptrArray2 @next) => vectors.av_tree_find(@root, @key, @cmp, ref @next); + + /// Insert or remove an element. + /// A pointer to a pointer to the root node of the tree; note that the root node can change during insertions, this is required to keep the tree balanced. 
+ /// pointer to the element key to insert in the tree + /// compare function used to compare elements in the tree, API identical to that of Standard C's qsort + /// Used to allocate and free AVTreeNodes. For insertion the user must set it to an allocated and zeroed object of at least av_tree_node_size bytes size. av_tree_insert() will set it to NULL if it has been consumed. For deleting elements *next is set to NULL by the user and av_tree_insert() will set it to the AVTreeNode which was used for the removed element. This allows the use of flat arrays, which have lower overhead compared to many malloced elements. You might want to define a function like: + /// If no insertion happened, the found element; if an insertion or removal happened, then either key or NULL will be returned. Which one it is depends on the tree state and the implementation. You should make no assumptions that it's one or the other in the code. + public static void* av_tree_insert(AVTreeNode** @rootp, void* @key, av_tree_insert_cmp_func @cmp, AVTreeNode** @next) => vectors.av_tree_insert(@rootp, @key, @cmp, @next); + + /// Allocate an AVTreeNode. + public static AVTreeNode* av_tree_node_alloc() => vectors.av_tree_node_alloc(); + + /// Split a URL string into components. 
+ /// the buffer for the protocol + /// the size of the proto buffer + /// the buffer for the authorization + /// the size of the authorization buffer + /// the buffer for the host name + /// the size of the hostname buffer + /// a pointer to store the port number in + /// the buffer for the path + /// the size of the path buffer + /// the URL to split + public static void av_url_split(byte* @proto, int @proto_size, byte* @authorization, int @authorization_size, byte* @hostname, int @hostname_size, int* @port_ptr, byte* @path, int @path_size, string @url) => vectors.av_url_split(@proto, @proto_size, @authorization, @authorization_size, @hostname, @hostname_size, @port_ptr, @path, @path_size, @url); + + /// Sleep for a period of time. Although the duration is expressed in microseconds, the actual delay may be rounded to the precision of the system timer. + /// Number of microseconds to sleep. + /// zero on success or (negative) error code. + public static int av_usleep(uint @usec) => vectors.av_usleep(@usec); + + /// Return an informative version string. This usually is the actual release version number or a git commit description. This string has no fixed format and can change any time. It should never be parsed by code. + public static string av_version_info() => vectors.av_version_info(); + + /// Send the specified message to the log if the level is less than or equal to the current av_log_level. By default, all logging messages are sent to stderr. This behavior can be altered by setting a different logging callback function. + /// A pointer to an arbitrary struct of which the first field is a pointer to an AVClass struct. + /// The importance level of the message expressed using a "Logging Constant". + /// The format string (printf-compatible) that specifies how subsequent arguments are converted to output. + /// The arguments referenced by the format string. 
+ public static void av_vlog(void* @avcl, int @level, string @fmt, byte* @vl) => vectors.av_vlog(@avcl, @level, @fmt, @vl); + + /// Write a packet to an output media file. + /// media file handle + /// The packet containing the data to be written. Note that unlike av_interleaved_write_frame(), this function does not take ownership of the packet passed to it (though some muxers may make an internal reference to the input packet). This parameter can be NULL (at any time, not just at the end), in order to immediately flush data buffered within the muxer, for muxers that buffer up data internally before writing it to the output. Packet's "stream_index" field must be set to the index of the corresponding stream in "s->streams". The timestamps ( "pts", "dts") must be set to correct values in the stream's timebase (unless the output format is flagged with the AVFMT_NOTIMESTAMPS flag, then they can be set to AV_NOPTS_VALUE). The dts for subsequent packets passed to this function must be strictly increasing when compared in their respective timebases (unless the output format is flagged with the AVFMT_TS_NONSTRICT, then they merely have to be nondecreasing). "duration") should also be set if known. + /// < 0 on error, = 0 if OK, 1 if flushed and there is no more data to flush + public static int av_write_frame(AVFormatContext* @s, AVPacket* @pkt) => vectors.av_write_frame(@s, @pkt); + + public static void av_write_image_line(ushort* @src, ref byte_ptrArray4 @data, in int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w) => vectors.av_write_image_line(@src, ref @data, @linesize, @desc, @x, @y, @c, @w); + + /// Write the values from src to the pixel format component c of an image line. + /// array containing the values to write + /// the array containing the pointers to the planes of the image to write into. It is supposed to be zeroed. 
+ /// the array containing the linesizes of the image + /// the pixel format descriptor for the image + /// the horizontal coordinate of the first pixel to write + /// the vertical coordinate of the first pixel to write + /// the width of the line to write, that is the number of values to write to the image line + /// size of elements in src array (2 or 4 byte) + public static void av_write_image_line2(void* @src, ref byte_ptrArray4 @data, in int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @src_element_size) => vectors.av_write_image_line2(@src, ref @data, @linesize, @desc, @x, @y, @c, @w, @src_element_size); + + /// Write the stream trailer to an output media file and free the file private data. + /// media file handle + /// 0 if OK, AVERROR_xxx on error + public static int av_write_trailer(AVFormatContext* @s) => vectors.av_write_trailer(@s); + + /// Write an uncoded frame to an output media file. + public static int av_write_uncoded_frame(AVFormatContext* @s, int @stream_index, AVFrame* @frame) => vectors.av_write_uncoded_frame(@s, @stream_index, @frame); + + /// Test whether a muxer supports uncoded frame. + /// >=0 if an uncoded frame can be written to that muxer and stream, < 0 if not + public static int av_write_uncoded_frame_query(AVFormatContext* @s, int @stream_index) => vectors.av_write_uncoded_frame_query(@s, @stream_index); + + /// Encode extradata length to a buffer. Used by xiph codecs. + /// buffer to write to; must be at least (v/255+1) bytes long + /// size of extradata in bytes + /// number of bytes written to the buffer. + public static uint av_xiphlacing(byte* @s, uint @v) => vectors.av_xiphlacing(@s, @v); + + /// Modify width and height values so that they will result in a memory buffer that is acceptable for the codec if you do not use any horizontal padding. 
+ public static void avcodec_align_dimensions(AVCodecContext* @s, int* @width, int* @height) => vectors.avcodec_align_dimensions(@s, @width, @height); + + /// Modify width and height values so that they will result in a memory buffer that is acceptable for the codec if you also ensure that all line sizes are a multiple of the respective linesize_align[i]. + public static void avcodec_align_dimensions2(AVCodecContext* @s, int* @width, int* @height, ref int_array8 @linesize_align) => vectors.avcodec_align_dimensions2(@s, @width, @height, ref @linesize_align); + + /// Allocate an AVCodecContext and set its fields to default values. The resulting struct should be freed with avcodec_free_context(). + /// if non-NULL, allocate private data and initialize defaults for the given codec. It is illegal to then call avcodec_open2() with a different codec. If NULL, then the codec-specific defaults won't be initialized, which may result in suboptimal default settings (this is important mainly for encoders, e.g. libx264). + /// An AVCodecContext filled with default values or NULL on failure. + public static AVCodecContext* avcodec_alloc_context3(AVCodec* @codec) => vectors.avcodec_alloc_context3(@codec); + + /// Converts swscale x/y chroma position to AVChromaLocation. + /// horizontal chroma sample position + /// vertical chroma sample position + public static AVChromaLocation avcodec_chroma_pos_to_enum(int @xpos, int @ypos) => vectors.avcodec_chroma_pos_to_enum(@xpos, @ypos); + + /// Close a given AVCodecContext and free all the data associated with it (but not the AVCodecContext itself). + public static int avcodec_close(AVCodecContext* @avctx) => vectors.avcodec_close(@avctx); + + /// Return the libavcodec build-time configuration. + public static string avcodec_configuration() => vectors.avcodec_configuration(); + + /// Decode a subtitle message. Return a negative value on error, otherwise return the number of bytes used. 
If no subtitle could be decompressed, got_sub_ptr is zero. Otherwise, the subtitle is stored in *sub. Note that AV_CODEC_CAP_DR1 is not available for subtitle codecs. This is for simplicity, because the performance difference is expected to be negligible and reusing a get_buffer written for video codecs would probably perform badly due to a potentially very different allocation pattern. + /// the codec context + /// The preallocated AVSubtitle in which the decoded subtitle will be stored, must be freed with avsubtitle_free if *got_sub_ptr is set. + /// Zero if no subtitle could be decompressed, otherwise, it is nonzero. + /// The input AVPacket containing the input buffer. + public static int avcodec_decode_subtitle2(AVCodecContext* @avctx, AVSubtitle* @sub, int* @got_sub_ptr, AVPacket* @avpkt) => vectors.avcodec_decode_subtitle2(@avctx, @sub, @got_sub_ptr, @avpkt); + + public static int avcodec_default_execute(AVCodecContext* @c, avcodec_default_execute_func_func @func, void* @arg, int* @ret, int @count, int @size) => vectors.avcodec_default_execute(@c, @func, @arg, @ret, @count, @size); + + public static int avcodec_default_execute2(AVCodecContext* @c, avcodec_default_execute2_func_func @func, void* @arg, int* @ret, int @count) => vectors.avcodec_default_execute2(@c, @func, @arg, @ret, @count); + + /// The default callback for AVCodecContext.get_buffer2(). It is made public so it can be called by custom get_buffer2() implementations for decoders without AV_CODEC_CAP_DR1 set. + public static int avcodec_default_get_buffer2(AVCodecContext* @s, AVFrame* @frame, int @flags) => vectors.avcodec_default_get_buffer2(@s, @frame, @flags); + + /// The default callback for AVCodecContext.get_encode_buffer(). It is made public so it can be called by custom get_encode_buffer() implementations for encoders without AV_CODEC_CAP_DR1 set. 
+ public static int avcodec_default_get_encode_buffer(AVCodecContext* @s, AVPacket* @pkt, int @flags) => vectors.avcodec_default_get_encode_buffer(@s, @pkt, @flags); + + public static AVPixelFormat avcodec_default_get_format(AVCodecContext* @s, AVPixelFormat* @fmt) => vectors.avcodec_default_get_format(@s, @fmt); + + /// Returns descriptor for given codec ID or NULL if no descriptor exists. + /// descriptor for given codec ID or NULL if no descriptor exists. + public static AVCodecDescriptor* avcodec_descriptor_get(AVCodecID @id) => vectors.avcodec_descriptor_get(@id); + + /// Returns codec descriptor with the given name or NULL if no such descriptor exists. + /// codec descriptor with the given name or NULL if no such descriptor exists. + public static AVCodecDescriptor* avcodec_descriptor_get_by_name(string @name) => vectors.avcodec_descriptor_get_by_name(@name); + + /// Iterate over all codec descriptors known to libavcodec. + /// previous descriptor. NULL to get the first descriptor. + /// next descriptor or NULL after the last descriptor + public static AVCodecDescriptor* avcodec_descriptor_next(AVCodecDescriptor* @prev) => vectors.avcodec_descriptor_next(@prev); + + /// @{ + public static int avcodec_encode_subtitle(AVCodecContext* @avctx, byte* @buf, int @buf_size, AVSubtitle* @sub) => vectors.avcodec_encode_subtitle(@avctx, @buf, @buf_size, @sub); + + /// Converts AVChromaLocation to swscale x/y chroma position. + /// horizontal chroma sample position + /// vertical chroma sample position + public static int avcodec_enum_to_chroma_pos(int* @xpos, int* @ypos, AVChromaLocation @pos) => vectors.avcodec_enum_to_chroma_pos(@xpos, @ypos, @pos); + + /// Fill AVFrame audio data and linesize pointers. + /// the AVFrame frame->nb_samples must be set prior to calling the function. This function fills in frame->data, frame->extended_data, frame->linesize[0]. 
+ /// channel count + /// sample format + /// buffer to use for frame data + /// size of buffer + /// plane size sample alignment (0 = default) + /// >=0 on success, negative error code on failure + public static int avcodec_fill_audio_frame(AVFrame* @frame, int @nb_channels, AVSampleFormat @sample_fmt, byte* @buf, int @buf_size, int @align) => vectors.avcodec_fill_audio_frame(@frame, @nb_channels, @sample_fmt, @buf, @buf_size, @align); + + /// Find the best pixel format to convert to given a certain source pixel format. When converting from one pixel format to another, information loss may occur. For example, when converting from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when converting from some formats to other formats. avcodec_find_best_pix_fmt_of_2() searches which of the given pixel formats should be used to suffer the least amount of loss. The pixel formats from which it chooses one, are determined by the pix_fmt_list parameter. + /// AV_PIX_FMT_NONE terminated array of pixel formats to choose from + /// source pixel format + /// Whether the source pixel format alpha channel is used. + /// Combination of flags informing you what kind of losses will occur. + /// The best pixel format to convert to or -1 if none was found. + public static AVPixelFormat avcodec_find_best_pix_fmt_of_list(AVPixelFormat* @pix_fmt_list, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr) => vectors.avcodec_find_best_pix_fmt_of_list(@pix_fmt_list, @src_pix_fmt, @has_alpha, @loss_ptr); + + /// Find a registered decoder with a matching codec ID. + /// AVCodecID of the requested decoder + /// A decoder if one was found, NULL otherwise. + public static AVCodec* avcodec_find_decoder(AVCodecID @id) => vectors.avcodec_find_decoder(@id); + + /// Find a registered decoder with the specified name. + /// name of the requested decoder + /// A decoder if one was found, NULL otherwise. 
+ public static AVCodec* avcodec_find_decoder_by_name(string @name) => vectors.avcodec_find_decoder_by_name(@name); + + /// Find a registered encoder with a matching codec ID. + /// AVCodecID of the requested encoder + /// An encoder if one was found, NULL otherwise. + public static AVCodec* avcodec_find_encoder(AVCodecID @id) => vectors.avcodec_find_encoder(@id); + + /// Find a registered encoder with the specified name. + /// name of the requested encoder + /// An encoder if one was found, NULL otherwise. + public static AVCodec* avcodec_find_encoder_by_name(string @name) => vectors.avcodec_find_encoder_by_name(@name); + + /// Reset the internal codec state / flush internal buffers. Should be called e.g. when seeking or when switching to a different stream. + public static void avcodec_flush_buffers(AVCodecContext* @avctx) => vectors.avcodec_flush_buffers(@avctx); + + /// Free the codec context and everything associated with it and write NULL to the provided pointer. + public static void avcodec_free_context(AVCodecContext** @avctx) => vectors.avcodec_free_context(@avctx); + + /// Get the AVClass for AVCodecContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + public static AVClass* avcodec_get_class() => vectors.avcodec_get_class(); + + [Obsolete("This function should not be used.")] + public static AVClass* avcodec_get_frame_class() => vectors.avcodec_get_frame_class(); + + /// Retrieve supported hardware configurations for a codec. + public static AVCodecHWConfig* avcodec_get_hw_config(AVCodec* @codec, int @index) => vectors.avcodec_get_hw_config(@codec, @index); + + /// Create and return a AVHWFramesContext with values adequate for hardware decoding. This is meant to get called from the get_format callback, and is a helper for preparing a AVHWFramesContext for AVCodecContext.hw_frames_ctx. This API is for decoding with certain hardware acceleration modes/APIs only. 
+ /// The context which is currently calling get_format, and which implicitly contains all state needed for filling the returned AVHWFramesContext properly. + /// A reference to the AVHWDeviceContext describing the device which will be used by the hardware decoder. + /// The hwaccel format you are going to return from get_format. + /// On success, set to a reference to an _uninitialized_ AVHWFramesContext, created from the given device_ref. Fields will be set to values required for decoding. Not changed if an error is returned. + /// zero on success, a negative value on error. The following error codes have special semantics: AVERROR(ENOENT): the decoder does not support this functionality. Setup is always manual, or it is a decoder which does not support setting AVCodecContext.hw_frames_ctx at all, or it is a software format. AVERROR(EINVAL): it is known that hardware decoding is not supported for this configuration, or the device_ref is not supported for the hwaccel referenced by hw_pix_fmt. + public static int avcodec_get_hw_frames_parameters(AVCodecContext* @avctx, AVBufferRef* @device_ref, AVPixelFormat @hw_pix_fmt, AVBufferRef** @out_frames_ref) => vectors.avcodec_get_hw_frames_parameters(@avctx, @device_ref, @hw_pix_fmt, @out_frames_ref); + + /// Get the name of a codec. + /// a static string identifying the codec; never NULL + public static string avcodec_get_name(AVCodecID @id) => vectors.avcodec_get_name(@id); + + /// Get the AVClass for AVSubtitleRect. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options. + public static AVClass* avcodec_get_subtitle_rect_class() => vectors.avcodec_get_subtitle_rect_class(); + + /// Get the type of the given codec. + public static AVMediaType avcodec_get_type(AVCodecID @codec_id) => vectors.avcodec_get_type(@codec_id); + + /// Returns a positive value if s is open (i.e. avcodec_open2() was called on it with no corresponding avcodec_close()), 0 otherwise. 
+ /// a positive value if s is open (i.e. avcodec_open2() was called on it with no corresponding avcodec_close()), 0 otherwise. + public static int avcodec_is_open(AVCodecContext* @s) => vectors.avcodec_is_open(@s); + + /// Return the libavcodec license. + public static string avcodec_license() => vectors.avcodec_license(); + + /// Initialize the AVCodecContext to use the given AVCodec. Prior to using this function the context has to be allocated with avcodec_alloc_context3(). + /// The context to initialize. + /// The codec to open this context for. If a non-NULL codec has been previously passed to avcodec_alloc_context3() or for this context, then this parameter MUST be either NULL or equal to the previously passed codec. + /// A dictionary filled with AVCodecContext and codec-private options. On return this object will be filled with options that were not found. + /// zero on success, a negative value on error + public static int avcodec_open2(AVCodecContext* @avctx, AVCodec* @codec, AVDictionary** @options) => vectors.avcodec_open2(@avctx, @codec, @options); + + /// Allocate a new AVCodecParameters and set its fields to default values (unknown/invalid/0). The returned struct must be freed with avcodec_parameters_free(). + public static AVCodecParameters* avcodec_parameters_alloc() => vectors.avcodec_parameters_alloc(); + + /// Copy the contents of src to dst. Any allocated fields in dst are freed and replaced with newly allocated duplicates of the corresponding fields in src. + /// >= 0 on success, a negative AVERROR code on failure. + public static int avcodec_parameters_copy(AVCodecParameters* @dst, AVCodecParameters* @src) => vectors.avcodec_parameters_copy(@dst, @src); + + /// Free an AVCodecParameters instance and everything associated with it and write NULL to the supplied pointer. 
+ public static void avcodec_parameters_free(AVCodecParameters** @par) => vectors.avcodec_parameters_free(@par); + + /// Fill the parameters struct based on the values from the supplied codec context. Any allocated fields in par are freed and replaced with duplicates of the corresponding fields in codec. + /// >= 0 on success, a negative AVERROR code on failure + public static int avcodec_parameters_from_context(AVCodecParameters* @par, AVCodecContext* @codec) => vectors.avcodec_parameters_from_context(@par, @codec); + + /// Fill the codec context based on the values from the supplied codec parameters. Any allocated fields in codec that have a corresponding field in par are freed and replaced with duplicates of the corresponding field in par. Fields in codec that do not have a counterpart in par are not touched. + /// >= 0 on success, a negative AVERROR code on failure. + public static int avcodec_parameters_to_context(AVCodecContext* @codec, AVCodecParameters* @par) => vectors.avcodec_parameters_to_context(@codec, @par); + + /// Return a value representing the fourCC code associated to the pixel format pix_fmt, or 0 if no associated fourCC code can be found. + public static uint avcodec_pix_fmt_to_codec_tag(AVPixelFormat @pix_fmt) => vectors.avcodec_pix_fmt_to_codec_tag(@pix_fmt); + + /// Return a name for the specified profile, if available. + /// the ID of the codec to which the requested profile belongs + /// the profile value for which a name is requested + /// A name for the profile if found, NULL otherwise. + public static string avcodec_profile_name(AVCodecID @codec_id, int @profile) => vectors.avcodec_profile_name(@codec_id, @profile); + + /// Return decoded output data from a decoder. + /// codec context + /// This will be set to a reference-counted video or audio frame (depending on the decoder type) allocated by the decoder. Note that the function will always call av_frame_unref(frame) before doing anything else. 
+ /// 0: success, a frame was returned AVERROR(EAGAIN): output is not available in this state - user must try to send new input AVERROR_EOF: the decoder has been fully flushed, and there will be no more output frames AVERROR(EINVAL): codec not opened, or it is an encoder AVERROR_INPUT_CHANGED: current decoded frame has changed parameters with respect to first decoded frame. Applicable when flag AV_CODEC_FLAG_DROPCHANGED is set. other negative values: legitimate decoding errors + public static int avcodec_receive_frame(AVCodecContext* @avctx, AVFrame* @frame) => vectors.avcodec_receive_frame(@avctx, @frame); + + /// Read encoded data from the encoder. + /// codec context + /// This will be set to a reference-counted packet allocated by the encoder. Note that the function will always call av_packet_unref(avpkt) before doing anything else. + /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): output is not available in the current state - user must try to send input AVERROR_EOF: the encoder has been fully flushed, and there will be no more output packets AVERROR(EINVAL): codec not opened, or it is a decoder other errors: legitimate encoding errors + public static int avcodec_receive_packet(AVCodecContext* @avctx, AVPacket* @avpkt) => vectors.avcodec_receive_packet(@avctx, @avpkt); + + /// Supply a raw video or audio frame to the encoder. Use avcodec_receive_packet() to retrieve buffered output packets. + /// codec context + /// AVFrame containing the raw audio or video frame to be encoded. Ownership of the frame remains with the caller, and the encoder will not write to the frame. The encoder may create a reference to the frame data (or copy it if the frame is not reference-counted). It can be NULL, in which case it is considered a flush packet. This signals the end of the stream. If the encoder still has packets buffered, it will return them after this call. 
Once flushing mode has been entered, additional flush packets are ignored, and sending frames will return AVERROR_EOF. + /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): input is not accepted in the current state - user must read output with avcodec_receive_packet() (once all output is read, the packet should be resent, and the call will not fail with EAGAIN). AVERROR_EOF: the encoder has been flushed, and no new frames can be sent to it AVERROR(EINVAL): codec not opened, it is a decoder, or requires flush AVERROR(ENOMEM): failed to add packet to internal queue, or similar other errors: legitimate encoding errors + public static int avcodec_send_frame(AVCodecContext* @avctx, AVFrame* @frame) => vectors.avcodec_send_frame(@avctx, @frame); + + /// Supply raw packet data as input to a decoder. + /// codec context + /// The input AVPacket. Usually, this will be a single video frame, or several complete audio frames. Ownership of the packet remains with the caller, and the decoder will not write to the packet. The decoder may create a reference to the packet data (or copy it if the packet is not reference-counted). Unlike with older APIs, the packet is always fully consumed, and if it contains multiple frames (e.g. some audio codecs), will require you to call avcodec_receive_frame() multiple times afterwards before you can send a new packet. It can be NULL (or an AVPacket with data set to NULL and size set to 0); in this case, it is considered a flush packet, which signals the end of the stream. Sending the first flush packet will return success. Subsequent ones are unnecessary and will return AVERROR_EOF. If the decoder still has frames buffered, it will return them after sending a flush packet. + /// 0 on success, otherwise negative error code: AVERROR(EAGAIN): input is not accepted in the current state - user must read output with avcodec_receive_frame() (once all output is read, the packet should be resent, and the call will not fail with EAGAIN). 
AVERROR_EOF: the decoder has been flushed, and no new packets can be sent to it (also returned if more than 1 flush packet is sent) AVERROR(EINVAL): codec not opened, it is an encoder, or requires flush AVERROR(ENOMEM): failed to add packet to internal queue, or similar other errors: legitimate decoding errors + public static int avcodec_send_packet(AVCodecContext* @avctx, AVPacket* @avpkt) => vectors.avcodec_send_packet(@avctx, @avpkt); + + /// @} + public static void avcodec_string(byte* @buf, int @buf_size, AVCodecContext* @enc, int @encode) => vectors.avcodec_string(@buf, @buf_size, @enc, @encode); + + /// Return the LIBAVCODEC_VERSION_INT constant. + public static uint avcodec_version() => vectors.avcodec_version(); + + /// Send control message from application to device. + /// device context. + /// message type. + /// message data. Exact type depends on message type. + /// size of message data. + /// >= 0 on success, negative on error. AVERROR(ENOSYS) when device doesn't implement handler of the message. + public static int avdevice_app_to_dev_control_message(AVFormatContext* @s, AVAppToDevMessageType @type, void* @data, ulong @data_size) => vectors.avdevice_app_to_dev_control_message(@s, @type, @data, @data_size); + + /// Initialize capabilities probing API based on AVOption API. + /// Device capabilities data. Pointer to a NULL pointer must be passed. + /// Context of the device. + /// An AVDictionary filled with device-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL. The same options must be passed later to avformat_write_header() for output devices or avformat_open_input() for input devices, or at any other place that affects device-private options. + /// >= 0 on success, negative otherwise. 
+ [Obsolete()] + public static int avdevice_capabilities_create(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s, AVDictionary** @device_options) => vectors.avdevice_capabilities_create(@caps, @s, @device_options); + + /// Free resources created by avdevice_capabilities_create() + /// Device capabilities data to be freed. + /// Context of the device. + [Obsolete()] + public static void avdevice_capabilities_free(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s) => vectors.avdevice_capabilities_free(@caps, @s); + + /// Return the libavdevice build-time configuration. + public static string avdevice_configuration() => vectors.avdevice_configuration(); + + /// Send control message from device to application. + /// device context. + /// message type. + /// message data. Can be NULL. + /// size of message data. + /// >= 0 on success, negative on error. AVERROR(ENOSYS) when application doesn't implement handler of the message. + public static int avdevice_dev_to_app_control_message(AVFormatContext* @s, AVDevToAppMessageType @type, void* @data, ulong @data_size) => vectors.avdevice_dev_to_app_control_message(@s, @type, @data, @data_size); + + /// Convenient function to free result of avdevice_list_devices(). + public static void avdevice_free_list_devices(AVDeviceInfoList** @device_list) => vectors.avdevice_free_list_devices(@device_list); + + /// Return the libavdevice license. + public static string avdevice_license() => vectors.avdevice_license(); + + /// List devices. + /// device context. + /// list of autodetected devices. + /// count of autodetected devices, negative on error. + public static int avdevice_list_devices(AVFormatContext* @s, AVDeviceInfoList** @device_list) => vectors.avdevice_list_devices(@s, @device_list); + + /// List devices. + /// device format. May be NULL if device name is set. + /// device name. May be NULL if device format is set. + /// An AVDictionary filled with device-private options. May be NULL. 
The same options must be passed later to avformat_write_header() for output devices or avformat_open_input() for input devices, or at any other place that affects device-private options. + /// list of autodetected devices + /// count of autodetected devices, negative on error. + public static int avdevice_list_input_sources(AVInputFormat* @device, string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list) => vectors.avdevice_list_input_sources(@device, @device_name, @device_options, @device_list); + + public static int avdevice_list_output_sinks(AVOutputFormat* @device, string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list) => vectors.avdevice_list_output_sinks(@device, @device_name, @device_options, @device_list); + + /// Initialize libavdevice and register all the input and output devices. + public static void avdevice_register_all() => vectors.avdevice_register_all(); + + /// Return the LIBAVDEVICE_VERSION_INT constant. + public static uint avdevice_version() => vectors.avdevice_version(); + + /// Negotiate the media format, dimensions, etc of all inputs to a filter. + /// the filter to negotiate the properties for its inputs + /// zero on successful negotiation + public static int avfilter_config_links(AVFilterContext* @filter) => vectors.avfilter_config_links(@filter); + + /// Return the libavfilter build-time configuration. + public static string avfilter_configuration() => vectors.avfilter_configuration(); + + /// Get the number of elements in an AVFilter's inputs or outputs array. + public static uint avfilter_filter_pad_count(AVFilter* @filter, int @is_output) => vectors.avfilter_filter_pad_count(@filter, @is_output); + + /// Free a filter context. This will also remove the filter from its filtergraph's list of filters. + /// the filter to free + public static void avfilter_free(AVFilterContext* @filter) => vectors.avfilter_free(@filter); + + /// Get a filter definition matching the given name. 
+ /// the filter name to find + /// the filter definition, if any matching one is registered. NULL if none found. + public static AVFilter* avfilter_get_by_name(string @name) => vectors.avfilter_get_by_name(@name); + + /// Returns AVClass for AVFilterContext. + /// AVClass for AVFilterContext. + public static AVClass* avfilter_get_class() => vectors.avfilter_get_class(); + + /// Allocate a filter graph. + /// the allocated filter graph on success or NULL. + public static AVFilterGraph* avfilter_graph_alloc() => vectors.avfilter_graph_alloc(); + + /// Create a new filter instance in a filter graph. + /// graph in which the new filter will be used + /// the filter to create an instance of + /// Name to give to the new instance (will be copied to AVFilterContext.name). This may be used by the caller to identify different filters, libavfilter itself assigns no semantics to this parameter. May be NULL. + /// the context of the newly created filter instance (note that it is also retrievable directly through AVFilterGraph.filters or with avfilter_graph_get_filter()) on success or NULL on failure. + public static AVFilterContext* avfilter_graph_alloc_filter(AVFilterGraph* @graph, AVFilter* @filter, string @name) => vectors.avfilter_graph_alloc_filter(@graph, @filter, @name); + + /// Check validity and configure all the links and formats in the graph. + /// the filter graph + /// context used for logging + /// >= 0 in case of success, a negative AVERROR code otherwise + public static int avfilter_graph_config(AVFilterGraph* @graphctx, void* @log_ctx) => vectors.avfilter_graph_config(@graphctx, @log_ctx); + + /// Create and add a filter instance into an existing graph. The filter instance is created from the filter filt and inited with the parameter args. opaque is currently ignored. 
    /// <param name="name">the instance name to give to the created filter instance</param>
    /// <param name="graph_ctx">the filter graph</param>
    /// <returns>a negative AVERROR error code in case of failure, a non negative value otherwise</returns>
    public static int avfilter_graph_create_filter(AVFilterContext** @filt_ctx, AVFilter* @filt, string @name, string @args, void* @opaque, AVFilterGraph* @graph_ctx) => vectors.avfilter_graph_create_filter(@filt_ctx, @filt, @name, @args, @opaque, @graph_ctx);

    /// <summary>Dump a graph into a human-readable string representation.</summary>
    /// <param name="graph">the graph to dump</param>
    /// <param name="options">formatting options; currently ignored</param>
    /// <returns>a string, or NULL in case of memory allocation failure; the string must be freed using av_free</returns>
    public static byte* avfilter_graph_dump(AVFilterGraph* @graph, string @options) => vectors.avfilter_graph_dump(@graph, @options);

    /// <summary>Free a graph, destroy its links, and set *graph to NULL. If *graph is NULL, do nothing.</summary>
    public static void avfilter_graph_free(AVFilterGraph** @graph) => vectors.avfilter_graph_free(@graph);

    /// <summary>Get a filter instance identified by instance name from graph.</summary>
    /// <param name="graph">filter graph to search through.</param>
    /// <param name="name">filter instance name (should be unique in the graph).</param>
    /// <returns>the pointer to the found filter instance or NULL if it cannot be found.</returns>
    public static AVFilterContext* avfilter_graph_get_filter(AVFilterGraph* @graph, string @name) => vectors.avfilter_graph_get_filter(@graph, @name);

    /// <summary>Add a graph described by a string to a graph.</summary>
    /// <param name="graph">the filter graph where to link the parsed graph context</param>
    /// <param name="filters">string to be parsed</param>
    /// <param name="inputs">linked list to the inputs of the graph</param>
    /// <param name="outputs">linked list to the outputs of the graph</param>
    /// <returns>zero on success, a negative AVERROR code on error</returns>
    public static int avfilter_graph_parse(AVFilterGraph* @graph, string @filters, AVFilterInOut* @inputs, AVFilterInOut* @outputs, void* @log_ctx) => vectors.avfilter_graph_parse(@graph, @filters, @inputs, @outputs, @log_ctx);

    /// <summary>Add a graph described by a string to a graph.</summary>
    /// <param name="graph">the filter graph where to link the parsed graph context</param>
    /// <param name="filters">string to be parsed</param>
    /// <param name="inputs">pointer to a linked list to the inputs of the graph, may be NULL. If non-NULL, *inputs is updated to contain the list of open inputs after the parsing, should be freed with avfilter_inout_free().</param>
    /// <param name="outputs">pointer to a linked list to the outputs of the graph, may be NULL. If non-NULL, *outputs is updated to contain the list of open outputs after the parsing, should be freed with avfilter_inout_free().</param>
    /// <returns>non negative on success, a negative AVERROR code on error</returns>
    public static int avfilter_graph_parse_ptr(AVFilterGraph* @graph, string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs, void* @log_ctx) => vectors.avfilter_graph_parse_ptr(@graph, @filters, @inputs, @outputs, @log_ctx);

    /// <summary>Add a graph described by a string to a graph.</summary>
    /// <param name="graph">the filter graph where to link the parsed graph context</param>
    /// <param name="filters">string to be parsed</param>
    /// <param name="inputs">a linked list of all free (unlinked) inputs of the parsed graph will be returned here. It is to be freed by the caller using avfilter_inout_free().</param>
    /// <param name="outputs">a linked list of all free (unlinked) outputs of the parsed graph will be returned here. It is to be freed by the caller using avfilter_inout_free().</param>
    /// <returns>zero on success, a negative AVERROR code on error</returns>
    public static int avfilter_graph_parse2(AVFilterGraph* @graph, string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs) => vectors.avfilter_graph_parse2(@graph, @filters, @inputs, @outputs);

    /// <summary>Queue a command for one or more filter instances.</summary>
    /// <param name="graph">the filter graph</param>
    /// <param name="target">the filter(s) to which the command should be sent "all" sends to all filters otherwise it can be a filter or filter instance name which will send the command to all matching filters.</param>
    /// <param name="cmd">the command to sent, for handling simplicity all commands must be alphanumeric only</param>
    /// <param name="arg">the argument for the command</param>
    /// <param name="ts">time at which the command should be sent to the filter</param>
    public static int avfilter_graph_queue_command(AVFilterGraph* @graph, string @target, string @cmd, string @arg, int @flags, double @ts) => vectors.avfilter_graph_queue_command(@graph, @target, @cmd, @arg, @flags, @ts);

    /// <summary>Request a frame on the oldest sink link.</summary>
    /// <returns>the return value of ff_request_frame(), or AVERROR_EOF if all links returned AVERROR_EOF</returns>
    public static int avfilter_graph_request_oldest(AVFilterGraph* @graph) => vectors.avfilter_graph_request_oldest(@graph);

    /// <summary>Send a command to one or more filter instances.</summary>
    /// <param name="graph">the filter graph</param>
    /// <param name="target">the filter(s) to which the command should be sent "all" sends to all filters otherwise it can be a filter or filter instance name which will send the command to all matching filters.</param>
    /// <param name="cmd">the command to send, for handling simplicity all commands must be alphanumeric only</param>
    /// <param name="arg">the argument for the command</param>
    /// <param name="res">a buffer with size res_size where the filter(s) can return a response.</param>
    public static int avfilter_graph_send_command(AVFilterGraph* @graph, string @target, string @cmd, string @arg, byte* @res, int @res_len, int @flags) => vectors.avfilter_graph_send_command(@graph, @target, @cmd, @arg, @res, @res_len, @flags);

    /// <summary>Enable or disable automatic format conversion inside the graph.</summary>
    /// <param name="flags">any of the AVFILTER_AUTO_CONVERT_* constants</param>
    public static void avfilter_graph_set_auto_convert(AVFilterGraph* @graph, uint @flags) => vectors.avfilter_graph_set_auto_convert(@graph, @flags);

    /// <summary>Initialize a filter with the supplied dictionary of options.</summary>
    /// <param name="ctx">uninitialized filter context to initialize</param>
    /// <param name="options">An AVDictionary filled with options for this filter. On return this parameter will be destroyed and replaced with a dict containing options that were not found. This dictionary must be freed by the caller. May be NULL, then this function is equivalent to avfilter_init_str() with the second parameter set to NULL.</param>
    /// <returns>0 on success, a negative AVERROR on failure</returns>
    public static int avfilter_init_dict(AVFilterContext* @ctx, AVDictionary** @options) => vectors.avfilter_init_dict(@ctx, @options);

    /// <summary>Initialize a filter with the supplied parameters.</summary>
    /// <param name="ctx">uninitialized filter context to initialize</param>
    /// <param name="args">Options to initialize the filter with. This must be a ':'-separated list of options in the 'key=value' form. May be NULL if the options have been set directly using the AVOptions API or there are no options that need to be set.</param>
    /// <returns>0 on success, a negative AVERROR on failure</returns>
    public static int avfilter_init_str(AVFilterContext* @ctx, string @args) => vectors.avfilter_init_str(@ctx, @args);

    /// <summary>Allocate a single AVFilterInOut entry. Must be freed with avfilter_inout_free().</summary>
    /// <returns>allocated AVFilterInOut on success, NULL on failure.</returns>
    public static AVFilterInOut* avfilter_inout_alloc() => vectors.avfilter_inout_alloc();

    /// <summary>Free the supplied list of AVFilterInOut and set *inout to NULL. If *inout is NULL, do nothing.</summary>
    public static void avfilter_inout_free(AVFilterInOut** @inout) => vectors.avfilter_inout_free(@inout);

    /// <summary>Insert a filter in the middle of an existing link.</summary>
    /// <param name="link">the link into which the filter should be inserted</param>
    /// <param name="filt">the filter to be inserted</param>
    /// <param name="filt_srcpad_idx">the input pad on the filter to connect</param>
    /// <param name="filt_dstpad_idx">the output pad on the filter to connect</param>
    /// <returns>zero on success</returns>
    public static int avfilter_insert_filter(AVFilterLink* @link, AVFilterContext* @filt, uint @filt_srcpad_idx, uint @filt_dstpad_idx) => vectors.avfilter_insert_filter(@link, @filt, @filt_srcpad_idx, @filt_dstpad_idx);

    /// <summary>Return the libavfilter license.</summary>
    public static string avfilter_license() => vectors.avfilter_license();

    /// <summary>Link two filters together.</summary>
    /// <param name="src">the source filter</param>
    /// <param name="srcpad">index of the output pad on the source filter</param>
    /// <param name="dst">the destination filter</param>
    /// <param name="dstpad">index of the input pad on the destination filter</param>
    /// <returns>zero on success</returns>
    public static int avfilter_link(AVFilterContext* @src, uint @srcpad, AVFilterContext* @dst, uint @dstpad) => vectors.avfilter_link(@src, @srcpad, @dst, @dstpad);

    /// <summary>Free the link in *link, and set its pointer to NULL.</summary>
    public static void avfilter_link_free(AVFilterLink** @link) => vectors.avfilter_link_free(@link);

    /// <summary>Get the number of elements in an AVFilter's inputs or outputs array.</summary>
    [Obsolete("Use avfilter_filter_pad_count() instead.")]
    public static int avfilter_pad_count(AVFilterPad* @pads) => vectors.avfilter_pad_count(@pads);

    /// <summary>Get the name of an AVFilterPad.</summary>
    /// <param name="pads">an array of AVFilterPads</param>
    /// <param name="pad_idx">index of the pad in the array; it is the caller's responsibility to ensure the index is valid</param>
    /// <returns>name of the pad_idx'th pad in pads</returns>
    public static string avfilter_pad_get_name(AVFilterPad* @pads, int @pad_idx) => vectors.avfilter_pad_get_name(@pads, @pad_idx);

    /// <summary>Get the type of an AVFilterPad.</summary>
    /// <param name="pads">an array of AVFilterPads</param>
    /// <param name="pad_idx">index of the pad in the array; it is the caller's responsibility to ensure the index is valid</param>
    /// <returns>type of the pad_idx'th pad in pads</returns>
    public static AVMediaType avfilter_pad_get_type(AVFilterPad* @pads, int @pad_idx) => vectors.avfilter_pad_get_type(@pads, @pad_idx);

    /// <summary>Make the filter instance process a command. It is recommended to use avfilter_graph_send_command().</summary>
    public static int avfilter_process_command(AVFilterContext* @filter, string @cmd, string @arg, byte* @res, int @res_len, int @flags) => vectors.avfilter_process_command(@filter, @cmd, @arg, @res, @res_len, @flags);

    /// <summary>Return the LIBAVFILTER_VERSION_INT constant.</summary>
    public static uint avfilter_version() => vectors.avfilter_version();

    /// Allocate an AVFormatContext.
    /// avformat_free_context() can be used to free the context and everything allocated by the framework within it.
    public static AVFormatContext* avformat_alloc_context() => vectors.avformat_alloc_context();

    /// <summary>Allocate an AVFormatContext for an output format. avformat_free_context() can be used to free the context and everything allocated by the framework within it.</summary>
    /// <param name="oformat">format to use for allocating the context, if NULL format_name and filename are used instead</param>
    /// <param name="format_name">the name of output format to use for allocating the context, if NULL filename is used instead</param>
    /// <param name="filename">the name of the filename to use for allocating the context, may be NULL</param>
    /// <returns>&gt;= 0 in case of success, a negative AVERROR code in case of failure</returns>
    public static int avformat_alloc_output_context2(AVFormatContext** @ctx, AVOutputFormat* @oformat, string @format_name, string @filename) => vectors.avformat_alloc_output_context2(@ctx, @oformat, @format_name, @filename);

    /// <summary>Close an opened input AVFormatContext. Free it and all its contents and set *s to NULL.</summary>
    public static void avformat_close_input(AVFormatContext** @s) => vectors.avformat_close_input(@s);

    /// <summary>Return the libavformat build-time configuration.</summary>
    public static string avformat_configuration() => vectors.avformat_configuration();

    /// <summary>Read packets of a media file to get stream information. This is useful for file formats with no headers such as MPEG. This function also computes the real framerate in case of MPEG-2 repeat frame mode. The logical file position is not changed by this function; examined packets may be buffered for later processing.</summary>
    /// <param name="ic">media file handle</param>
    /// <param name="options">If non-NULL, an ic.nb_streams long array of pointers to dictionaries, where i-th member contains options for codec corresponding to i-th stream. On return each dictionary will be filled with options that were not found.</param>
    /// <returns>&gt;=0 if OK, AVERROR_xxx on error</returns>
    public static int avformat_find_stream_info(AVFormatContext* @ic, AVDictionary** @options) => vectors.avformat_find_stream_info(@ic, @options);

    /// <summary>Discard all internally buffered data. This can be useful when dealing with discontinuities in the byte stream. Generally works only with formats that can resync. This includes headerless formats like MPEG-TS/TS but should also work with NUT, Ogg and in a limited way AVI for example.</summary>
    /// <param name="s">media file handle</param>
    /// <returns>&gt;=0 on success, error code otherwise</returns>
    public static int avformat_flush(AVFormatContext* @s) => vectors.avformat_flush(@s);

    /// <summary>Free an AVFormatContext and all its streams.</summary>
    /// <param name="s">context to free</param>
    public static void avformat_free_context(AVFormatContext* @s) => vectors.avformat_free_context(@s);

    /// <summary>Get the AVClass for AVFormatContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options.</summary>
    public static AVClass* avformat_get_class() => vectors.avformat_get_class();

    /// <summary>Returns the table mapping MOV FourCCs for audio to AVCodecID.</summary>
    /// <returns>the table mapping MOV FourCCs for audio to AVCodecID.</returns>
    public static AVCodecTag* avformat_get_mov_audio_tags() => vectors.avformat_get_mov_audio_tags();

    /// <summary>Returns the table mapping MOV FourCCs for video to libavcodec AVCodecID.</summary>
    /// <returns>the table mapping MOV FourCCs for video to libavcodec AVCodecID.</returns>
    public static AVCodecTag* avformat_get_mov_video_tags() => vectors.avformat_get_mov_video_tags();

    /// <summary>Returns the table mapping RIFF FourCCs for audio to AVCodecID.</summary>
    /// <returns>the table mapping RIFF FourCCs for audio to AVCodecID.</returns>
    public static AVCodecTag* avformat_get_riff_audio_tags() => vectors.avformat_get_riff_audio_tags();

    /// <summary>@{ Get the tables mapping RIFF FourCCs to libavcodec AVCodecIDs. The tables are meant to be passed to av_codec_get_id()/av_codec_get_tag() as in the following code:</summary>
    /// <returns>the table mapping RIFF FourCCs for video to libavcodec AVCodecID.</returns>
    public static AVCodecTag* avformat_get_riff_video_tags() => vectors.avformat_get_riff_video_tags();

    /// <summary>Get the index entry count for the given AVStream.</summary>
    /// <param name="st">stream</param>
    /// <returns>the number of index entries in the stream</returns>
    public static int avformat_index_get_entries_count(AVStream* @st) => vectors.avformat_index_get_entries_count(@st);

    /// <summary>Get the AVIndexEntry corresponding to the given index.</summary>
    /// <param name="st">Stream containing the requested AVIndexEntry.</param>
    /// <param name="idx">The desired index.</param>
    /// <returns>A pointer to the requested AVIndexEntry if it exists, NULL otherwise.</returns>
    public static AVIndexEntry* avformat_index_get_entry(AVStream* @st, int @idx) => vectors.avformat_index_get_entry(@st, @idx);

    /// <summary>Get the AVIndexEntry corresponding to the given timestamp.</summary>
    /// <param name="st">Stream containing the requested AVIndexEntry.</param>
    /// <param name="flags">If AVSEEK_FLAG_BACKWARD then the returned entry will correspond to the timestamp which is &lt;= the requested one, if backward is 0, then it will be &gt;= if AVSEEK_FLAG_ANY seek to any frame, only keyframes otherwise.</param>
    /// <returns>A pointer to the requested AVIndexEntry if it exists, NULL otherwise.</returns>
    public static AVIndexEntry* avformat_index_get_entry_from_timestamp(AVStream* @st, long @wanted_timestamp, int @flags) => vectors.avformat_index_get_entry_from_timestamp(@st, @wanted_timestamp, @flags);

    /// <summary>Allocate the stream private data and initialize the codec, but do not write the header. May optionally be used before avformat_write_header to initialize stream parameters before actually writing the header. If using this function, do not pass the same options to avformat_write_header.</summary>
    /// <param name="s">Media file handle, must be allocated with avformat_alloc_context(). Its oformat field must be set to the desired output format; Its pb field must be set to an already opened AVIOContext.</param>
    /// <param name="options">An AVDictionary filled with AVFormatContext and muxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL.</param>
    /// <returns>AVSTREAM_INIT_IN_WRITE_HEADER on success if the codec requires avformat_write_header to fully initialize, AVSTREAM_INIT_IN_INIT_OUTPUT on success if the codec has been fully initialized, negative AVERROR on failure.</returns>
    public static int avformat_init_output(AVFormatContext* @s, AVDictionary** @options) => vectors.avformat_init_output(@s, @options);

    /// <summary>Return the libavformat license.</summary>
    public static string avformat_license() => vectors.avformat_license();

    /// <summary>Check if the stream st contained in s is matched by the stream specifier spec.</summary>
    /// <returns>&gt;0 if st is matched by spec; 0 if st is not matched by spec; AVERROR code if spec is invalid</returns>
    public static int avformat_match_stream_specifier(AVFormatContext* @s, AVStream* @st, string @spec) => vectors.avformat_match_stream_specifier(@s, @st, @spec);

    /// <summary>Undo the initialization done by avformat_network_init. Call it only once for each time you called avformat_network_init.</summary>
    public static int avformat_network_deinit() => vectors.avformat_network_deinit();

    /// <summary>Do global initialization of network libraries. This is optional, and not recommended anymore.</summary>
    public static int avformat_network_init() => vectors.avformat_network_init();

    /// <summary>Add a new stream to a media file.</summary>
    /// <param name="s">media file handle</param>
    /// <param name="c">unused, does nothing</param>
    /// <returns>newly created stream or NULL on error.</returns>
    public static AVStream* avformat_new_stream(AVFormatContext* @s, AVCodec* @c) => vectors.avformat_new_stream(@s, @c);

    /// <summary>Open an input stream and read the header. The codecs are not opened. The stream must be closed with avformat_close_input().</summary>
    /// <param name="ps">Pointer to user-supplied AVFormatContext (allocated by avformat_alloc_context). May be a pointer to NULL, in which case an AVFormatContext is allocated by this function and written into ps. Note that a user-supplied AVFormatContext will be freed on failure.</param>
    /// <param name="url">URL of the stream to open.</param>
    /// <param name="fmt">If non-NULL, this parameter forces a specific input format. Otherwise the format is autodetected.</param>
    /// <param name="options">A dictionary filled with AVFormatContext and demuxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL.</param>
    /// <returns>0 on success, a negative AVERROR on failure.</returns>
    public static int avformat_open_input(AVFormatContext** @ps, string @url, AVInputFormat* @fmt, AVDictionary** @options) => vectors.avformat_open_input(@ps, @url, @fmt, @options);

    /// <summary>Test if the given container can store a codec.</summary>
    /// <param name="ofmt">container to check for compatibility</param>
    /// <param name="codec_id">codec to potentially store in container</param>
    /// <param name="std_compliance">standards compliance level, one of FF_COMPLIANCE_*</param>
    /// <returns>1 if codec with ID codec_id can be stored in ofmt, 0 if it cannot. A negative number if this information is not available.</returns>
    public static int avformat_query_codec(AVOutputFormat* @ofmt, AVCodecID @codec_id, int @std_compliance) => vectors.avformat_query_codec(@ofmt, @codec_id, @std_compliance);

    public static int avformat_queue_attached_pictures(AVFormatContext* @s) => vectors.avformat_queue_attached_pictures(@s);

    /// <summary>Seek to timestamp ts. Seeking will be done so that the point from which all active streams can be presented successfully will be closest to ts and within min/max_ts. Active streams are all streams that have AVStream.discard &lt; AVDISCARD_ALL.</summary>
    /// <param name="s">media file handle</param>
    /// <param name="stream_index">index of the stream which is used as time base reference</param>
    /// <param name="min_ts">smallest acceptable timestamp</param>
    /// <param name="ts">target timestamp</param>
    /// <param name="max_ts">largest acceptable timestamp</param>
    /// <param name="flags">flags</param>
    /// <returns>&gt;=0 on success, error code otherwise</returns>
    public static int avformat_seek_file(AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags) => vectors.avformat_seek_file(@s, @stream_index, @min_ts, @ts, @max_ts, @flags);

    /// <summary>Transfer internal timing information from one stream to another.</summary>
    /// <param name="ofmt">target output format for ost</param>
    /// <param name="ost">output stream which needs timings copy and adjustments</param>
    /// <param name="ist">reference input stream to copy timings from</param>
    /// <param name="copy_tb">define from where the stream codec timebase needs to be imported</param>
    public static int avformat_transfer_internal_stream_timing_info(AVOutputFormat* @ofmt, AVStream* @ost, AVStream* @ist, AVTimebaseSource @copy_tb) => vectors.avformat_transfer_internal_stream_timing_info(@ofmt, @ost, @ist, @copy_tb);

    /// <summary>Return the LIBAVFORMAT_VERSION_INT constant.</summary>
    public static uint avformat_version() => vectors.avformat_version();

    /// <summary>Allocate the stream private data and write the stream header to an output media file.</summary>
    /// <param name="s">Media file handle, must be allocated with avformat_alloc_context(). Its oformat field must be set to the desired output format; Its pb field must be set to an already opened AVIOContext.</param>
    /// <param name="options">An AVDictionary filled with AVFormatContext and muxer-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL.</param>
    /// <returns>AVSTREAM_INIT_IN_WRITE_HEADER on success if the codec had not already been fully initialized in avformat_init, AVSTREAM_INIT_IN_INIT_OUTPUT on success if the codec had already been fully initialized in avformat_init, negative AVERROR on failure.</returns>
    public static int avformat_write_header(AVFormatContext* @s, AVDictionary** @options) => vectors.avformat_write_header(@s, @options);

    /// <summary>Accept and allocate a client context on a server context.</summary>
    /// <param name="s">the server context</param>
    /// <param name="c">the client context, must be unallocated</param>
    /// <returns>&gt;= 0 on success or a negative value corresponding to an AVERROR on failure</returns>
    public static int avio_accept(AVIOContext* @s, AVIOContext** @c) => vectors.avio_accept(@s, @c);

    /// Allocate and initialize an AVIOContext for buffered I/O. It must be later freed with avio_context_free().
    /// Memory block for input/output operations via AVIOContext.
    /// The buffer must be allocated with av_malloc() and friends. It may be freed and replaced with a new buffer by libavformat. AVIOContext.buffer holds the buffer currently in use, which must be later freed with av_free().
    /// <param name="buffer_size">The buffer size is very important for performance. For protocols with fixed blocksize it should be set to this blocksize. For others a typical size is a cache page, e.g. 4kb.</param>
    /// <param name="write_flag">Set to 1 if the buffer should be writable, 0 otherwise.</param>
    /// <param name="opaque">An opaque pointer to user-specific data.</param>
    /// <param name="read_packet">A function for refilling the buffer, may be NULL. For stream protocols, must never return 0 but rather a proper AVERROR code.</param>
    /// <param name="write_packet">A function for writing the buffer contents, may be NULL. The function may not change the input buffers content.</param>
    /// <param name="seek">A function for seeking to specified byte position, may be NULL.</param>
    /// <returns>Allocated AVIOContext or NULL on failure.</returns>
    public static AVIOContext* avio_alloc_context(byte* @buffer, int @buffer_size, int @write_flag, void* @opaque, avio_alloc_context_read_packet_func @read_packet, avio_alloc_context_write_packet_func @write_packet, avio_alloc_context_seek_func @seek) => vectors.avio_alloc_context(@buffer, @buffer_size, @write_flag, @opaque, @read_packet, @write_packet, @seek);

    /// <summary>Return AVIO_FLAG_* access flags corresponding to the access permissions of the resource in url, or a negative value corresponding to an AVERROR code in case of failure. The returned access flags are masked by the value in flags.</summary>
    public static int avio_check(string @url, int @flags) => vectors.avio_check(@url, @flags);

    /// <summary>Close the resource accessed by the AVIOContext s and free it. This function can only be used if s was opened by avio_open().</summary>
    /// <returns>0 on success, an AVERROR &lt; 0 on error.</returns>
    public static int avio_close(AVIOContext* @s) => vectors.avio_close(@s);

    /// <summary>Close directory.</summary>
    /// <param name="s">directory read context.</param>
    /// <returns>&gt;=0 on success or negative on error.</returns>
    public static int avio_close_dir(AVIODirContext** @s) => vectors.avio_close_dir(@s);

    /// <summary>Return the written size and a pointer to the buffer. The buffer must be freed with av_free(). Padding of AV_INPUT_BUFFER_PADDING_SIZE is added to the buffer.</summary>
    /// <param name="s">IO context</param>
    /// <param name="pbuffer">pointer to a byte buffer</param>
    /// <returns>the length of the byte buffer</returns>
    public static int avio_close_dyn_buf(AVIOContext* @s, byte** @pbuffer) => vectors.avio_close_dyn_buf(@s, @pbuffer);

    /// <summary>Close the resource accessed by the AVIOContext *s, free it and set the pointer pointing to it to NULL. This function can only be used if s was opened by avio_open().</summary>
    /// <returns>0 on success, an AVERROR &lt; 0 on error.</returns>
    public static int avio_closep(AVIOContext** @s) => vectors.avio_closep(@s);

    /// <summary>Free the supplied IO context and everything associated with it.</summary>
    /// <param name="s">Double pointer to the IO context. This function will write NULL into s.</param>
    public static void avio_context_free(AVIOContext** @s) => vectors.avio_context_free(@s);

    /// <summary>Iterate through names of available protocols.</summary>
    /// <param name="opaque">A private pointer representing current protocol. It must be a pointer to NULL on first iteration and will be updated by successive calls to avio_enum_protocols.</param>
    /// <param name="output">If set to 1, iterate over output protocols, otherwise over input protocols.</param>
    /// <returns>A static string containing the name of current protocol or NULL</returns>
    public static string avio_enum_protocols(void** @opaque, int @output) => vectors.avio_enum_protocols(@opaque, @output);

    /// <summary>Similar to feof() but also returns nonzero on read errors.</summary>
    /// <returns>non zero if and only if at end of file or a read error happened when reading.</returns>
    public static int avio_feof(AVIOContext* @s) => vectors.avio_feof(@s);

    /// <summary>Return the name of the protocol that will handle the passed URL.</summary>
    /// <returns>Name of the protocol or NULL.</returns>
    public static string avio_find_protocol_name(string @url) => vectors.avio_find_protocol_name(@url);

    /// <summary>Force flushing of buffered data.</summary>
    public static void avio_flush(AVIOContext* @s) => vectors.avio_flush(@s);

    /// <summary>Free entry allocated by avio_read_dir().</summary>
    /// <param name="entry">entry to be freed.</param>
    public static void avio_free_directory_entry(AVIODirEntry** @entry) => vectors.avio_free_directory_entry(@entry);

    /// <summary>Return the written size and a pointer to the buffer. The AVIOContext stream is left intact. The buffer must NOT be freed. No padding is added to the buffer.</summary>
    /// <param name="s">IO context</param>
    /// <param name="pbuffer">pointer to a byte buffer</param>
    /// <returns>the length of the byte buffer</returns>
    public static int avio_get_dyn_buf(AVIOContext* @s, byte** @pbuffer) => vectors.avio_get_dyn_buf(@s, @pbuffer);

    /// <summary>Read a string from pb into buf. The reading will terminate when either a NULL character was encountered, maxlen bytes have been read, or nothing more can be read from pb. The result is guaranteed to be NULL-terminated, it will be truncated if buf is too small. Note that the string is not interpreted or validated in any way, it might get truncated in the middle of a sequence for multi-byte encodings.</summary>
    /// <returns>number of bytes read (is always &lt;= maxlen). If reading ends on EOF or error, the return value will be one more than bytes actually read.</returns>
    public static int avio_get_str(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => vectors.avio_get_str(@pb, @maxlen, @buf, @buflen);

    public static int avio_get_str16be(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => vectors.avio_get_str16be(@pb, @maxlen, @buf, @buflen);

    /// <summary>Read a UTF-16 string from pb and convert it to UTF-8. The reading will terminate when either a null or invalid character was encountered or maxlen bytes have been read.</summary>
    /// <returns>number of bytes read (is always &lt;= maxlen)</returns>
    public static int avio_get_str16le(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen) => vectors.avio_get_str16le(@pb, @maxlen, @buf, @buflen);

    /// <summary>Perform one step of the protocol handshake to accept a new client. This function must be called on a client returned by avio_accept() before using it as a read/write context. It is separate from avio_accept() because it may block. A step of the handshake is defined by places where the application may decide to change the proceedings. For example, on a protocol with a request header and a reply header, each one can constitute a step because the application may use the parameters from the request to change parameters in the reply; or each individual chunk of the request can constitute a step. If the handshake is already finished, avio_handshake() does nothing and returns 0 immediately.</summary>
    /// <param name="c">the client context to perform the handshake on</param>
    /// <returns>0 on a complete and successful handshake &gt; 0 if the handshake progressed, but is not complete &lt; 0 for an AVERROR code</returns>
    public static int avio_handshake(AVIOContext* @c) => vectors.avio_handshake(@c);

    /// <summary>Create and initialize a AVIOContext for accessing the resource indicated by url.</summary>
    /// <param name="s">Used to return the pointer to the created AVIOContext. In case of failure the pointed to value is set to NULL.</param>
    /// <param name="url">resource to access</param>
    /// <param name="flags">flags which control how the resource indicated by url is to be opened</param>
    /// <returns>&gt;= 0 in case of success, a negative value corresponding to an AVERROR code in case of failure</returns>
    public static int avio_open(AVIOContext** @s, string @url, int @flags) => vectors.avio_open(@s, @url, @flags);

    /// <summary>Open directory for reading.</summary>
    /// <param name="s">directory read context. Pointer to a NULL pointer must be passed.</param>
    /// <param name="url">directory to be listed.</param>
    /// <param name="options">A dictionary filled with protocol-private options. On return this parameter will be destroyed and replaced with a dictionary containing options that were not found. May be NULL.</param>
    /// <returns>&gt;=0 on success or negative on error.</returns>
    public static int avio_open_dir(AVIODirContext** @s, string @url, AVDictionary** @options) => vectors.avio_open_dir(@s, @url, @options);

    /// <summary>Open a write only memory stream.</summary>
    /// <param name="s">new IO context</param>
    /// <returns>zero if no error.</returns>
    public static int avio_open_dyn_buf(AVIOContext** @s) => vectors.avio_open_dyn_buf(@s);

    /// <summary>Create and initialize a AVIOContext for accessing the resource indicated by url.</summary>
    /// <param name="s">Used to return the pointer to the created AVIOContext. In case of failure the pointed to value is set to NULL.</param>
    /// <param name="url">resource to access</param>
    /// <param name="flags">flags which control how the resource indicated by url is to be opened</param>
    /// <param name="int_cb">an interrupt callback to be used at the protocols level</param>
    /// <param name="options">A dictionary filled with protocol-private options. On return this parameter will be destroyed and replaced with a dict containing options that were not found. May be NULL.</param>
    /// <returns>&gt;= 0 in case of success, a negative value corresponding to an AVERROR code in case of failure</returns>
    public static int avio_open2(AVIOContext** @s, string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options) => vectors.avio_open2(@s, @url, @flags, @int_cb, @options);

    /// <summary>Pause and resume playing - only meaningful if using a network streaming protocol (e.g. MMS).</summary>
    /// <param name="h">IO context from which to call the read_pause function pointer</param>
    /// <param name="pause">1 for pause, 0 for resume</param>
    public static int avio_pause(AVIOContext* @h, int @pause) => vectors.avio_pause(@h, @pause);

    /// <summary>Write a NULL terminated array of strings to the context. Usually you don't need to use this function directly but its macro wrapper, avio_print.</summary>
    public static void avio_print_string_array(AVIOContext* @s, byte*[] @strings) => vectors.avio_print_string_array(@s, @strings);

    /// <summary>Writes a formatted string to the context.</summary>
    /// <returns>number of bytes written, &lt; 0 on error.</returns>
    public static int avio_printf(AVIOContext* @s, string @fmt) => vectors.avio_printf(@s, @fmt);

    /// <summary>Get AVClass by names of available protocols.</summary>
    /// <returns>A AVClass of input protocol name or NULL</returns>
    public static AVClass* avio_protocol_get_class(string @name) => vectors.avio_protocol_get_class(@name);

    /// <summary>Write a NULL-terminated string.</summary>
    /// <returns>number of bytes written.</returns>
    public static int avio_put_str(AVIOContext* @s, string @str) => vectors.avio_put_str(@s, @str);

    /// <summary>Convert an UTF-8 string to UTF-16BE and write it.</summary>
    /// <param name="s">the AVIOContext</param>
    /// <param name="str">NULL-terminated UTF-8 string</param>
    /// <returns>number of bytes written.</returns>
    public static int avio_put_str16be(AVIOContext* @s, string @str) => vectors.avio_put_str16be(@s, @str);

    /// <summary>Convert an UTF-8 string to UTF-16LE and write it.</summary>
    /// <param name="s">the AVIOContext</param>
    /// <param name="str">NULL-terminated UTF-8 string</param>
    /// <returns>number of bytes written.</returns>
    public static int avio_put_str16le(AVIOContext* @s, string @str) => vectors.avio_put_str16le(@s, @str);

    /// @{
    public static int avio_r8(AVIOContext* @s) => vectors.avio_r8(@s);

    public static uint avio_rb16(AVIOContext* @s) => vectors.avio_rb16(@s);

    public static uint avio_rb24(AVIOContext* @s) => vectors.avio_rb24(@s);

    public static uint avio_rb32(AVIOContext* @s) => vectors.avio_rb32(@s);

    public static ulong avio_rb64(AVIOContext* @s) => vectors.avio_rb64(@s);

    /// <summary>Read size bytes from AVIOContext into buf.</summary>
    /// <returns>number of bytes read or AVERROR</returns>
    public static int avio_read(AVIOContext* @s, byte* @buf, int @size) => vectors.avio_read(@s, @buf, @size);

    /// <summary>Get next directory entry.</summary>
    /// <param name="s">directory read context.</param>
    /// <param name="next">next entry or NULL when no more entries.</param>
    /// <returns>&gt;=0 on success or negative on error. End of list is not considered an error.</returns>
    public static int avio_read_dir(AVIODirContext* @s, AVIODirEntry** @next) => vectors.avio_read_dir(@s, @next);

    /// <summary>Read size bytes from AVIOContext into buf. Unlike avio_read(), this is allowed to read fewer bytes than requested. The missing bytes can be read in the next call. This always tries to read at least 1 byte. Useful to reduce latency in certain cases.</summary>
    /// <returns>number of bytes read or AVERROR</returns>
    public static int avio_read_partial(AVIOContext* @s, byte* @buf, int @size) => vectors.avio_read_partial(@s, @buf, @size);

    /// <summary>Read contents of h into print buffer, up to max_size bytes, or up to EOF.</summary>
    /// <returns>0 for success (max_size bytes read or EOF reached), negative error code otherwise</returns>
    public static int avio_read_to_bprint(AVIOContext* @h, AVBPrint* @pb, ulong @max_size) => vectors.avio_read_to_bprint(@h, @pb, @max_size);

    public static uint avio_rl16(AVIOContext* @s) => vectors.avio_rl16(@s);

    public static uint avio_rl24(AVIOContext* @s) => vectors.avio_rl24(@s);

    public static uint avio_rl32(AVIOContext* @s) => vectors.avio_rl32(@s);

    public static ulong avio_rl64(AVIOContext* @s) => vectors.avio_rl64(@s);

    /// <summary>fseek() equivalent for AVIOContext.</summary>
    /// <returns>new position or AVERROR.</returns>
    public static long avio_seek(AVIOContext* @s, long @offset, int @whence) => vectors.avio_seek(@s, @offset, @whence);

    /// <summary>Seek to a given timestamp relative to some component stream. Only meaningful if using a network streaming protocol (e.g. MMS.).</summary>
    /// <param name="h">IO context from which to call the seek function pointers</param>
    /// <param name="stream_index">The stream index that the timestamp is relative to. If stream_index is (-1) the timestamp should be in AV_TIME_BASE units from the beginning of the presentation. If a stream_index &gt;= 0 is used and the protocol does not support seeking based on component streams, the call will fail.</param>
    /// <param name="timestamp">timestamp in AVStream.time_base units or if there is no stream specified then in AV_TIME_BASE units.</param>
    /// <param name="flags">Optional combination of AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE and AVSEEK_FLAG_ANY. The protocol may silently ignore AVSEEK_FLAG_BACKWARD and AVSEEK_FLAG_ANY, but AVSEEK_FLAG_BYTE will fail if used and not supported.</param>
    /// <returns>&gt;= 0 on success</returns>
    public static long avio_seek_time(AVIOContext* @h, int @stream_index, long @timestamp, int @flags) => vectors.avio_seek_time(@h, @stream_index, @timestamp, @flags);

    /// <summary>Get the filesize.</summary>
    /// <returns>filesize or AVERROR</returns>
    public static long avio_size(AVIOContext* @s) => vectors.avio_size(@s);

    /// <summary>Skip given number of bytes forward</summary>
    /// <returns>new position or AVERROR.</returns>
    public static long avio_skip(AVIOContext* @s, long @offset) => vectors.avio_skip(@s, @offset);

    /// <summary>Writes a formatted string to the context taking a va_list.</summary>
    /// <returns>number of bytes written, &lt; 0 on error.</returns>
    public static int avio_vprintf(AVIOContext* @s, string @fmt, byte* @ap) => vectors.avio_vprintf(@s, @fmt, @ap);

    public static void avio_w8(AVIOContext* @s, int @b) => vectors.avio_w8(@s, @b);

    public static void avio_wb16(AVIOContext* @s, uint @val) => vectors.avio_wb16(@s, @val);

    public static void avio_wb24(AVIOContext* @s, uint @val) => vectors.avio_wb24(@s, @val);

    public static void avio_wb32(AVIOContext* @s, uint @val) => vectors.avio_wb32(@s, @val);

    public static void avio_wb64(AVIOContext* @s, ulong @val) => vectors.avio_wb64(@s, @val);

    public static void avio_wl16(AVIOContext* @s, uint @val) => vectors.avio_wl16(@s, @val);

    public static void avio_wl24(AVIOContext* @s, uint @val) => vectors.avio_wl24(@s, @val);

    public static void avio_wl32(AVIOContext* @s, uint @val) => vectors.avio_wl32(@s, @val);

    public static void avio_wl64(AVIOContext* @s, ulong @val) => vectors.avio_wl64(@s, @val);

    public static void avio_write(AVIOContext* @s, byte* @buf, int @size) => vectors.avio_write(@s, @buf, @size);

    /// <summary>Mark the written bytestream as a specific type.</summary>
    /// <param name="time">the stream time the current bytestream pos corresponds to (in AV_TIME_BASE units), or AV_NOPTS_VALUE if unknown or not applicable</param>
    /// <param name="type">the kind of data written starting at the current pos</param>
    public static void avio_write_marker(AVIOContext* @s, long @time, AVIODataMarkerType @type) => vectors.avio_write_marker(@s, @time, @type);

    /// <summary>Free all allocated data in the given subtitle struct.</summary>
    /// <param name="sub">AVSubtitle to free.</param>
    public static void avsubtitle_free(AVSubtitle* @sub) => vectors.avsubtitle_free(@sub);

    /// <summary>Return the libavutil build-time configuration.</summary>
/// <summary>Return the libavutil build-time configuration.</summary>
public static string avutil_configuration()
{
    return vectors.avutil_configuration();
}

/// <summary>Return the libavutil license.</summary>
public static string avutil_license()
{
    return vectors.avutil_license();
}

/// <summary>Return the LIBAVUTIL_VERSION_INT constant.</summary>
public static uint avutil_version()
{
    return vectors.avutil_version();
}

/// <summary>Return the libpostproc build-time configuration.</summary>
public static string postproc_configuration()
{
    return vectors.postproc_configuration();
}

/// <summary>Return the libpostproc license.</summary>
public static string postproc_license()
{
    return vectors.postproc_license();
}

/// <summary>Return the LIBPOSTPROC_VERSION_INT constant.</summary>
public static uint postproc_version()
{
    return vectors.postproc_version();
}

/// <summary>Forwards to the loaded native pp_free_context.</summary>
public static void pp_free_context(void* ppContext)
{
    vectors.pp_free_context(ppContext);
}

/// <summary>Forwards to the loaded native pp_free_mode.</summary>
public static void pp_free_mode(void* mode)
{
    vectors.pp_free_mode(mode);
}

/// <summary>Forwards to the loaded native pp_get_context.</summary>
public static void* pp_get_context(int width, int height, int flags)
{
    return vectors.pp_get_context(width, height, flags);
}

/// <summary>Return a pp_mode or NULL if an error occurred.</summary>
/// <param name="name">The string after "-pp" on the command line.</param>
/// <param name="quality">A number from 0 to PP_QUALITY_MAX.</param>
public static void* pp_get_mode_by_name_and_quality(string name, int quality)
{
    return vectors.pp_get_mode_by_name_and_quality(name, quality);
}

/// <summary>Forwards to the loaded native pp_postprocess.</summary>
public static void pp_postprocess(in byte_ptrArray3 src, in int_array3 srcStride, ref byte_ptrArray3 dst, in int_array3 dstStride, int horizontalSize, int verticalSize, sbyte* QP_store, int QP_stride, void* mode, void* ppContext, int pict_type)
{
    vectors.pp_postprocess(src, srcStride, ref dst, dstStride, horizontalSize, verticalSize, QP_store, QP_stride, mode, ppContext, pict_type);
}

/// <summary>Allocate SwrContext.</summary>
/// <returns>NULL on error, allocated context otherwise.</returns>
public static SwrContext* swr_alloc()
{
    return vectors.swr_alloc();
}
/// <summary>Allocate SwrContext if needed and set/reset common parameters.</summary>
/// <param name="s">Existing Swr context if available, or NULL if not.</param>
/// <param name="out_ch_layout">Output channel layout (AV_CH_LAYOUT_*).</param>
/// <param name="out_sample_fmt">Output sample format (AV_SAMPLE_FMT_*).</param>
/// <param name="out_sample_rate">Output sample rate (frequency in Hz).</param>
/// <param name="in_ch_layout">Input channel layout (AV_CH_LAYOUT_*).</param>
/// <param name="in_sample_fmt">Input sample format (AV_SAMPLE_FMT_*).</param>
/// <param name="in_sample_rate">Input sample rate (frequency in Hz).</param>
/// <param name="log_offset">Logging level offset.</param>
/// <param name="log_ctx">Parent logging context, can be NULL.</param>
/// <returns>NULL on error, allocated context otherwise.</returns>
[Obsolete("use ")]
public static SwrContext* swr_alloc_set_opts(SwrContext* s, long out_ch_layout, AVSampleFormat out_sample_fmt, int out_sample_rate, long in_ch_layout, AVSampleFormat in_sample_fmt, int in_sample_rate, int log_offset, void* log_ctx)
{
    return vectors.swr_alloc_set_opts(s, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, log_offset, log_ctx);
}

/// <summary>Allocate SwrContext if needed and set/reset common parameters.</summary>
/// <param name="ps">Pointer to an existing Swr context if available, or to NULL if not. On success, *ps will be set to the allocated context.</param>
/// <param name="out_ch_layout">Output channel layout (e.g. AV_CHANNEL_LAYOUT_*).</param>
/// <param name="out_sample_fmt">Output sample format (AV_SAMPLE_FMT_*).</param>
/// <param name="out_sample_rate">Output sample rate (frequency in Hz).</param>
/// <param name="in_ch_layout">Input channel layout (e.g. AV_CHANNEL_LAYOUT_*).</param>
/// <param name="in_sample_fmt">Input sample format (AV_SAMPLE_FMT_*).</param>
/// <param name="in_sample_rate">Input sample rate (frequency in Hz).</param>
/// <param name="log_offset">Logging level offset.</param>
/// <param name="log_ctx">Parent logging context, can be NULL.</param>
/// <returns>0 on success, a negative AVERROR code on error. On error, the Swr context is freed and *ps set to NULL.</returns>
public static int swr_alloc_set_opts2(SwrContext** ps, AVChannelLayout* out_ch_layout, AVSampleFormat out_sample_fmt, int out_sample_rate, AVChannelLayout* in_ch_layout, AVSampleFormat in_sample_fmt, int in_sample_rate, int log_offset, void* log_ctx)
{
    return vectors.swr_alloc_set_opts2(ps, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, log_offset, log_ctx);
}
/// <summary>Generate a channel mixing matrix.</summary>
/// <param name="in_layout">Input channel layout.</param>
/// <param name="out_layout">Output channel layout.</param>
/// <param name="center_mix_level">Mix level for the center channel.</param>
/// <param name="surround_mix_level">Mix level for the surround channel(s).</param>
/// <param name="lfe_mix_level">Mix level for the low-frequency effects channel.</param>
/// <param name="rematrix_maxval">If 1.0, coefficients will be normalized to prevent overflow; if INT_MAX, coefficients will not be normalized.</param>
/// <param name="matrix">Mixing coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o.</param>
/// <param name="stride">Distance between adjacent input channels in the matrix array.</param>
/// <param name="matrix_encoding">Matrixed stereo downmix mode (e.g. dplii).</param>
/// <param name="log_ctx">Parent logging context, can be NULL.</param>
/// <returns>0 on success, negative AVERROR code on failure.</returns>
[Obsolete("use ")]
public static int swr_build_matrix(ulong in_layout, ulong out_layout, double center_mix_level, double surround_mix_level, double lfe_mix_level, double rematrix_maxval, double rematrix_volume, double* matrix, int stride, AVMatrixEncoding matrix_encoding, void* log_ctx)
{
    return vectors.swr_build_matrix(in_layout, out_layout, center_mix_level, surround_mix_level, lfe_mix_level, rematrix_maxval, rematrix_volume, matrix, stride, matrix_encoding, log_ctx);
}

/// <summary>Generate a channel mixing matrix.</summary>
/// <param name="in_layout">Input channel layout.</param>
/// <param name="out_layout">Output channel layout.</param>
/// <param name="center_mix_level">Mix level for the center channel.</param>
/// <param name="surround_mix_level">Mix level for the surround channel(s).</param>
/// <param name="lfe_mix_level">Mix level for the low-frequency effects channel.</param>
/// <param name="matrix">Mixing coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o.</param>
/// <param name="stride">Distance between adjacent input channels in the matrix array.</param>
/// <param name="matrix_encoding">Matrixed stereo downmix mode (e.g. dplii).</param>
/// <returns>0 on success, negative AVERROR code on failure.</returns>
public static int swr_build_matrix2(AVChannelLayout* in_layout, AVChannelLayout* out_layout, double center_mix_level, double surround_mix_level, double lfe_mix_level, double maxval, double rematrix_volume, double* matrix, long stride, AVMatrixEncoding matrix_encoding, void* log_context)
{
    return vectors.swr_build_matrix2(in_layout, out_layout, center_mix_level, surround_mix_level, lfe_mix_level, maxval, rematrix_volume, matrix, stride, matrix_encoding, log_context);
}

/// <summary>Closes the context so that swr_is_initialized() returns 0.</summary>
/// <param name="s">Swr context to be closed.</param>
public static void swr_close(SwrContext* s)
{
    vectors.swr_close(s);
}

/// <summary>Configure or reconfigure the SwrContext using the information provided by the AVFrames.</summary>
/// <param name="swr">Audio resample context.</param>
/// <returns>0 on success, AVERROR on failure.</returns>
public static int swr_config_frame(SwrContext* swr, AVFrame* @out, AVFrame* @in)
{
    return vectors.swr_config_frame(swr, @out, @in);
}

/// <summary>Convert audio.</summary>
/// <param name="s">Allocated Swr context, with parameters set.</param>
/// <param name="out">Output buffers; only the first one need be set in case of packed audio.</param>
/// <param name="out_count">Amount of space available for output in samples per channel.</param>
/// <param name="in">Input buffers; only the first one need be set in case of packed audio.</param>
/// <param name="in_count">Number of input samples available in one channel.</param>
/// <returns>Number of samples output per channel, negative value on error.</returns>
public static int swr_convert(SwrContext* s, byte** @out, int out_count, byte** @in, int in_count)
{
    return vectors.swr_convert(s, @out, out_count, @in, in_count);
}
/// <summary>Convert the samples in the input AVFrame and write them to the output AVFrame.</summary>
/// <param name="swr">Audio resample context.</param>
/// <param name="output">Output AVFrame.</param>
/// <param name="input">Input AVFrame.</param>
/// <returns>0 on success, AVERROR on failure or nonmatching configuration.</returns>
public static int swr_convert_frame(SwrContext* swr, AVFrame* output, AVFrame* input)
{
    return vectors.swr_convert_frame(swr, output, input);
}

/// <summary>Drops the specified number of output samples.</summary>
/// <param name="s">Allocated Swr context.</param>
/// <param name="count">Number of samples to be dropped.</param>
/// <returns>&gt;= 0 on success, or a negative AVERROR code on failure.</returns>
public static int swr_drop_output(SwrContext* s, int count)
{
    return vectors.swr_drop_output(s, count);
}

/// <summary>Free the given SwrContext and set the pointer to NULL.</summary>
/// <param name="s">A pointer to a pointer to Swr context.</param>
public static void swr_free(SwrContext** s)
{
    vectors.swr_free(s);
}

/// <summary>Get the AVClass for SwrContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options.</summary>
/// <returns>The AVClass of SwrContext.</returns>
public static AVClass* swr_get_class()
{
    return vectors.swr_get_class();
}

/// <summary>Gets the delay the next input sample will experience relative to the next output sample.</summary>
/// <param name="s">Swr context.</param>
/// <param name="base">Timebase in which the returned delay will be.</param>
public static long swr_get_delay(SwrContext* s, long @base)
{
    return vectors.swr_get_delay(s, @base);
}

/// <summary>Find an upper bound on the number of samples the next swr_convert call will output for in_samples input samples; depends on internal state, which further swr_convert() calls may change.</summary>
/// <param name="in_samples">Number of input samples.</param>
public static int swr_get_out_samples(SwrContext* s, int in_samples)
{
    return vectors.swr_get_out_samples(s, in_samples);
}

/// <summary>Initialize context after user parameters have been set.</summary>
/// <param name="s">Swr context to initialize.</param>
/// <returns>AVERROR error code in case of failure.</returns>
public static int swr_init(SwrContext* s)
{
    return vectors.swr_init(s);
}
/// <summary>Injects the specified number of silence samples.</summary>
/// <param name="s">Allocated Swr context.</param>
/// <param name="count">Number of samples to be injected.</param>
/// <returns>&gt;= 0 on success, or a negative AVERROR code on failure.</returns>
public static int swr_inject_silence(SwrContext* s, int count)
{
    return vectors.swr_inject_silence(s, count);
}

/// <summary>Check whether an swr context has been initialized or not.</summary>
/// <param name="s">Swr context to check.</param>
/// <returns>Positive if it has been initialized, 0 if not initialized.</returns>
public static int swr_is_initialized(SwrContext* s)
{
    return vectors.swr_is_initialized(s);
}

/// <summary>Convert the next timestamp from input to output; timestamps are in 1/(in_sample_rate * out_sample_rate) units.</summary>
/// <returns>The output timestamp for the next output sample.</returns>
public static long swr_next_pts(SwrContext* s, long pts)
{
    return vectors.swr_next_pts(s, pts);
}

/// <summary>Set a customized input channel mapping.</summary>
/// <param name="s">Allocated Swr context, not yet initialized.</param>
/// <param name="channel_map">Customized input channel mapping (array of channel indexes, -1 for a muted channel).</param>
/// <returns>&gt;= 0 on success, or AVERROR error code in case of failure.</returns>
public static int swr_set_channel_mapping(SwrContext* s, int* channel_map)
{
    return vectors.swr_set_channel_mapping(s, channel_map);
}

/// <summary>Activate resampling compensation ("soft" compensation). Internally called when needed in swr_next_pts().</summary>
/// <param name="s">Allocated Swr context. If it is not initialized, or SWR_FLAG_RESAMPLE is not set, swr_init() is called with the flag set.</param>
/// <param name="sample_delta">Delta in PTS per sample.</param>
/// <param name="compensation_distance">Number of samples to compensate for.</param>
/// <returns>&gt;= 0 on success, AVERROR error codes otherwise.</returns>
public static int swr_set_compensation(SwrContext* s, int sample_delta, int compensation_distance)
{
    return vectors.swr_set_compensation(s, sample_delta, compensation_distance);
}
/// <summary>Set a customized remix matrix.</summary>
/// <param name="s">Allocated Swr context, not yet initialized.</param>
/// <param name="matrix">Remix coefficients; matrix[i + stride * o] is the weight of input channel i in output channel o.</param>
/// <param name="stride">Offset between lines of the matrix.</param>
/// <returns>&gt;= 0 on success, or AVERROR error code in case of failure.</returns>
public static int swr_set_matrix(SwrContext* s, double* matrix, int stride)
{
    return vectors.swr_set_matrix(s, matrix, stride);
}

/// <summary>Return the swr build-time configuration.</summary>
public static string swresample_configuration()
{
    return vectors.swresample_configuration();
}

/// <summary>Return the swr license.</summary>
public static string swresample_license()
{
    return vectors.swresample_license();
}

/// <summary>Return the LIBSWRESAMPLE_VERSION_INT constant.</summary>
public static uint swresample_version()
{
    return vectors.swresample_version();
}

/// <summary>Allocate an empty SwsContext. This must be filled and passed to sws_init_context(). For filling see AVOptions, options.c and sws_setColorspaceDetails().</summary>
public static SwsContext* sws_alloc_context()
{
    return vectors.sws_alloc_context();
}

/// <summary>Allocate and return an uninitialized vector with length coefficients.</summary>
public static SwsVector* sws_allocVec(int length)
{
    return vectors.sws_allocVec(length);
}

/// <summary>Convert an 8-bit paletted frame into a frame with a color depth of 24 bits.</summary>
/// <param name="src">Source frame buffer.</param>
/// <param name="dst">Destination frame buffer.</param>
/// <param name="num_pixels">Number of pixels to convert.</param>
/// <param name="palette">Array with [256] entries, which must match color arrangement (RGB or BGR) of src.</param>
public static void sws_convertPalette8ToPacked24(byte* src, byte* dst, int num_pixels, byte* palette)
{
    vectors.sws_convertPalette8ToPacked24(src, dst, num_pixels, palette);
}

/// <summary>Convert an 8-bit paletted frame into a frame with a color depth of 32 bits.</summary>
/// <param name="src">Source frame buffer.</param>
/// <param name="dst">Destination frame buffer.</param>
/// <param name="num_pixels">Number of pixels to convert.</param>
/// <param name="palette">Array with [256] entries, which must match color arrangement (RGB or BGR) of src.</param>
public static void sws_convertPalette8ToPacked32(byte* src, byte* dst, int num_pixels, byte* palette)
{
    vectors.sws_convertPalette8ToPacked32(src, dst, num_pixels, palette);
}

/// <summary>Finish the scaling process for a pair of source/destination frames previously submitted with sws_frame_start(). Must be called after all sws_send_slice() and sws_receive_slice() calls are done, before any new sws_frame_start() calls.</summary>
public static void sws_frame_end(SwsContext* c)
{
    vectors.sws_frame_end(c);
}

/// <summary>Initialize the scaling process for a given pair of source/destination frames. Must be called before any calls to sws_send_slice() and sws_receive_slice().</summary>
/// <param name="dst">The destination frame.</param>
/// <param name="src">The source frame. The data buffers must be allocated, but the frame data does not have to be ready at this point; availability is then signalled by sws_send_slice().</param>
/// <returns>0 on success, a negative AVERROR code on failure.</returns>
public static int sws_frame_start(SwsContext* c, AVFrame* dst, AVFrame* src)
{
    return vectors.sws_frame_start(c, dst, src);
}

/// <summary>Free the swscaler context swsContext. If swsContext is NULL, then does nothing.</summary>
public static void sws_freeContext(SwsContext* swsContext)
{
    vectors.sws_freeContext(swsContext);
}

/// <summary>Forwards to the loaded native sws_freeFilter.</summary>
public static void sws_freeFilter(SwsFilter* filter)
{
    vectors.sws_freeFilter(filter);
}

/// <summary>Forwards to the loaded native sws_freeVec.</summary>
public static void sws_freeVec(SwsVector* a)
{
    vectors.sws_freeVec(a);
}

/// <summary>Get the AVClass for swsContext. It can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options.</summary>
public static AVClass* sws_get_class()
{
    return vectors.sws_get_class();
}

/// <summary>Check if context can be reused, otherwise reallocate a new one.</summary>
public static SwsContext* sws_getCachedContext(SwsContext* context, int srcW, int srcH, AVPixelFormat srcFormat, int dstW, int dstH, AVPixelFormat dstFormat, int flags, SwsFilter* srcFilter, SwsFilter* dstFilter, double* param)
{
    return vectors.sws_getCachedContext(context, srcW, srcH, srcFormat, dstW, dstH, dstFormat, flags, srcFilter, dstFilter, param);
}
/// <summary>Return a pointer to yuv&lt;-&gt;rgb coefficients for the given colorspace suitable for sws_setColorspaceDetails().</summary>
/// <param name="colorspace">One of the SWS_CS_* macros. If invalid, SWS_CS_DEFAULT is used.</param>
public static int* sws_getCoefficients(int colorspace)
{
    return vectors.sws_getCoefficients(colorspace);
}

/// <summary>Retrieve colorspace details; negative error code on error, non negative otherwise (LIBSWSCALE_VERSION_MAJOR &gt; 6).</summary>
public static int sws_getColorspaceDetails(SwsContext* c, int** inv_table, int* srcRange, int** table, int* dstRange, int* brightness, int* contrast, int* saturation)
{
    return vectors.sws_getColorspaceDetails(c, inv_table, srcRange, table, dstRange, brightness, contrast, saturation);
}

/// <summary>Allocate and return an SwsContext. You need it to perform scaling/conversion operations using sws_scale().</summary>
/// <param name="srcW">The width of the source image.</param>
/// <param name="srcH">The height of the source image.</param>
/// <param name="srcFormat">The source image format.</param>
/// <param name="dstW">The width of the destination image.</param>
/// <param name="dstH">The height of the destination image.</param>
/// <param name="dstFormat">The destination image format.</param>
/// <param name="flags">Specify which algorithm and options to use for rescaling.</param>
/// <param name="param">Extra parameters to tune the used scaler (e.g. SWS_BICUBIC / SWS_GAUSS / SWS_LANCZOS shape parameters).</param>
/// <returns>A pointer to an allocated context, or NULL in case of error.</returns>
public static SwsContext* sws_getContext(int srcW, int srcH, AVPixelFormat srcFormat, int dstW, int dstH, AVPixelFormat dstFormat, int flags, SwsFilter* srcFilter, SwsFilter* dstFilter, double* param)
{
    return vectors.sws_getContext(srcW, srcH, srcFormat, dstW, dstH, dstFormat, flags, srcFilter, dstFilter, param);
}

/// <summary>Forwards to the loaded native sws_getDefaultFilter.</summary>
public static SwsFilter* sws_getDefaultFilter(float lumaGBlur, float chromaGBlur, float lumaSharpen, float chromaSharpen, float chromaHShift, float chromaVShift, int verbose)
{
    return vectors.sws_getDefaultFilter(lumaGBlur, chromaGBlur, lumaSharpen, chromaSharpen, chromaHShift, chromaVShift, verbose);
}

/// <summary>Return a normalized Gaussian curve used to filter stuff; quality = 3 is high quality, lower is lower quality.</summary>
public static SwsVector* sws_getGaussianVec(double variance, double quality)
{
    return vectors.sws_getGaussianVec(variance, quality);
}

/// <summary>Initialize the swscaler context sws_context.</summary>
/// <returns>Zero or positive value on success, a negative value on error.</returns>
public static int sws_init_context(SwsContext* sws_context, SwsFilter* srcFilter, SwsFilter* dstFilter)
{
    return vectors.sws_init_context(sws_context, srcFilter, dstFilter);
}

/// <summary>Returns a positive value if an endianness conversion for pix_fmt is supported, 0 otherwise.</summary>
/// <param name="pix_fmt">The pixel format.</param>
public static int sws_isSupportedEndiannessConversion(AVPixelFormat pix_fmt)
{
    return vectors.sws_isSupportedEndiannessConversion(pix_fmt);
}

/// <summary>Return a positive value if pix_fmt is a supported input format, 0 otherwise.</summary>
public static int sws_isSupportedInput(AVPixelFormat pix_fmt)
{
    return vectors.sws_isSupportedInput(pix_fmt);
}

/// <summary>Return a positive value if pix_fmt is a supported output format, 0 otherwise.</summary>
public static int sws_isSupportedOutput(AVPixelFormat pix_fmt)
{
    return vectors.sws_isSupportedOutput(pix_fmt);
}

/// <summary>Scale all the coefficients of a so that their sum equals height.</summary>
public static void sws_normalizeVec(SwsVector* a, double height)
{
    vectors.sws_normalizeVec(a, height);
}

/// <summary>Request a horizontal slice of the output data to be written into the frame previously provided to sws_frame_start().</summary>
/// <param name="slice_start">First row of the slice; must be a multiple of sws_receive_slice_alignment().</param>
/// <param name="slice_height">Number of rows in the slice; must be a multiple of sws_receive_slice_alignment(), except for the last slice.</param>
/// <returns>A non-negative number if written successfully, AVERROR(EAGAIN) if more input is needed, another negative AVERROR code on other scaling failure.</returns>
public static int sws_receive_slice(SwsContext* c, uint slice_start, uint slice_height)
{
    return vectors.sws_receive_slice(c, slice_start, slice_height);
}

/// <summary>Returns alignment required for output slices requested with sws_receive_slice(); slice offsets and sizes passed to sws_receive_slice() must be multiples of this value.</summary>
public static uint sws_receive_slice_alignment(SwsContext* c)
{
    return vectors.sws_receive_slice_alignment(c);
}
/// <summary>Scale the image slice in srcSlice and put the resulting scaled slice in the image in dst. A slice is a sequence of consecutive rows in an image.</summary>
/// <param name="c">The scaling context previously created with sws_getContext().</param>
/// <param name="srcSlice">The array containing the pointers to the planes of the source slice.</param>
/// <param name="srcStride">The array containing the strides for each plane of the source image.</param>
/// <param name="srcSliceY">The position in the source image of the slice to process (first row, counted from zero).</param>
/// <param name="srcSliceH">The height of the source slice, that is the number of rows in the slice.</param>
/// <param name="dst">The array containing the pointers to the planes of the destination image.</param>
/// <param name="dstStride">The array containing the strides for each plane of the destination image.</param>
/// <returns>The height of the output slice.</returns>
public static int sws_scale(SwsContext* c, byte*[] srcSlice, int[] srcStride, int srcSliceY, int srcSliceH, byte*[] dst, int[] dstStride)
{
    return vectors.sws_scale(c, srcSlice, srcStride, srcSliceY, srcSliceH, dst, dstStride);
}

/// <summary>Scale source data from src and write the output to dst.</summary>
/// <param name="dst">The destination frame. See documentation for sws_frame_start() for more details.</param>
/// <param name="src">The source frame.</param>
/// <returns>0 on success, a negative AVERROR code on failure.</returns>
public static int sws_scale_frame(SwsContext* c, AVFrame* dst, AVFrame* src)
{
    return vectors.sws_scale_frame(c, dst, src);
}

/// <summary>Scale all the coefficients of a by the scalar value.</summary>
public static void sws_scaleVec(SwsVector* a, double scalar)
{
    vectors.sws_scaleVec(a, scalar);
}
+ public static int sws_send_slice(SwsContext* @c, uint @slice_start, uint @slice_height) => vectors.sws_send_slice(@c, @slice_start, @slice_height); + + /// Returns negative error code on error, non negative otherwise #else Returns -1 if not supported #endif + /// the yuv2rgb coefficients describing the input yuv space, normally ff_yuv2rgb_coeffs[x] + /// flag indicating the while-black range of the input (1=jpeg / 0=mpeg) + /// the yuv2rgb coefficients describing the output yuv space, normally ff_yuv2rgb_coeffs[x] + /// flag indicating the while-black range of the output (1=jpeg / 0=mpeg) + /// 16.16 fixed point brightness correction + /// 16.16 fixed point contrast correction + /// 16.16 fixed point saturation correction #if LIBSWSCALE_VERSION_MAJOR > 6 + /// negative error code on error, non negative otherwise #else + public static int sws_setColorspaceDetails(SwsContext* @c, in int_array4 @inv_table, int @srcRange, in int_array4 @table, int @dstRange, int @brightness, int @contrast, int @saturation) => vectors.sws_setColorspaceDetails(@c, @inv_table, @srcRange, @table, @dstRange, @brightness, @contrast, @saturation); + + /// Return the libswscale build-time configuration. + public static string swscale_configuration() => vectors.swscale_configuration(); + + /// Return the libswscale license. + public static string swscale_license() => vectors.swscale_license(); + + /// Color conversion and scaling library. + public static uint swscale_version() => vectors.swscale_version(); + +} diff --git a/FFmpeg.AutoGen/generated/ffmpeg.functions.inline.g.cs b/FFmpeg.AutoGen/generated/ffmpeg.functions.inline.g.cs new file mode 100644 index 00000000..3dda1acc --- /dev/null +++ b/FFmpeg.AutoGen/generated/ffmpeg.functions.inline.g.cs @@ -0,0 +1,368 @@ +using System; + +namespace FFmpeg.AutoGen; + +public static unsafe partial class ffmpeg +{ + /// Compute ceil(log2(x)). 
/// <summary>Compute ceil(log2(x)).</summary>
/// <param name="x">value used to compute ceil(log2(x))</param>
/// <returns>computed ceiling of log2(x)</returns>
public static int av_ceil_log2_c(int x)
{
    // Doubling (x - 1) makes the floor-based av_log2 round up instead of down.
    return av_log2((uint)(x - 1U) << 1);
}

/// <summary>Clip a signed integer value into the amin-amax range.</summary>
/// <param name="a">value to clip</param>
/// <param name="amin">minimum value of the clip range</param>
/// <param name="amax">maximum value of the clip range</param>
/// <returns>clipped value</returns>
public static int av_clip_c(int a, int amin, int amax)
{
    // amin wins when the bounds are inconsistent (checked first), matching the C macro.
    return a < amin ? amin : a > amax ? amax : a;
}

/// <summary>Clip a signed integer value into the -32768,32767 range.</summary>
/// <param name="a">value to clip</param>
/// <returns>clipped value</returns>
public static short av_clip_int16_c(int a)
{
    // Out of range iff (a + 0x8000) has bits set above the low 16;
    // (a >> 31) ^ 0x7FFF yields -32768 for negative a and 32767 otherwise.
    bool outOfRange = ((a + 32768U) & ~65535) != 0;
    return outOfRange ? (short)((a >> 31) ^ 32767) : (short)a;
}

/// <summary>Clip a signed integer value into the -128,127 range.</summary>
/// <param name="a">value to clip</param>
/// <returns>clipped value</returns>
public static sbyte av_clip_int8_c(int a)
{
    // Same branch-light saturation trick as av_clip_int16_c, scaled to 8 bits.
    bool outOfRange = ((a + 128U) & ~255) != 0;
    return outOfRange ? (sbyte)((a >> 31) ^ 127) : (sbyte)a;
}

/// <summary>Clip a signed integer into the -(2^p),(2^p-1) range.</summary>
/// <param name="a">value to clip</param>
/// <param name="p">bit position to clip at</param>
/// <returns>clipped value</returns>
public static int av_clip_intp2_c(int a, int p)
{
    bool outOfRange = (((uint)a + (1 << p)) & ~((2 << p) - 1)) != 0;
    return outOfRange ? (a >> 31) ^ ((1 << p) - 1) : a;
}
/// <summary>Clip a signed integer value into the 0-65535 range.</summary>
/// <param name="a">value to clip</param>
/// <returns>clipped value</returns>
public static ushort av_clip_uint16_c(int a)
{
    // (~a) >> 31 is all-ones for non-negative a (saturate high) and 0 for negative a (saturate low).
    return (a & ~65535) != 0 ? (ushort)((~a) >> 31) : (ushort)a;
}

/// <summary>Clip a signed integer value into the 0-255 range.</summary>
/// <param name="a">value to clip</param>
/// <returns>clipped value</returns>
public static byte av_clip_uint8_c(int a)
{
    return (a & ~255) != 0 ? (byte)((~a) >> 31) : (byte)a;
}

/// <summary>Clip a signed integer to an unsigned power of two range.</summary>
/// <param name="a">value to clip</param>
/// <param name="p">bit position to clip at</param>
/// <returns>clipped value</returns>
public static uint av_clip_uintp2_c(int a, int p)
{
    // Saturates to 0 for negative values and to (1 << p) - 1 for values above the range.
    return (a & ~((1 << p) - 1)) != 0 ? (uint)((~a) >> 31 & ((1 << p) - 1)) : (uint)a;
}

/// <summary>Clip a signed 64bit integer value into the amin-amax range.</summary>
/// <param name="a">value to clip</param>
/// <param name="amin">minimum value of the clip range</param>
/// <param name="amax">maximum value of the clip range</param>
/// <returns>clipped value</returns>
public static long av_clip64_c(long a, long amin, long amax)
{
    return a < amin ? amin : a > amax ? amax : a;
}

/// <summary>Clip a double value into the amin-amax range. If a is nan or -inf amin will be returned. If a is +inf amax will be returned.</summary>
/// <param name="a">value to clip</param>
/// <param name="amin">minimum value of the clip range</param>
/// <param name="amax">maximum value of the clip range</param>
/// <returns>clipped value</returns>
public static double av_clipd_c(double a, double amin, double amax)
{
    // max(a, amin) first (NaN compares false, so NaN falls back to amin), then min with amax.
    double lowerBounded = a > amin ? a : amin;
    return lowerBounded > amax ? amax : lowerBounded;
}
/// <summary>Clip a float value into the amin-amax range. If a is nan or -inf amin will be returned. If a is +inf amax will be returned.</summary>
/// <param name="a">value to clip</param>
/// <param name="amin">minimum value of the clip range</param>
/// <param name="amax">maximum value of the clip range</param>
/// <returns>clipped value</returns>
public static float av_clipf_c(float @a, float @amin, float @amax)
{
    return ((((a) > (amin) ? (a) : (amin))) > (amax) ? (amax) : (((a) > (amin) ? (a) : (amin))));
}
// original body hash: 3g76qefPWCYqXraY2vYdxoH58/EKn5EeR9v7cGEBM6Y=

/// <summary>Clip a signed 64-bit integer value into the -2147483648,2147483647 range.</summary>
/// <param name="a">value to clip</param>
/// <returns>clipped value</returns>
public static int av_clipl_int32_c(long @a)
{
    // Out of range iff (a + 2^31) has bits set above the low 32; saturate via sign bit.
    if ((((ulong)a + 2147483648UL) & ~(4294967295UL)) != 0)
        return (int)((a >> 63) ^ 2147483647);
    else
        return (int)a;
}
// original body hash: 00dWv9FNYsEeRh1lPjYlSw3TQiOlthet3Kyi6z91Hbo=

/// <summary>Compare two rationals.</summary>
/// <param name="a">First rational</param>
/// <param name="b">Second rational</param>
/// <returns>0 if `a == b`, 1 if `a > b`, -1 if `a < b`, `INT_MIN` if one of the values is of the form `0 / 0`.</returns>
public static int av_cmp_q(AVRational @a, AVRational @b)
{
    long tmp = a.num * (long)b.den - b.num * (long)a.den;
    if (tmp != 0)
        return (int)((tmp ^ a.den ^ b.den) >> 63) | 1;
    else if (b.den != 0 && a.den != 0)
        return 0;
    else if (a.num != 0 && b.num != 0)
        return (a.num >> 31) - (b.num >> 31);
    else
        return (-2147483647 - 1);
}
// original body hash: M+RGb5gXGdDjfY/gK5ZeCYeYrZAxjTXZA9+XVu0I66Q=

/// <summary>Reinterpret a double as a 64-bit integer (raw IEEE-754 bit pattern).</summary>
public static ulong av_double2int(double @f)
{
    // FIX(review): the generator emitted "(ulong)f", a numeric conversion; FFmpeg's
    // av_double2int reinterprets the bits (union av_intfloat64 in intfloat.h).
    return (ulong)BitConverter.DoubleToInt64Bits(@f);
}

/// <summary>Reinterpret a float as a 32-bit integer (raw IEEE-754 bit pattern).</summary>
public static uint av_float2int(float @f)
{
    // FIX(review): was a numeric cast; must be a bit reinterpretation.
    // GetBytes/ToUInt32 round-trip on the same platform, so endianness cancels out.
    return BitConverter.ToUInt32(BitConverter.GetBytes(@f), 0);
}

/// <summary>Reinterpret a 64-bit integer as a double (raw IEEE-754 bit pattern).</summary>
public static double av_int2double(ulong @i)
{
    // FIX(review): was a numeric cast; must be a bit reinterpretation.
    return BitConverter.Int64BitsToDouble((long)@i);
}
/// <summary>Reinterpret a 32-bit integer as a float (raw IEEE-754 bit pattern).</summary>
public static float av_int2float(uint @i)
{
    // FIX(review): the generator emitted "(float)i", a numeric conversion; FFmpeg's
    // av_int2float reinterprets the bits (union av_intfloat32 in intfloat.h).
    // GetBytes/ToSingle round-trip on the same platform, so endianness cancels out.
    return BitConverter.ToSingle(BitConverter.GetBytes(@i), 0);
}

/// <summary>Invert a rational.</summary>
/// <param name="q">value</param>
/// <returns>1 / q</returns>
public static AVRational av_inv_q(AVRational @q)
{
    var r = new AVRational { @num = q.den, @den = q.num };
    return r;
}
// original body hash: sXbO4D7vmayAx56EFqz9C0kakcSPSryJHdk0hr0MOFY=

/// <summary>Fill the provided buffer with a string containing an error string corresponding to the AVERROR code errnum.</summary>
/// <param name="errbuf">a buffer</param>
/// <param name="errbuf_size">size in bytes of errbuf</param>
/// <param name="errnum">error code to describe</param>
/// <returns>the buffer in input, filled with the error description</returns>
public static byte* av_make_error_string(byte* @errbuf, ulong @errbuf_size, int @errnum)
{
    av_strerror(errnum, errbuf, errbuf_size);
    return errbuf;
}
// original body hash: DRHQHyLQNo9pTxA+wRw4zVDrC7Md1u3JWawQX0BVkqE=

/// <summary>Create an AVRational.</summary>
public static AVRational av_make_q(int @num, int @den)
{
    var r = new AVRational { @num = num, @den = den };
    return r;
}
// original body hash: IAPYNNcg3GX0PGxINeLQhb41dH921lPVKcnqxCk7ERA=

/// <summary>Clear high bits from an unsigned integer starting with specific bit position.</summary>
/// <param name="a">value to clip</param>
/// <param name="p">bit position to clip at</param>
/// <returns>clipped value</returns>
public static uint av_mod_uintp2_c(uint @a, uint @p)
{
    return a & (uint)((1 << (int)p) - 1);
}
// original body hash: ncn4Okxr9Nas1g/qCfpRHKtywuNmJuf3UED+o3wjadc=

/// <summary>Return the parity (popcount mod 2) of v.</summary>
public static int av_parity_c(uint @v)
{
    return av_popcount_c(v) & 1;
}
// original body hash: Hsrq5CWkNvuNTnqES92ZJYVYpKXFwosrZNja/oaUd0s=

/// <summary>Count number of bits set to one in x.</summary>
/// <param name="x">value to count bits of</param>
/// <returns>the number of bits set to one in x</returns>
public static int av_popcount_c(uint @x)
{
    // Classic SWAR (parallel bit-count) reduction.
    x -= (x >> 1) & 1431655765;
    x = (x & 858993459) + ((x >> 2) & 858993459);
    x = (x + (x >> 4)) & 252645135;
    x += x >> 8;
    return (int)((x + (x >> 16)) & 63);
}
// original body hash: 6EqV8Ll7t/MGINV9Nh3TSEbNyUYeskm7HucpU0SAkgg=

/// <summary>Count number of bits set to one in x.</summary>
/// <param name="x">value to count bits of</param>
/// <returns>the number of bits set to one in x</returns>
public static int av_popcount64_c(ulong @x)
{
    return av_popcount_c((uint)x) + av_popcount_c((uint)(x >> 32));
}
// original body hash: 4wjPAKU9R0yS6OI8Y9h3L6de+uXt/lBm+zX7t5Ch18k=

/// <summary>Convert an AVRational to a `double`.</summary>
/// <param name="a">AVRational to convert</param>
/// <returns>`a` in floating-point form</returns>
public static double av_q2d(AVRational @a)
{
    return a.num / (double)a.den;
}
// original body hash: j4R2BS8nF6czcUDVk5kKi9nLEdlTI/NRDYtnc1KFeyE=

/// <summary>Add two signed 32-bit values with saturation.</summary>
/// <param name="a">one value</param>
/// <param name="b">another value</param>
/// <returns>sum with signed saturation</returns>
public static int av_sat_add32_c(int @a, int @b)
{
    return av_clipl_int32_c((long)a + b);
}
// original body hash: GAAy4GsS2n+9kJ/8hzuONPUOGIsiOj7PvXnLHUVrimY=

/// <summary>Add two signed 64-bit values with saturation.</summary>
/// <param name="a">one value</param>
/// <param name="b">another value</param>
/// <returns>sum with signed saturation</returns>
public static long av_sat_add64_c(long @a, long @b)
{
    try
    {
        // FIX(review): C#'s default context is unchecked, so a plain "a + b" wraps
        // silently and the catch below was dead code; 'checked' makes overflow throw.
        return checked(@a + @b);
    }
    catch (OverflowException)
    {
        return ((double)@a + (double)@b) > 0d ? long.MaxValue : long.MinValue;
    }
}

/// <summary>Add a doubled value to another value with saturation at both stages.</summary>
/// <param name="a">first value</param>
/// <param name="b">value doubled and added to a</param>
/// <returns>sum sat(a + sat(2*b)) with signed saturation</returns>
public static int av_sat_dadd32_c(int @a, int @b)
{
    return av_sat_add32_c(a, av_sat_add32_c(b, b));
}
// original body hash: Kbha6XFULk7dxB6zc5WRwoPczQVN7HBcNs9Hjlj/Caw=
+ /// first value + /// value doubled and subtracted from a + /// difference sat(a - sat(2*b)) with signed saturation + public static int av_sat_dsub32_c(int @a, int @b) + { + return av_sat_sub32_c(a, av_sat_add32_c(b, b)); + } + // original body hash: ypu4i+30n3CeMxdL8pq7XDYAFBi1N5d2mkIT6zQ1bO0= + + /// Subtract two signed 32-bit values with saturation. + /// one value + /// another value + /// difference with signed saturation + public static int av_sat_sub32_c(int @a, int @b) + { + return av_clipl_int32_c((long)a - b); + } + // original body hash: /tgXI2zbIgliqOwZbpnq7jSiVj0N70RjBFsbkIkWhsM= + + /// Subtract two signed 64-bit values with saturation. + /// one value + /// another value + /// difference with signed saturation + public static long av_sat_sub64_c(long @a, long @b) + { + try + { + return @a - @b; + } + catch (OverflowException) + { + return ((double)@a - (double)@b) > 0d ? long.MaxValue : long.MinValue; + } + } + // original body hash: 6YrSxDrYVG1ac1wlCiXKMhTwj7Kx6eym/YtspKusrGk= + + /// Return x default pointer in case p is NULL. + public static void* av_x_if_null(void* @p, void* @x) + { + return (void*)(p != null ? p : x); + } + // original body hash: zOY924eIk3VeTSNb9XcE2Yw8aZ4/jlzQSfP06k5n0nU= + + /// ftell() equivalent for AVIOContext. + /// position or AVERROR. 
+ public static long avio_tell(AVIOContext* @s) + { + return avio_seek(s, 0, 1); + } + // original body hash: o18c3ypeh9EsmYaplTel2ssgM2PZKTTDfMjsqEopycw= + +} diff --git a/FFmpeg.AutoGen/generated/ffmpeg.libraries.g.cs b/FFmpeg.AutoGen/generated/ffmpeg.libraries.g.cs new file mode 100644 index 00000000..839d4c8d --- /dev/null +++ b/FFmpeg.AutoGen/generated/ffmpeg.libraries.g.cs @@ -0,0 +1,18 @@ +using System.Collections.Generic; + +namespace FFmpeg.AutoGen; + +public static unsafe partial class ffmpeg +{ + public static Dictionary LibraryVersionMap = new Dictionary + { + {"avcodec", 59}, + {"avdevice", 59}, + {"avfilter", 8}, + {"avformat", 59}, + {"avutil", 57}, + {"postproc", 56}, + {"swresample", 4}, + {"swscale", 6}, + }; +} diff --git a/FFmpeg.AutoGen/generated/ffmpeg.macros.g.cs b/FFmpeg.AutoGen/generated/ffmpeg.macros.g.cs new file mode 100644 index 00000000..24d99d8c --- /dev/null +++ b/FFmpeg.AutoGen/generated/ffmpeg.macros.g.cs @@ -0,0 +1,1687 @@ +namespace FFmpeg.AutoGen; + +public static unsafe partial class ffmpeg +{ + /// _WIN32_WINNT = 0x602 + public const int _WIN32_WINNT = 0x602; + // public static attribute_deprecated = __declspec(deprecated); + // public static av_alias = __attribute__((may_alias)); + // public static av_alloc_size = (...); + // public static av_always_inline = __forceinline; + /// AV_BUFFER_FLAG_READONLY = (1 << 0) + public const int AV_BUFFER_FLAG_READONLY = 0x1 << 0x0; + /// AV_BUFFERSINK_FLAG_NO_REQUEST = 0x2 + public const int AV_BUFFERSINK_FLAG_NO_REQUEST = 0x2; + /// AV_BUFFERSINK_FLAG_PEEK = 0x1 + public const int AV_BUFFERSINK_FLAG_PEEK = 0x1; + // public static av_builtin_constant_p = __builtin_constant_p; + // public static av_ceil_log2 = av_ceil_log2_c; + // public static AV_CEIL_RSHIFT = (a,b) (!av_builtin_constant_p(b) ? 
-((-(a)) >> (b)) : ((a) + (1<<(b)) - 1) >> (b)); + /// AV_CH_BACK_CENTER = (1ULL << AV_CHAN_BACK_CENTER ) + public static readonly ulong AV_CH_BACK_CENTER = 0x1UL << 8; + /// AV_CH_BACK_LEFT = (1ULL << AV_CHAN_BACK_LEFT ) + public static readonly ulong AV_CH_BACK_LEFT = 0x1UL << 4; + /// AV_CH_BACK_RIGHT = (1ULL << AV_CHAN_BACK_RIGHT ) + public static readonly ulong AV_CH_BACK_RIGHT = 0x1UL << 5; + /// AV_CH_BOTTOM_FRONT_CENTER = (1ULL << AV_CHAN_BOTTOM_FRONT_CENTER ) + public static readonly ulong AV_CH_BOTTOM_FRONT_CENTER = 0x1UL << 38; + /// AV_CH_BOTTOM_FRONT_LEFT = (1ULL << AV_CHAN_BOTTOM_FRONT_LEFT ) + public static readonly ulong AV_CH_BOTTOM_FRONT_LEFT = 0x1UL << 39; + /// AV_CH_BOTTOM_FRONT_RIGHT = (1ULL << AV_CHAN_BOTTOM_FRONT_RIGHT ) + public static readonly ulong AV_CH_BOTTOM_FRONT_RIGHT = 0x1UL << 40; + /// AV_CH_FRONT_CENTER = (1ULL << AV_CHAN_FRONT_CENTER ) + public static readonly ulong AV_CH_FRONT_CENTER = 0x1UL << 2; + /// AV_CH_FRONT_LEFT = (1ULL << AV_CHAN_FRONT_LEFT ) + public static readonly ulong AV_CH_FRONT_LEFT = 0x1UL << 0; + /// AV_CH_FRONT_LEFT_OF_CENTER = (1ULL << AV_CHAN_FRONT_LEFT_OF_CENTER ) + public static readonly ulong AV_CH_FRONT_LEFT_OF_CENTER = 0x1UL << 6; + /// AV_CH_FRONT_RIGHT = (1ULL << AV_CHAN_FRONT_RIGHT ) + public static readonly ulong AV_CH_FRONT_RIGHT = 0x1UL << 1; + /// AV_CH_FRONT_RIGHT_OF_CENTER = (1ULL << AV_CHAN_FRONT_RIGHT_OF_CENTER) + public static readonly ulong AV_CH_FRONT_RIGHT_OF_CENTER = 0x1UL << 7; + /// AV_CH_LAYOUT_2_1 = (AV_CH_LAYOUT_STEREO|AV_CH_BACK_CENTER) + public static readonly ulong AV_CH_LAYOUT_2_1 = AV_CH_LAYOUT_STEREO | AV_CH_BACK_CENTER; + /// AV_CH_LAYOUT_2_2 = (AV_CH_LAYOUT_STEREO|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT) + public static readonly ulong AV_CH_LAYOUT_2_2 = AV_CH_LAYOUT_STEREO | AV_CH_SIDE_LEFT | AV_CH_SIDE_RIGHT; + /// AV_CH_LAYOUT_22POINT2 = 
(AV_CH_LAYOUT_5POINT1_BACK|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER|AV_CH_BACK_CENTER|AV_CH_LOW_FREQUENCY_2|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT|AV_CH_TOP_FRONT_LEFT|AV_CH_TOP_FRONT_RIGHT|AV_CH_TOP_FRONT_CENTER|AV_CH_TOP_CENTER|AV_CH_TOP_BACK_LEFT|AV_CH_TOP_BACK_RIGHT|AV_CH_TOP_SIDE_LEFT|AV_CH_TOP_SIDE_RIGHT|AV_CH_TOP_BACK_CENTER|AV_CH_BOTTOM_FRONT_CENTER|AV_CH_BOTTOM_FRONT_LEFT|AV_CH_BOTTOM_FRONT_RIGHT) + public static readonly ulong AV_CH_LAYOUT_22POINT2 = AV_CH_LAYOUT_5POINT1_BACK | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER | AV_CH_BACK_CENTER | AV_CH_LOW_FREQUENCY_2 | AV_CH_SIDE_LEFT | AV_CH_SIDE_RIGHT | AV_CH_TOP_FRONT_LEFT | AV_CH_TOP_FRONT_RIGHT | AV_CH_TOP_FRONT_CENTER | AV_CH_TOP_CENTER | AV_CH_TOP_BACK_LEFT | AV_CH_TOP_BACK_RIGHT | AV_CH_TOP_SIDE_LEFT | AV_CH_TOP_SIDE_RIGHT | AV_CH_TOP_BACK_CENTER | AV_CH_BOTTOM_FRONT_CENTER | AV_CH_BOTTOM_FRONT_LEFT | AV_CH_BOTTOM_FRONT_RIGHT; + /// AV_CH_LAYOUT_2POINT1 = (AV_CH_LAYOUT_STEREO|AV_CH_LOW_FREQUENCY) + public static readonly ulong AV_CH_LAYOUT_2POINT1 = AV_CH_LAYOUT_STEREO | AV_CH_LOW_FREQUENCY; + /// AV_CH_LAYOUT_3POINT1 = (AV_CH_LAYOUT_SURROUND|AV_CH_LOW_FREQUENCY) + public static readonly ulong AV_CH_LAYOUT_3POINT1 = AV_CH_LAYOUT_SURROUND | AV_CH_LOW_FREQUENCY; + /// AV_CH_LAYOUT_4POINT0 = (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_CENTER) + public static readonly ulong AV_CH_LAYOUT_4POINT0 = AV_CH_LAYOUT_SURROUND | AV_CH_BACK_CENTER; + /// AV_CH_LAYOUT_4POINT1 = (AV_CH_LAYOUT_4POINT0|AV_CH_LOW_FREQUENCY) + public static readonly ulong AV_CH_LAYOUT_4POINT1 = AV_CH_LAYOUT_4POINT0 | AV_CH_LOW_FREQUENCY; + /// AV_CH_LAYOUT_5POINT0 = (AV_CH_LAYOUT_SURROUND|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT) + public static readonly ulong AV_CH_LAYOUT_5POINT0 = AV_CH_LAYOUT_SURROUND | AV_CH_SIDE_LEFT | AV_CH_SIDE_RIGHT; + /// AV_CH_LAYOUT_5POINT0_BACK = (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT) + public static readonly ulong AV_CH_LAYOUT_5POINT0_BACK = AV_CH_LAYOUT_SURROUND | AV_CH_BACK_LEFT | 
AV_CH_BACK_RIGHT; + /// AV_CH_LAYOUT_5POINT1 = (AV_CH_LAYOUT_5POINT0|AV_CH_LOW_FREQUENCY) + public static readonly ulong AV_CH_LAYOUT_5POINT1 = AV_CH_LAYOUT_5POINT0 | AV_CH_LOW_FREQUENCY; + /// AV_CH_LAYOUT_5POINT1_BACK = (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_LOW_FREQUENCY) + public static readonly ulong AV_CH_LAYOUT_5POINT1_BACK = AV_CH_LAYOUT_5POINT0_BACK | AV_CH_LOW_FREQUENCY; + /// AV_CH_LAYOUT_6POINT0 = (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_CENTER) + public static readonly ulong AV_CH_LAYOUT_6POINT0 = AV_CH_LAYOUT_5POINT0 | AV_CH_BACK_CENTER; + /// AV_CH_LAYOUT_6POINT0_FRONT = (AV_CH_LAYOUT_2_2|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER) + public static readonly ulong AV_CH_LAYOUT_6POINT0_FRONT = AV_CH_LAYOUT_2_2 | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER; + /// AV_CH_LAYOUT_6POINT1 = (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_CENTER) + public static readonly ulong AV_CH_LAYOUT_6POINT1 = AV_CH_LAYOUT_5POINT1 | AV_CH_BACK_CENTER; + /// AV_CH_LAYOUT_6POINT1_BACK = (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_BACK_CENTER) + public static readonly ulong AV_CH_LAYOUT_6POINT1_BACK = AV_CH_LAYOUT_5POINT1_BACK | AV_CH_BACK_CENTER; + /// AV_CH_LAYOUT_6POINT1_FRONT = (AV_CH_LAYOUT_6POINT0_FRONT|AV_CH_LOW_FREQUENCY) + public static readonly ulong AV_CH_LAYOUT_6POINT1_FRONT = AV_CH_LAYOUT_6POINT0_FRONT | AV_CH_LOW_FREQUENCY; + /// AV_CH_LAYOUT_7POINT0 = (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT) + public static readonly ulong AV_CH_LAYOUT_7POINT0 = AV_CH_LAYOUT_5POINT0 | AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT; + /// AV_CH_LAYOUT_7POINT0_FRONT = (AV_CH_LAYOUT_5POINT0|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER) + public static readonly ulong AV_CH_LAYOUT_7POINT0_FRONT = AV_CH_LAYOUT_5POINT0 | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER; + /// AV_CH_LAYOUT_7POINT1 = (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT) + public static readonly ulong AV_CH_LAYOUT_7POINT1 = AV_CH_LAYOUT_5POINT1 | AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT; + /// 
AV_CH_LAYOUT_7POINT1_WIDE = (AV_CH_LAYOUT_5POINT1|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER) + public static readonly ulong AV_CH_LAYOUT_7POINT1_WIDE = AV_CH_LAYOUT_5POINT1 | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER; + /// AV_CH_LAYOUT_7POINT1_WIDE_BACK = (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER) + public static readonly ulong AV_CH_LAYOUT_7POINT1_WIDE_BACK = AV_CH_LAYOUT_5POINT1_BACK | AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER; + /// AV_CH_LAYOUT_HEXADECAGONAL = (AV_CH_LAYOUT_OCTAGONAL|AV_CH_WIDE_LEFT|AV_CH_WIDE_RIGHT|AV_CH_TOP_BACK_LEFT|AV_CH_TOP_BACK_RIGHT|AV_CH_TOP_BACK_CENTER|AV_CH_TOP_FRONT_CENTER|AV_CH_TOP_FRONT_LEFT|AV_CH_TOP_FRONT_RIGHT) + public static readonly ulong AV_CH_LAYOUT_HEXADECAGONAL = AV_CH_LAYOUT_OCTAGONAL | AV_CH_WIDE_LEFT | AV_CH_WIDE_RIGHT | AV_CH_TOP_BACK_LEFT | AV_CH_TOP_BACK_RIGHT | AV_CH_TOP_BACK_CENTER | AV_CH_TOP_FRONT_CENTER | AV_CH_TOP_FRONT_LEFT | AV_CH_TOP_FRONT_RIGHT; + /// AV_CH_LAYOUT_HEXAGONAL = (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_BACK_CENTER) + public static readonly ulong AV_CH_LAYOUT_HEXAGONAL = AV_CH_LAYOUT_5POINT0_BACK | AV_CH_BACK_CENTER; + /// AV_CH_LAYOUT_MONO = (AV_CH_FRONT_CENTER) + public static readonly ulong AV_CH_LAYOUT_MONO = AV_CH_FRONT_CENTER; + /// AV_CH_LAYOUT_NATIVE = 0x8000000000000000ULL + public const ulong AV_CH_LAYOUT_NATIVE = 0x8000000000000000UL; + /// AV_CH_LAYOUT_OCTAGONAL = (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_CENTER|AV_CH_BACK_RIGHT) + public static readonly ulong AV_CH_LAYOUT_OCTAGONAL = AV_CH_LAYOUT_5POINT0 | AV_CH_BACK_LEFT | AV_CH_BACK_CENTER | AV_CH_BACK_RIGHT; + /// AV_CH_LAYOUT_QUAD = (AV_CH_LAYOUT_STEREO|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT) + public static readonly ulong AV_CH_LAYOUT_QUAD = AV_CH_LAYOUT_STEREO | AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT; + /// AV_CH_LAYOUT_STEREO = (AV_CH_FRONT_LEFT|AV_CH_FRONT_RIGHT) + public static readonly ulong AV_CH_LAYOUT_STEREO = AV_CH_FRONT_LEFT | 
AV_CH_FRONT_RIGHT; + /// AV_CH_LAYOUT_STEREO_DOWNMIX = (AV_CH_STEREO_LEFT|AV_CH_STEREO_RIGHT) + public static readonly ulong AV_CH_LAYOUT_STEREO_DOWNMIX = AV_CH_STEREO_LEFT | AV_CH_STEREO_RIGHT; + /// AV_CH_LAYOUT_SURROUND = (AV_CH_LAYOUT_STEREO|AV_CH_FRONT_CENTER) + public static readonly ulong AV_CH_LAYOUT_SURROUND = AV_CH_LAYOUT_STEREO | AV_CH_FRONT_CENTER; + /// AV_CH_LOW_FREQUENCY = (1ULL << AV_CHAN_LOW_FREQUENCY ) + public static readonly ulong AV_CH_LOW_FREQUENCY = 0x1UL << 3; + /// AV_CH_LOW_FREQUENCY_2 = (1ULL << AV_CHAN_LOW_FREQUENCY_2 ) + public static readonly ulong AV_CH_LOW_FREQUENCY_2 = 0x1UL << 35; + /// AV_CH_SIDE_LEFT = (1ULL << AV_CHAN_SIDE_LEFT ) + public static readonly ulong AV_CH_SIDE_LEFT = 0x1UL << 9; + /// AV_CH_SIDE_RIGHT = (1ULL << AV_CHAN_SIDE_RIGHT ) + public static readonly ulong AV_CH_SIDE_RIGHT = 0x1UL << 10; + /// AV_CH_STEREO_LEFT = (1ULL << AV_CHAN_STEREO_LEFT ) + public static readonly ulong AV_CH_STEREO_LEFT = 0x1UL << 29; + /// AV_CH_STEREO_RIGHT = (1ULL << AV_CHAN_STEREO_RIGHT ) + public static readonly ulong AV_CH_STEREO_RIGHT = 0x1UL << 30; + /// AV_CH_SURROUND_DIRECT_LEFT = (1ULL << AV_CHAN_SURROUND_DIRECT_LEFT ) + public static readonly ulong AV_CH_SURROUND_DIRECT_LEFT = 0x1UL << 33; + /// AV_CH_SURROUND_DIRECT_RIGHT = (1ULL << AV_CHAN_SURROUND_DIRECT_RIGHT) + public static readonly ulong AV_CH_SURROUND_DIRECT_RIGHT = 0x1UL << 34; + /// AV_CH_TOP_BACK_CENTER = (1ULL << AV_CHAN_TOP_BACK_CENTER ) + public static readonly ulong AV_CH_TOP_BACK_CENTER = 0x1UL << 16; + /// AV_CH_TOP_BACK_LEFT = (1ULL << AV_CHAN_TOP_BACK_LEFT ) + public static readonly ulong AV_CH_TOP_BACK_LEFT = 0x1UL << 15; + /// AV_CH_TOP_BACK_RIGHT = (1ULL << AV_CHAN_TOP_BACK_RIGHT ) + public static readonly ulong AV_CH_TOP_BACK_RIGHT = 0x1UL << 17; + /// AV_CH_TOP_CENTER = (1ULL << AV_CHAN_TOP_CENTER ) + public static readonly ulong AV_CH_TOP_CENTER = 0x1UL << 11; + /// AV_CH_TOP_FRONT_CENTER = (1ULL << AV_CHAN_TOP_FRONT_CENTER ) + public static readonly 
ulong AV_CH_TOP_FRONT_CENTER = 0x1UL << 13; + /// AV_CH_TOP_FRONT_LEFT = (1ULL << AV_CHAN_TOP_FRONT_LEFT ) + public static readonly ulong AV_CH_TOP_FRONT_LEFT = 0x1UL << 12; + /// AV_CH_TOP_FRONT_RIGHT = (1ULL << AV_CHAN_TOP_FRONT_RIGHT ) + public static readonly ulong AV_CH_TOP_FRONT_RIGHT = 0x1UL << 14; + /// AV_CH_TOP_SIDE_LEFT = (1ULL << AV_CHAN_TOP_SIDE_LEFT ) + public static readonly ulong AV_CH_TOP_SIDE_LEFT = 0x1UL << 36; + /// AV_CH_TOP_SIDE_RIGHT = (1ULL << AV_CHAN_TOP_SIDE_RIGHT ) + public static readonly ulong AV_CH_TOP_SIDE_RIGHT = 0x1UL << 37; + /// AV_CH_WIDE_LEFT = (1ULL << AV_CHAN_WIDE_LEFT ) + public static readonly ulong AV_CH_WIDE_LEFT = 0x1UL << 31; + /// AV_CH_WIDE_RIGHT = (1ULL << AV_CHAN_WIDE_RIGHT ) + public static readonly ulong AV_CH_WIDE_RIGHT = 0x1UL << 32; + // public static AV_CHANNEL_LAYOUT_2_1 = AV_CHANNEL_LAYOUT_MASK(0x3, AV_CH_LAYOUT_2_1); + // public static AV_CHANNEL_LAYOUT_2_2 = AV_CHANNEL_LAYOUT_MASK(0x4, AV_CH_LAYOUT_2_2); + // public static AV_CHANNEL_LAYOUT_22POINT2 = AV_CHANNEL_LAYOUT_MASK(0x18, AV_CH_LAYOUT_22POINT2); + // public static AV_CHANNEL_LAYOUT_2POINT1 = AV_CHANNEL_LAYOUT_MASK(0x3, AV_CH_LAYOUT_2POINT1); + // public static AV_CHANNEL_LAYOUT_3POINT1 = AV_CHANNEL_LAYOUT_MASK(0x4, AV_CH_LAYOUT_3POINT1); + // public static AV_CHANNEL_LAYOUT_4POINT0 = AV_CHANNEL_LAYOUT_MASK(0x4, AV_CH_LAYOUT_4POINT0); + // public static AV_CHANNEL_LAYOUT_4POINT1 = AV_CHANNEL_LAYOUT_MASK(0x5, AV_CH_LAYOUT_4POINT1); + // public static AV_CHANNEL_LAYOUT_5POINT0 = AV_CHANNEL_LAYOUT_MASK(0x5, AV_CH_LAYOUT_5POINT0); + // public static AV_CHANNEL_LAYOUT_5POINT0_BACK = AV_CHANNEL_LAYOUT_MASK(0x5, AV_CH_LAYOUT_5POINT0_BACK); + // public static AV_CHANNEL_LAYOUT_5POINT1 = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_5POINT1); + // public static AV_CHANNEL_LAYOUT_5POINT1_BACK = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_5POINT1_BACK); + // public static AV_CHANNEL_LAYOUT_6POINT0 = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_6POINT0); + // public static 
AV_CHANNEL_LAYOUT_6POINT0_FRONT = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_6POINT0_FRONT); + // public static AV_CHANNEL_LAYOUT_6POINT1 = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_6POINT1); + // public static AV_CHANNEL_LAYOUT_6POINT1_BACK = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_6POINT1_BACK); + // public static AV_CHANNEL_LAYOUT_6POINT1_FRONT = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_6POINT1_FRONT); + // public static AV_CHANNEL_LAYOUT_7POINT0 = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_7POINT0); + // public static AV_CHANNEL_LAYOUT_7POINT0_FRONT = AV_CHANNEL_LAYOUT_MASK(0x7, AV_CH_LAYOUT_7POINT0_FRONT); + // public static AV_CHANNEL_LAYOUT_7POINT1 = AV_CHANNEL_LAYOUT_MASK(0x8, AV_CH_LAYOUT_7POINT1); + // public static AV_CHANNEL_LAYOUT_7POINT1_WIDE = AV_CHANNEL_LAYOUT_MASK(0x8, AV_CH_LAYOUT_7POINT1_WIDE); + // public static AV_CHANNEL_LAYOUT_7POINT1_WIDE_BACK = AV_CHANNEL_LAYOUT_MASK(0x8, AV_CH_LAYOUT_7POINT1_WIDE_BACK); + // public static AV_CHANNEL_LAYOUT_AMBISONIC_FIRST_ORDER = { .order = AV_CHANNEL_ORDER_AMBISONIC, .nb_channels = 4, .u = { .mask = 0 }}; + // public static AV_CHANNEL_LAYOUT_HEXADECAGONAL = AV_CHANNEL_LAYOUT_MASK(0x10, AV_CH_LAYOUT_HEXADECAGONAL); + // public static AV_CHANNEL_LAYOUT_HEXAGONAL = AV_CHANNEL_LAYOUT_MASK(0x6, AV_CH_LAYOUT_HEXAGONAL); + // public static AV_CHANNEL_LAYOUT_MASK = nb; + // public static AV_CHANNEL_LAYOUT_MONO = AV_CHANNEL_LAYOUT_MASK(0x1, AV_CH_LAYOUT_MONO); + // public static AV_CHANNEL_LAYOUT_OCTAGONAL = AV_CHANNEL_LAYOUT_MASK(0x8, AV_CH_LAYOUT_OCTAGONAL); + // public static AV_CHANNEL_LAYOUT_QUAD = AV_CHANNEL_LAYOUT_MASK(0x4, AV_CH_LAYOUT_QUAD); + // public static AV_CHANNEL_LAYOUT_STEREO = AV_CHANNEL_LAYOUT_MASK(0x2, AV_CH_LAYOUT_STEREO); + // public static AV_CHANNEL_LAYOUT_STEREO_DOWNMIX = AV_CHANNEL_LAYOUT_MASK(0x2, AV_CH_LAYOUT_STEREO_DOWNMIX); + // public static AV_CHANNEL_LAYOUT_SURROUND = AV_CHANNEL_LAYOUT_MASK(0x3, AV_CH_LAYOUT_SURROUND); + // public static av_clip = av_clip_c; + // public static 
av_clip_int16 = av_clip_int16_c; + // public static av_clip_int8 = av_clip_int8_c; + // public static av_clip_intp2 = av_clip_intp2_c; + // public static av_clip_uint16 = av_clip_uint16_c; + // public static av_clip_uint8 = av_clip_uint8_c; + // public static av_clip_uintp2 = av_clip_uintp2_c; + // public static av_clip64 = av_clip64_c; + // public static av_clipd = av_clipd_c; + // public static av_clipf = av_clipf_c; + // public static av_clipl_int32 = av_clipl_int32_c; + /// AV_CODEC_CAP_AUTO_THREADS = AV_CODEC_CAP_OTHER_THREADS + public const int AV_CODEC_CAP_AUTO_THREADS = AV_CODEC_CAP_OTHER_THREADS; + /// AV_CODEC_CAP_AVOID_PROBING = (1 << 17) + public const int AV_CODEC_CAP_AVOID_PROBING = 0x1 << 0x11; + /// AV_CODEC_CAP_CHANNEL_CONF = (1 << 10) + public const int AV_CODEC_CAP_CHANNEL_CONF = 0x1 << 0xa; + /// AV_CODEC_CAP_DELAY = (1 << 5) + public const int AV_CODEC_CAP_DELAY = 0x1 << 0x5; + /// AV_CODEC_CAP_DR1 = (1 << 1) + public const int AV_CODEC_CAP_DR1 = 0x1 << 0x1; + /// AV_CODEC_CAP_DRAW_HORIZ_BAND = (1 << 0) + public const int AV_CODEC_CAP_DRAW_HORIZ_BAND = 0x1 << 0x0; + /// AV_CODEC_CAP_ENCODER_FLUSH = (1 << 21) + public const int AV_CODEC_CAP_ENCODER_FLUSH = 0x1 << 0x15; + /// AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE = (1 << 20) + public const int AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE = 0x1 << 0x14; + /// AV_CODEC_CAP_EXPERIMENTAL = (1 << 9) + public const int AV_CODEC_CAP_EXPERIMENTAL = 0x1 << 0x9; + /// AV_CODEC_CAP_FRAME_THREADS = (1 << 12) + public const int AV_CODEC_CAP_FRAME_THREADS = 0x1 << 0xc; + /// AV_CODEC_CAP_HARDWARE = (1 << 18) + public const int AV_CODEC_CAP_HARDWARE = 0x1 << 0x12; + /// AV_CODEC_CAP_HYBRID = (1 << 19) + public const int AV_CODEC_CAP_HYBRID = 0x1 << 0x13; + /// AV_CODEC_CAP_INTRA_ONLY = 0x40000000 + public const int AV_CODEC_CAP_INTRA_ONLY = 0x40000000; + /// AV_CODEC_CAP_LOSSLESS = 0x80000000 + public const uint AV_CODEC_CAP_LOSSLESS = 0x80000000U; + /// AV_CODEC_CAP_OTHER_THREADS = (1 << 15) + public const int 
AV_CODEC_CAP_OTHER_THREADS = 0x1 << 0xf; + /// AV_CODEC_CAP_PARAM_CHANGE = (1 << 14) + public const int AV_CODEC_CAP_PARAM_CHANGE = 0x1 << 0xe; + /// AV_CODEC_CAP_SLICE_THREADS = (1 << 13) + public const int AV_CODEC_CAP_SLICE_THREADS = 0x1 << 0xd; + /// AV_CODEC_CAP_SMALL_LAST_FRAME = (1 << 6) + public const int AV_CODEC_CAP_SMALL_LAST_FRAME = 0x1 << 0x6; + /// AV_CODEC_CAP_SUBFRAMES = (1 << 8) + public const int AV_CODEC_CAP_SUBFRAMES = 0x1 << 0x8; + /// AV_CODEC_CAP_TRUNCATED = (1 << 3) + public const int AV_CODEC_CAP_TRUNCATED = 0x1 << 0x3; + /// AV_CODEC_CAP_VARIABLE_FRAME_SIZE = (1 << 16) + public const int AV_CODEC_CAP_VARIABLE_FRAME_SIZE = 0x1 << 0x10; + /// AV_CODEC_EXPORT_DATA_FILM_GRAIN = 0x1 << 0x3 + public const int AV_CODEC_EXPORT_DATA_FILM_GRAIN = 0x1 << 0x3; + /// AV_CODEC_EXPORT_DATA_MVS = 0x1 << 0x0 + public const int AV_CODEC_EXPORT_DATA_MVS = 0x1 << 0x0; + /// AV_CODEC_EXPORT_DATA_PRFT = 0x1 << 0x1 + public const int AV_CODEC_EXPORT_DATA_PRFT = 0x1 << 0x1; + /// AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS = 0x1 << 0x2 + public const int AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS = 0x1 << 0x2; + /// AV_CODEC_FLAG_4MV = 0x1 << 0x2 + public const int AV_CODEC_FLAG_4MV = 0x1 << 0x2; + /// AV_CODEC_FLAG_AC_PRED = 0x1 << 0x18 + public const int AV_CODEC_FLAG_AC_PRED = 0x1 << 0x18; + /// AV_CODEC_FLAG_BITEXACT = 0x1 << 0x17 + public const int AV_CODEC_FLAG_BITEXACT = 0x1 << 0x17; + /// AV_CODEC_FLAG_CLOSED_GOP = 0x1U << 0x1f + public const uint AV_CODEC_FLAG_CLOSED_GOP = 0x1U << 0x1f; + /// AV_CODEC_FLAG_DROPCHANGED = 0x1 << 0x5 + public const int AV_CODEC_FLAG_DROPCHANGED = 0x1 << 0x5; + /// AV_CODEC_FLAG_GLOBAL_HEADER = 0x1 << 0x16 + public const int AV_CODEC_FLAG_GLOBAL_HEADER = 0x1 << 0x16; + /// AV_CODEC_FLAG_GRAY = 0x1 << 0xd + public const int AV_CODEC_FLAG_GRAY = 0x1 << 0xd; + /// AV_CODEC_FLAG_INTERLACED_DCT = 0x1 << 0x12 + public const int AV_CODEC_FLAG_INTERLACED_DCT = 0x1 << 0x12; + /// AV_CODEC_FLAG_INTERLACED_ME = 0x1 << 0x1d + public const int 
AV_CODEC_FLAG_INTERLACED_ME = 0x1 << 0x1d; + /// AV_CODEC_FLAG_LOOP_FILTER = 0x1 << 0xb + public const int AV_CODEC_FLAG_LOOP_FILTER = 0x1 << 0xb; + /// AV_CODEC_FLAG_LOW_DELAY = 0x1 << 0x13 + public const int AV_CODEC_FLAG_LOW_DELAY = 0x1 << 0x13; + /// AV_CODEC_FLAG_OUTPUT_CORRUPT = 0x1 << 0x3 + public const int AV_CODEC_FLAG_OUTPUT_CORRUPT = 0x1 << 0x3; + /// AV_CODEC_FLAG_PASS1 = 0x1 << 0x9 + public const int AV_CODEC_FLAG_PASS1 = 0x1 << 0x9; + /// AV_CODEC_FLAG_PASS2 = 0x1 << 0xa + public const int AV_CODEC_FLAG_PASS2 = 0x1 << 0xa; + /// AV_CODEC_FLAG_PSNR = 0x1 << 0xf + public const int AV_CODEC_FLAG_PSNR = 0x1 << 0xf; + /// AV_CODEC_FLAG_QPEL = 0x1 << 0x4 + public const int AV_CODEC_FLAG_QPEL = 0x1 << 0x4; + /// AV_CODEC_FLAG_QSCALE = 0x1 << 0x1 + public const int AV_CODEC_FLAG_QSCALE = 0x1 << 0x1; + /// AV_CODEC_FLAG_TRUNCATED = 0x1 << 0x10 + public const int AV_CODEC_FLAG_TRUNCATED = 0x1 << 0x10; + /// AV_CODEC_FLAG_UNALIGNED = 0x1 << 0x0 + public const int AV_CODEC_FLAG_UNALIGNED = 0x1 << 0x0; + /// AV_CODEC_FLAG2_CHUNKS = 0x1 << 0xf + public const int AV_CODEC_FLAG2_CHUNKS = 0x1 << 0xf; + /// AV_CODEC_FLAG2_DROP_FRAME_TIMECODE = 0x1 << 0xd + public const int AV_CODEC_FLAG2_DROP_FRAME_TIMECODE = 0x1 << 0xd; + /// AV_CODEC_FLAG2_EXPORT_MVS = 0x1 << 0x1c + public const int AV_CODEC_FLAG2_EXPORT_MVS = 0x1 << 0x1c; + /// AV_CODEC_FLAG2_FAST = 0x1 << 0x0 + public const int AV_CODEC_FLAG2_FAST = 0x1 << 0x0; + /// AV_CODEC_FLAG2_IGNORE_CROP = 0x1 << 0x10 + public const int AV_CODEC_FLAG2_IGNORE_CROP = 0x1 << 0x10; + /// AV_CODEC_FLAG2_LOCAL_HEADER = 0x1 << 0x3 + public const int AV_CODEC_FLAG2_LOCAL_HEADER = 0x1 << 0x3; + /// AV_CODEC_FLAG2_NO_OUTPUT = 0x1 << 0x2 + public const int AV_CODEC_FLAG2_NO_OUTPUT = 0x1 << 0x2; + /// AV_CODEC_FLAG2_RO_FLUSH_NOOP = 0x1 << 0x1e + public const int AV_CODEC_FLAG2_RO_FLUSH_NOOP = 0x1 << 0x1e; + /// AV_CODEC_FLAG2_SHOW_ALL = 0x1 << 0x16 + public const int AV_CODEC_FLAG2_SHOW_ALL = 0x1 << 0x16; + /// AV_CODEC_FLAG2_SKIP_MANUAL 
= 0x1 << 0x1d + public const int AV_CODEC_FLAG2_SKIP_MANUAL = 0x1 << 0x1d; + /// AV_CODEC_ID_H265 = AV_CODEC_ID_HEVC + public static readonly int AV_CODEC_ID_H265 = 173; + /// AV_CODEC_ID_H266 = AV_CODEC_ID_VVC + public static readonly int AV_CODEC_ID_H266 = 196; + /// AV_CODEC_ID_IFF_BYTERUN1 = AV_CODEC_ID_IFF_ILBM + public static readonly int AV_CODEC_ID_IFF_BYTERUN1 = 136; + /// AV_CODEC_PROP_BITMAP_SUB = 0x1 << 0x10 + public const int AV_CODEC_PROP_BITMAP_SUB = 0x1 << 0x10; + /// AV_CODEC_PROP_INTRA_ONLY = 0x1 << 0x0 + public const int AV_CODEC_PROP_INTRA_ONLY = 0x1 << 0x0; + /// AV_CODEC_PROP_LOSSLESS = 0x1 << 0x2 + public const int AV_CODEC_PROP_LOSSLESS = 0x1 << 0x2; + /// AV_CODEC_PROP_LOSSY = 0x1 << 0x1 + public const int AV_CODEC_PROP_LOSSY = 0x1 << 0x1; + /// AV_CODEC_PROP_REORDER = 0x1 << 0x3 + public const int AV_CODEC_PROP_REORDER = 0x1 << 0x3; + /// AV_CODEC_PROP_TEXT_SUB = 0x1 << 0x11 + public const int AV_CODEC_PROP_TEXT_SUB = 0x1 << 0x11; + // public static av_cold = __attribute__((cold)); + // public static av_const = __attribute__((const)); + /// AV_CPU_FLAG_3DNOW = 0x4 + public const int AV_CPU_FLAG_3DNOW = 0x4; + /// AV_CPU_FLAG_3DNOWEXT = 0x20 + public const int AV_CPU_FLAG_3DNOWEXT = 0x20; + /// AV_CPU_FLAG_AESNI = 0x80000 + public const int AV_CPU_FLAG_AESNI = 0x80000; + /// AV_CPU_FLAG_ALTIVEC = 0x1 + public const int AV_CPU_FLAG_ALTIVEC = 0x1; + /// AV_CPU_FLAG_ARMV5TE = 0x1 << 0x0 + public const int AV_CPU_FLAG_ARMV5TE = 0x1 << 0x0; + /// AV_CPU_FLAG_ARMV6 = 0x1 << 0x1 + public const int AV_CPU_FLAG_ARMV6 = 0x1 << 0x1; + /// AV_CPU_FLAG_ARMV6T2 = 0x1 << 0x2 + public const int AV_CPU_FLAG_ARMV6T2 = 0x1 << 0x2; + /// AV_CPU_FLAG_ARMV8 = 0x1 << 0x6 + public const int AV_CPU_FLAG_ARMV8 = 0x1 << 0x6; + /// AV_CPU_FLAG_ATOM = 0x10000000 + public const int AV_CPU_FLAG_ATOM = 0x10000000; + /// AV_CPU_FLAG_AVX = 0x4000 + public const int AV_CPU_FLAG_AVX = 0x4000; + /// AV_CPU_FLAG_AVX2 = 0x8000 + public const int AV_CPU_FLAG_AVX2 = 0x8000; + /// 
AV_CPU_FLAG_AVX512 = 0x100000 + public const int AV_CPU_FLAG_AVX512 = 0x100000; + /// AV_CPU_FLAG_AVX512ICL = 0x200000 + public const int AV_CPU_FLAG_AVX512ICL = 0x200000; + /// AV_CPU_FLAG_AVXSLOW = 0x8000000 + public const int AV_CPU_FLAG_AVXSLOW = 0x8000000; + /// AV_CPU_FLAG_BMI1 = 0x20000 + public const int AV_CPU_FLAG_BMI1 = 0x20000; + /// AV_CPU_FLAG_BMI2 = 0x40000 + public const int AV_CPU_FLAG_BMI2 = 0x40000; + /// AV_CPU_FLAG_CMOV = 0x1000 + public const int AV_CPU_FLAG_CMOV = 0x1000; + /// AV_CPU_FLAG_FMA3 = 0x10000 + public const int AV_CPU_FLAG_FMA3 = 0x10000; + /// AV_CPU_FLAG_FMA4 = 0x800 + public const int AV_CPU_FLAG_FMA4 = 0x800; + /// AV_CPU_FLAG_FORCE = 0x80000000U + public const uint AV_CPU_FLAG_FORCE = 0x80000000U; + /// AV_CPU_FLAG_LASX = 0x1 << 0x1 + public const int AV_CPU_FLAG_LASX = 0x1 << 0x1; + /// AV_CPU_FLAG_LSX = 0x1 << 0x0 + public const int AV_CPU_FLAG_LSX = 0x1 << 0x0; + /// AV_CPU_FLAG_MMI = 0x1 << 0x0 + public const int AV_CPU_FLAG_MMI = 0x1 << 0x0; + /// AV_CPU_FLAG_MMX = 0x1 + public const int AV_CPU_FLAG_MMX = 0x1; + /// AV_CPU_FLAG_MMX2 = 0x2 + public const int AV_CPU_FLAG_MMX2 = 0x2; + /// AV_CPU_FLAG_MMXEXT = 0x2 + public const int AV_CPU_FLAG_MMXEXT = 0x2; + /// AV_CPU_FLAG_MSA = 0x1 << 0x1 + public const int AV_CPU_FLAG_MSA = 0x1 << 0x1; + /// AV_CPU_FLAG_NEON = 0x1 << 0x5 + public const int AV_CPU_FLAG_NEON = 0x1 << 0x5; + /// AV_CPU_FLAG_POWER8 = 0x4 + public const int AV_CPU_FLAG_POWER8 = 0x4; + /// AV_CPU_FLAG_SETEND = 0x1 << 0x10 + public const int AV_CPU_FLAG_SETEND = 0x1 << 0x10; + /// AV_CPU_FLAG_SLOW_GATHER = 0x2000000 + public const int AV_CPU_FLAG_SLOW_GATHER = 0x2000000; + /// AV_CPU_FLAG_SSE = 0x8 + public const int AV_CPU_FLAG_SSE = 0x8; + /// AV_CPU_FLAG_SSE2 = 0x10 + public const int AV_CPU_FLAG_SSE2 = 0x10; + /// AV_CPU_FLAG_SSE2SLOW = 0x40000000 + public const int AV_CPU_FLAG_SSE2SLOW = 0x40000000; + /// AV_CPU_FLAG_SSE3 = 0x40 + public const int AV_CPU_FLAG_SSE3 = 0x40; + /// AV_CPU_FLAG_SSE3SLOW = 
0x20000000 + public const int AV_CPU_FLAG_SSE3SLOW = 0x20000000; + /// AV_CPU_FLAG_SSE4 = 0x100 + public const int AV_CPU_FLAG_SSE4 = 0x100; + /// AV_CPU_FLAG_SSE42 = 0x200 + public const int AV_CPU_FLAG_SSE42 = 0x200; + /// AV_CPU_FLAG_SSSE3 = 0x80 + public const int AV_CPU_FLAG_SSSE3 = 0x80; + /// AV_CPU_FLAG_SSSE3SLOW = 0x4000000 + public const int AV_CPU_FLAG_SSSE3SLOW = 0x4000000; + /// AV_CPU_FLAG_VFP = 0x1 << 0x3 + public const int AV_CPU_FLAG_VFP = 0x1 << 0x3; + /// AV_CPU_FLAG_VFP_VM = 0x1 << 0x7 + public const int AV_CPU_FLAG_VFP_VM = 0x1 << 0x7; + /// AV_CPU_FLAG_VFPV3 = 0x1 << 0x4 + public const int AV_CPU_FLAG_VFPV3 = 0x1 << 0x4; + /// AV_CPU_FLAG_VSX = 0x2 + public const int AV_CPU_FLAG_VSX = 0x2; + /// AV_CPU_FLAG_XOP = 0x400 + public const int AV_CPU_FLAG_XOP = 0x400; + /// AV_DICT_APPEND = 32 + public const int AV_DICT_APPEND = 0x20; + /// AV_DICT_DONT_OVERWRITE = 16 + public const int AV_DICT_DONT_OVERWRITE = 0x10; + /// AV_DICT_DONT_STRDUP_KEY = 4 + public const int AV_DICT_DONT_STRDUP_KEY = 0x4; + /// AV_DICT_DONT_STRDUP_VAL = 8 + public const int AV_DICT_DONT_STRDUP_VAL = 0x8; + /// AV_DICT_IGNORE_SUFFIX = 2 + public const int AV_DICT_IGNORE_SUFFIX = 0x2; + /// AV_DICT_MATCH_CASE = 1 + public const int AV_DICT_MATCH_CASE = 0x1; + /// AV_DICT_MULTIKEY = 64 + public const int AV_DICT_MULTIKEY = 0x40; + /// AV_DISPOSITION_ATTACHED_PIC = (1 << 10) + public const int AV_DISPOSITION_ATTACHED_PIC = 0x1 << 0xa; + /// AV_DISPOSITION_CAPTIONS = (1 << 16) + public const int AV_DISPOSITION_CAPTIONS = 0x1 << 0x10; + /// AV_DISPOSITION_CLEAN_EFFECTS = (1 << 9) + public const int AV_DISPOSITION_CLEAN_EFFECTS = 0x1 << 0x9; + /// AV_DISPOSITION_COMMENT = (1 << 3) + public const int AV_DISPOSITION_COMMENT = 0x1 << 0x3; + /// AV_DISPOSITION_DEFAULT = (1 << 0) + public const int AV_DISPOSITION_DEFAULT = 0x1 << 0x0; + /// AV_DISPOSITION_DEPENDENT = (1 << 19) + public const int AV_DISPOSITION_DEPENDENT = 0x1 << 0x13; + /// AV_DISPOSITION_DESCRIPTIONS = (1 << 17) + 
public const int AV_DISPOSITION_DESCRIPTIONS = 0x1 << 0x11; + /// AV_DISPOSITION_DUB = (1 << 1) + public const int AV_DISPOSITION_DUB = 0x1 << 0x1; + /// AV_DISPOSITION_FORCED = (1 << 6) + public const int AV_DISPOSITION_FORCED = 0x1 << 0x6; + /// AV_DISPOSITION_HEARING_IMPAIRED = (1 << 7) + public const int AV_DISPOSITION_HEARING_IMPAIRED = 0x1 << 0x7; + /// AV_DISPOSITION_KARAOKE = (1 << 5) + public const int AV_DISPOSITION_KARAOKE = 0x1 << 0x5; + /// AV_DISPOSITION_LYRICS = (1 << 4) + public const int AV_DISPOSITION_LYRICS = 0x1 << 0x4; + /// AV_DISPOSITION_METADATA = (1 << 18) + public const int AV_DISPOSITION_METADATA = 0x1 << 0x12; + /// AV_DISPOSITION_NON_DIEGETIC = (1 << 12) + public const int AV_DISPOSITION_NON_DIEGETIC = 0x1 << 0xc; + /// AV_DISPOSITION_ORIGINAL = (1 << 2) + public const int AV_DISPOSITION_ORIGINAL = 0x1 << 0x2; + /// AV_DISPOSITION_STILL_IMAGE = (1 << 20) + public const int AV_DISPOSITION_STILL_IMAGE = 0x1 << 0x14; + /// AV_DISPOSITION_TIMED_THUMBNAILS = (1 << 11) + public const int AV_DISPOSITION_TIMED_THUMBNAILS = 0x1 << 0xb; + /// AV_DISPOSITION_VISUAL_IMPAIRED = (1 << 8) + public const int AV_DISPOSITION_VISUAL_IMPAIRED = 0x1 << 0x8; + /// AV_EF_AGGRESSIVE = 0x1 << 0x12 + public const int AV_EF_AGGRESSIVE = 0x1 << 0x12; + /// AV_EF_BITSTREAM = 0x1 << 0x1 + public const int AV_EF_BITSTREAM = 0x1 << 0x1; + /// AV_EF_BUFFER = 0x1 << 0x2 + public const int AV_EF_BUFFER = 0x1 << 0x2; + /// AV_EF_CAREFUL = 0x1 << 0x10 + public const int AV_EF_CAREFUL = 0x1 << 0x10; + /// AV_EF_COMPLIANT = 0x1 << 0x11 + public const int AV_EF_COMPLIANT = 0x1 << 0x11; + /// AV_EF_CRCCHECK = 0x1 << 0x0 + public const int AV_EF_CRCCHECK = 0x1 << 0x0; + /// AV_EF_EXPLODE = 0x1 << 0x3 + public const int AV_EF_EXPLODE = 0x1 << 0x3; + /// AV_EF_IGNORE_ERR = 0x1 << 0xf + public const int AV_EF_IGNORE_ERR = 0x1 << 0xf; + // public static av_err2str = (errnum) av_make_error_string((char[AV_ERROR_MAX_STRING_SIZE]){0}, AV_ERROR_MAX_STRING_SIZE, errnum); + /// 
AV_ERROR_MAX_STRING_SIZE = 64 + public const int AV_ERROR_MAX_STRING_SIZE = 0x40; + // public static av_extern_inline = inline; + /// AV_FOURCC_MAX_STRING_SIZE = 32 + public const int AV_FOURCC_MAX_STRING_SIZE = 0x20; + // public static av_fourcc2str = (fourcc) av_fourcc_make_string((char[AV_FOURCC_MAX_STRING_SIZE]){0}, fourcc); + /// AV_FRAME_FILENAME_FLAGS_MULTIPLE = 1 + public const int AV_FRAME_FILENAME_FLAGS_MULTIPLE = 0x1; + /// AV_FRAME_FLAG_CORRUPT = (1 << 0) + public const int AV_FRAME_FLAG_CORRUPT = 0x1 << 0x0; + /// AV_FRAME_FLAG_DISCARD = (1 << 2) + public const int AV_FRAME_FLAG_DISCARD = 0x1 << 0x2; + // public static AV_GCC_VERSION_AT_LEAST = x; + // public static AV_GCC_VERSION_AT_MOST = x; + /// AV_GET_BUFFER_FLAG_REF = 0x1 << 0x0 + public const int AV_GET_BUFFER_FLAG_REF = 0x1 << 0x0; + /// AV_GET_ENCODE_BUFFER_FLAG_REF = 0x1 << 0x0 + public const int AV_GET_ENCODE_BUFFER_FLAG_REF = 0x1 << 0x0; + // public static AV_GLUE = (a, b) a ## b; + // public static AV_HAS_BUILTIN = (x)(__has_builtin(x)); + /// AV_HAVE_BIGENDIAN = 0 + public const int AV_HAVE_BIGENDIAN = 0x0; + /// AV_HAVE_FAST_UNALIGNED = 1 + public const int AV_HAVE_FAST_UNALIGNED = 0x1; + /// AV_HWACCEL_CODEC_CAP_EXPERIMENTAL = 0x200 + public const int AV_HWACCEL_CODEC_CAP_EXPERIMENTAL = 0x200; + /// AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH = 0x1 << 0x1 + public const int AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH = 0x1 << 0x1; + /// AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH = 0x1 << 0x2 + public const int AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH = 0x1 << 0x2; + /// AV_HWACCEL_FLAG_IGNORE_LEVEL = 0x1 << 0x0 + public const int AV_HWACCEL_FLAG_IGNORE_LEVEL = 0x1 << 0x0; + /// AV_INPUT_BUFFER_MIN_SIZE = 0x4000 + public const int AV_INPUT_BUFFER_MIN_SIZE = 0x4000; + /// AV_INPUT_BUFFER_PADDING_SIZE = 64 + public const int AV_INPUT_BUFFER_PADDING_SIZE = 0x40; + // public static av_int_list_length = list; + // public static AV_IS_INPUT_DEVICE = (category)((category)(==41) || (category)(==43) || (category)(==45)); + 
// public static AV_IS_OUTPUT_DEVICE = (category)((category)(==40) || (category)(==42) || (category)(==44)); + // public static AV_JOIN = a; + // public static AV_LOG_C = (x)((x)(<<0x8)); + /// AV_LOG_DEBUG = 48 + public const int AV_LOG_DEBUG = 0x30; + /// AV_LOG_ERROR = 16 + public const int AV_LOG_ERROR = 0x10; + /// AV_LOG_FATAL = 8 + public const int AV_LOG_FATAL = 0x8; + /// AV_LOG_INFO = 32 + public const int AV_LOG_INFO = 0x20; + /// AV_LOG_MAX_OFFSET = (AV_LOG_TRACE - AV_LOG_QUIET) + public const int AV_LOG_MAX_OFFSET = AV_LOG_TRACE - AV_LOG_QUIET; + /// AV_LOG_PANIC = 0 + public const int AV_LOG_PANIC = 0x0; + /// AV_LOG_PRINT_LEVEL = 2 + public const int AV_LOG_PRINT_LEVEL = 0x2; + /// AV_LOG_QUIET = -8 + public const int AV_LOG_QUIET = -0x8; + /// AV_LOG_SKIP_REPEATED = 1 + public const int AV_LOG_SKIP_REPEATED = 0x1; + /// AV_LOG_TRACE = 56 + public const int AV_LOG_TRACE = 0x38; + /// AV_LOG_VERBOSE = 40 + public const int AV_LOG_VERBOSE = 0x28; + /// AV_LOG_WARNING = 24 + public const int AV_LOG_WARNING = 0x18; + // public static av_mod_uintp2 = av_mod_uintp2_c; + // public static AV_NE = be; + // public static av_noinline = __declspec(noinline); + /// AV_NOPTS_VALUE = ((int64_t)UINT64_C(0x8000000000000000)) + public static readonly long AV_NOPTS_VALUE = (long)(UINT64_C(0x8000000000000000L)); + // public static av_noreturn = __attribute__((noreturn)); + // public static AV_NOWARN_DEPRECATED = (code)(_Pragma("GCC diagnostic push")); + /// AV_NUM_DATA_POINTERS = 8 + public const int AV_NUM_DATA_POINTERS = 0x8; + /// AV_OPT_ALLOW_NULL = (1 << 2) + public const int AV_OPT_ALLOW_NULL = 0x1 << 0x2; + /// AV_OPT_FLAG_AUDIO_PARAM = 8 + public const int AV_OPT_FLAG_AUDIO_PARAM = 0x8; + /// AV_OPT_FLAG_BSF_PARAM = (1<<8) + public const int AV_OPT_FLAG_BSF_PARAM = 0x1 << 0x8; + /// AV_OPT_FLAG_CHILD_CONSTS = (1<<18) + public const int AV_OPT_FLAG_CHILD_CONSTS = 0x1 << 0x12; + /// AV_OPT_FLAG_DECODING_PARAM = 2 + public const int AV_OPT_FLAG_DECODING_PARAM = 
0x2; + /// AV_OPT_FLAG_DEPRECATED = (1<<17) + public const int AV_OPT_FLAG_DEPRECATED = 0x1 << 0x11; + /// AV_OPT_FLAG_ENCODING_PARAM = 1 + public const int AV_OPT_FLAG_ENCODING_PARAM = 0x1; + /// AV_OPT_FLAG_EXPORT = 64 + public const int AV_OPT_FLAG_EXPORT = 0x40; + /// AV_OPT_FLAG_FILTERING_PARAM = (1<<16) + public const int AV_OPT_FLAG_FILTERING_PARAM = 0x1 << 0x10; + /// AV_OPT_FLAG_READONLY = 128 + public const int AV_OPT_FLAG_READONLY = 0x80; + /// AV_OPT_FLAG_RUNTIME_PARAM = (1<<15) + public const int AV_OPT_FLAG_RUNTIME_PARAM = 0x1 << 0xf; + /// AV_OPT_FLAG_SUBTITLE_PARAM = 32 + public const int AV_OPT_FLAG_SUBTITLE_PARAM = 0x20; + /// AV_OPT_FLAG_VIDEO_PARAM = 16 + public const int AV_OPT_FLAG_VIDEO_PARAM = 0x10; + /// AV_OPT_MULTI_COMPONENT_RANGE = (1 << 12) + public const int AV_OPT_MULTI_COMPONENT_RANGE = 0x1 << 0xc; + /// AV_OPT_SEARCH_CHILDREN = (1 << 0) + public const int AV_OPT_SEARCH_CHILDREN = 0x1 << 0x0; + /// AV_OPT_SEARCH_FAKE_OBJ = (1 << 1) + public const int AV_OPT_SEARCH_FAKE_OBJ = 0x1 << 0x1; + /// AV_OPT_SERIALIZE_OPT_FLAGS_EXACT = 0x00000002 + public const int AV_OPT_SERIALIZE_OPT_FLAGS_EXACT = 0x2; + /// AV_OPT_SERIALIZE_SKIP_DEFAULTS = 0x00000001 + public const int AV_OPT_SERIALIZE_SKIP_DEFAULTS = 0x1; + // public static av_opt_set_int_list = (obj, name, val, term, flags) (av_int_list_length(val, term) > INT_MAX / sizeof(*(val)) ? 
AVERROR(EINVAL) : av_opt_set_bin(obj, name, (const uint8_t *)(val), av_int_list_length(val, term) * sizeof(*(val)), flags)); + // public static av_parity = av_parity_c; + /// AV_PARSER_PTS_NB = 0x4 + public const int AV_PARSER_PTS_NB = 0x4; + // public static AV_PIX_FMT_0BGR32 = AV_PIX_FMT_NE(0BGR, RGB0); + // public static AV_PIX_FMT_0RGB32 = AV_PIX_FMT_NE(0x0, RGB, BGR0); + // public static AV_PIX_FMT_AYUV64 = AV_PIX_FMT_NE(AYUV64BE, AYUV64LE); + // public static AV_PIX_FMT_BAYER_BGGR16 = AV_PIX_FMT_NE(BAYER_BGGR16BE, BAYER_BGGR16LE); + // public static AV_PIX_FMT_BAYER_GBRG16 = AV_PIX_FMT_NE(BAYER_GBRG16BE, BAYER_GBRG16LE); + // public static AV_PIX_FMT_BAYER_GRBG16 = AV_PIX_FMT_NE(BAYER_GRBG16BE, BAYER_GRBG16LE); + // public static AV_PIX_FMT_BAYER_RGGB16 = AV_PIX_FMT_NE(BAYER_RGGB16BE, BAYER_RGGB16LE); + // public static AV_PIX_FMT_BGR32 = AV_PIX_FMT_NE(ABGR, RGBA); + // public static AV_PIX_FMT_BGR32_1 = AV_PIX_FMT_NE(BGRA, ARGB); + // public static AV_PIX_FMT_BGR444 = AV_PIX_FMT_NE(BGR444BE, BGR444LE); + // public static AV_PIX_FMT_BGR48 = AV_PIX_FMT_NE(BGR48BE, BGR48LE); + // public static AV_PIX_FMT_BGR555 = AV_PIX_FMT_NE(BGR555BE, BGR555LE); + // public static AV_PIX_FMT_BGR565 = AV_PIX_FMT_NE(BGR565BE, BGR565LE); + // public static AV_PIX_FMT_BGRA64 = AV_PIX_FMT_NE(BGRA64BE, BGRA64LE); + /// AV_PIX_FMT_FLAG_ALPHA = 0x1 << 0x7 + public const int AV_PIX_FMT_FLAG_ALPHA = 0x1 << 0x7; + /// AV_PIX_FMT_FLAG_BAYER = 0x1 << 0x8 + public const int AV_PIX_FMT_FLAG_BAYER = 0x1 << 0x8; + /// AV_PIX_FMT_FLAG_BE = 0x1 << 0x0 + public const int AV_PIX_FMT_FLAG_BE = 0x1 << 0x0; + /// AV_PIX_FMT_FLAG_BITSTREAM = 0x1 << 0x2 + public const int AV_PIX_FMT_FLAG_BITSTREAM = 0x1 << 0x2; + /// AV_PIX_FMT_FLAG_FLOAT = 0x1 << 0x9 + public const int AV_PIX_FMT_FLAG_FLOAT = 0x1 << 0x9; + /// AV_PIX_FMT_FLAG_HWACCEL = 0x1 << 0x3 + public const int AV_PIX_FMT_FLAG_HWACCEL = 0x1 << 0x3; + /// AV_PIX_FMT_FLAG_PAL = 0x1 << 0x1 + public const int AV_PIX_FMT_FLAG_PAL = 0x1 << 0x1; + /// 
AV_PIX_FMT_FLAG_PLANAR = 0x1 << 0x4 + public const int AV_PIX_FMT_FLAG_PLANAR = 0x1 << 0x4; + /// AV_PIX_FMT_FLAG_RGB = 0x1 << 0x5 + public const int AV_PIX_FMT_FLAG_RGB = 0x1 << 0x5; + // public static AV_PIX_FMT_GBRAP10 = AV_PIX_FMT_NE(GBRAP10BE, GBRAP10LE); + // public static AV_PIX_FMT_GBRAP12 = AV_PIX_FMT_NE(GBRAP12BE, GBRAP12LE); + // public static AV_PIX_FMT_GBRAP16 = AV_PIX_FMT_NE(GBRAP16BE, GBRAP16LE); + // public static AV_PIX_FMT_GBRAPF32 = AV_PIX_FMT_NE(GBRAPF32BE, GBRAPF32LE); + // public static AV_PIX_FMT_GBRP10 = AV_PIX_FMT_NE(GBRP10BE, GBRP10LE); + // public static AV_PIX_FMT_GBRP12 = AV_PIX_FMT_NE(GBRP12BE, GBRP12LE); + // public static AV_PIX_FMT_GBRP14 = AV_PIX_FMT_NE(GBRP14BE, GBRP14LE); + // public static AV_PIX_FMT_GBRP16 = AV_PIX_FMT_NE(GBRP16BE, GBRP16LE); + // public static AV_PIX_FMT_GBRP9 = AV_PIX_FMT_NE(GBRP9BE, GBRP9LE); + // public static AV_PIX_FMT_GBRPF32 = AV_PIX_FMT_NE(GBRPF32BE, GBRPF32LE); + // public static AV_PIX_FMT_GRAY10 = AV_PIX_FMT_NE(GRAY10BE, GRAY10LE); + // public static AV_PIX_FMT_GRAY12 = AV_PIX_FMT_NE(GRAY12BE, GRAY12LE); + // public static AV_PIX_FMT_GRAY14 = AV_PIX_FMT_NE(GRAY14BE, GRAY14LE); + // public static AV_PIX_FMT_GRAY16 = AV_PIX_FMT_NE(GRAY16BE, GRAY16LE); + // public static AV_PIX_FMT_GRAY9 = AV_PIX_FMT_NE(GRAY9BE, GRAY9LE); + // public static AV_PIX_FMT_GRAYF32 = AV_PIX_FMT_NE(GRAYF32BE, GRAYF32LE); + // public static AV_PIX_FMT_NE = (be, le) AV_PIX_FMT_##le; + // public static AV_PIX_FMT_NV20 = AV_PIX_FMT_NE(NV20BE, NV20LE); + // public static AV_PIX_FMT_P010 = AV_PIX_FMT_NE(P010BE, P010LE); + // public static AV_PIX_FMT_P016 = AV_PIX_FMT_NE(P016BE, P016LE); + // public static AV_PIX_FMT_P210 = AV_PIX_FMT_NE(P210BE, P210LE); + // public static AV_PIX_FMT_P216 = AV_PIX_FMT_NE(P216BE, P216LE); + // public static AV_PIX_FMT_P410 = AV_PIX_FMT_NE(P410BE, P410LE); + // public static AV_PIX_FMT_P416 = AV_PIX_FMT_NE(P416BE, P416LE); + // public static AV_PIX_FMT_RGB32 = AV_PIX_FMT_NE(ARGB, BGRA); + // public 
static AV_PIX_FMT_RGB32_1 = AV_PIX_FMT_NE(RGBA, ABGR); + // public static AV_PIX_FMT_RGB444 = AV_PIX_FMT_NE(RGB444BE, RGB444LE); + // public static AV_PIX_FMT_RGB48 = AV_PIX_FMT_NE(RGB48BE, RGB48LE); + // public static AV_PIX_FMT_RGB555 = AV_PIX_FMT_NE(RGB555BE, RGB555LE); + // public static AV_PIX_FMT_RGB565 = AV_PIX_FMT_NE(RGB565BE, RGB565LE); + // public static AV_PIX_FMT_RGBA64 = AV_PIX_FMT_NE(RGBA64BE, RGBA64LE); + // public static AV_PIX_FMT_X2BGR10 = AV_PIX_FMT_NE(X2BGR10BE, X2BGR10LE); + // public static AV_PIX_FMT_X2RGB10 = AV_PIX_FMT_NE(X2RGB10BE, X2RGB10LE); + // public static AV_PIX_FMT_XYZ12 = AV_PIX_FMT_NE(XYZ12BE, XYZ12LE); + // public static AV_PIX_FMT_Y210 = AV_PIX_FMT_NE(Y210BE, Y210LE); + // public static AV_PIX_FMT_YA16 = AV_PIX_FMT_NE(YA16BE, YA16LE); + // public static AV_PIX_FMT_YUV420P10 = AV_PIX_FMT_NE(YUV420P10BE, YUV420P10LE); + // public static AV_PIX_FMT_YUV420P12 = AV_PIX_FMT_NE(YUV420P12BE, YUV420P12LE); + // public static AV_PIX_FMT_YUV420P14 = AV_PIX_FMT_NE(YUV420P14BE, YUV420P14LE); + // public static AV_PIX_FMT_YUV420P16 = AV_PIX_FMT_NE(YUV420P16BE, YUV420P16LE); + // public static AV_PIX_FMT_YUV420P9 = AV_PIX_FMT_NE(YUV420P9BE, YUV420P9LE); + // public static AV_PIX_FMT_YUV422P10 = AV_PIX_FMT_NE(YUV422P10BE, YUV422P10LE); + // public static AV_PIX_FMT_YUV422P12 = AV_PIX_FMT_NE(YUV422P12BE, YUV422P12LE); + // public static AV_PIX_FMT_YUV422P14 = AV_PIX_FMT_NE(YUV422P14BE, YUV422P14LE); + // public static AV_PIX_FMT_YUV422P16 = AV_PIX_FMT_NE(YUV422P16BE, YUV422P16LE); + // public static AV_PIX_FMT_YUV422P9 = AV_PIX_FMT_NE(YUV422P9BE, YUV422P9LE); + // public static AV_PIX_FMT_YUV440P10 = AV_PIX_FMT_NE(YUV440P10BE, YUV440P10LE); + // public static AV_PIX_FMT_YUV440P12 = AV_PIX_FMT_NE(YUV440P12BE, YUV440P12LE); + // public static AV_PIX_FMT_YUV444P10 = AV_PIX_FMT_NE(YUV444P10BE, YUV444P10LE); + // public static AV_PIX_FMT_YUV444P12 = AV_PIX_FMT_NE(YUV444P12BE, YUV444P12LE); + // public static AV_PIX_FMT_YUV444P14 = 
AV_PIX_FMT_NE(YUV444P14BE, YUV444P14LE); + // public static AV_PIX_FMT_YUV444P16 = AV_PIX_FMT_NE(YUV444P16BE, YUV444P16LE); + // public static AV_PIX_FMT_YUV444P9 = AV_PIX_FMT_NE(YUV444P9BE, YUV444P9LE); + // public static AV_PIX_FMT_YUVA420P10 = AV_PIX_FMT_NE(YUVA420P10BE, YUVA420P10LE); + // public static AV_PIX_FMT_YUVA420P16 = AV_PIX_FMT_NE(YUVA420P16BE, YUVA420P16LE); + // public static AV_PIX_FMT_YUVA420P9 = AV_PIX_FMT_NE(YUVA420P9BE, YUVA420P9LE); + // public static AV_PIX_FMT_YUVA422P10 = AV_PIX_FMT_NE(YUVA422P10BE, YUVA422P10LE); + // public static AV_PIX_FMT_YUVA422P12 = AV_PIX_FMT_NE(YUVA422P12BE, YUVA422P12LE); + // public static AV_PIX_FMT_YUVA422P16 = AV_PIX_FMT_NE(YUVA422P16BE, YUVA422P16LE); + // public static AV_PIX_FMT_YUVA422P9 = AV_PIX_FMT_NE(YUVA422P9BE, YUVA422P9LE); + // public static AV_PIX_FMT_YUVA444P10 = AV_PIX_FMT_NE(YUVA444P10BE, YUVA444P10LE); + // public static AV_PIX_FMT_YUVA444P12 = AV_PIX_FMT_NE(YUVA444P12BE, YUVA444P12LE); + // public static AV_PIX_FMT_YUVA444P16 = AV_PIX_FMT_NE(YUVA444P16BE, YUVA444P16LE); + // public static AV_PIX_FMT_YUVA444P9 = AV_PIX_FMT_NE(YUVA444P9BE, YUVA444P9LE); + /// AV_PKT_DATA_QUALITY_FACTOR = AV_PKT_DATA_QUALITY_STATS + public static readonly int AV_PKT_DATA_QUALITY_FACTOR = 8; + /// AV_PKT_FLAG_CORRUPT = 0x0002 + public const int AV_PKT_FLAG_CORRUPT = 0x2; + /// AV_PKT_FLAG_DISCARD = 0x0004 + public const int AV_PKT_FLAG_DISCARD = 0x4; + /// AV_PKT_FLAG_DISPOSABLE = 0x0010 + public const int AV_PKT_FLAG_DISPOSABLE = 0x10; + /// AV_PKT_FLAG_KEY = 0x0001 + public const int AV_PKT_FLAG_KEY = 0x1; + /// AV_PKT_FLAG_TRUSTED = 0x0008 + public const int AV_PKT_FLAG_TRUSTED = 0x8; + // public static av_popcount = av_popcount_c; + // public static av_popcount64 = av_popcount64_c; + // public static AV_PRAGMA = (s) _Pragma(#s); + // public static av_printf_format = fmtpos; + /// AV_PROGRAM_RUNNING = 1 + public const int AV_PROGRAM_RUNNING = 0x1; + /// AV_PTS_WRAP_ADD_OFFSET = 1 + public const int 
AV_PTS_WRAP_ADD_OFFSET = 0x1; + /// AV_PTS_WRAP_IGNORE = 0 + public const int AV_PTS_WRAP_IGNORE = 0x0; + /// AV_PTS_WRAP_SUB_OFFSET = -1 + public const int AV_PTS_WRAP_SUB_OFFSET = -0x1; + // public static av_pure = __attribute__((pure)); + // public static av_sat_add32 = av_sat_add32_c; + // public static av_sat_add64 = av_sat_add64_c; + // public static av_sat_dadd32 = av_sat_dadd32_c; + // public static av_sat_dsub32 = av_sat_dsub32_c; + // public static av_sat_sub32 = av_sat_sub32_c; + // public static av_sat_sub64 = av_sat_sub64_c; + // public static AV_STRINGIFY = (s)(AV_TOSTRING(s)); + /// AV_SUBTITLE_FLAG_FORCED = 0x1 + public const int AV_SUBTITLE_FLAG_FORCED = 0x1; + /// AV_TIME_BASE = 1000000 + public const int AV_TIME_BASE = 0xf4240; + // public static AV_TIME_BASE_Q = (AVRational){1, AV_TIME_BASE}; + /// AV_TIMECODE_STR_SIZE = 0x17 + public const int AV_TIMECODE_STR_SIZE = 0x17; + // public static AV_TOSTRING = (s) #s; + // public static av_uninit = (x) x=x; + // public static av_unused = __attribute__((unused)); + // public static av_used = __attribute__((used)); + // public static AV_VERSION = a; + // public static AV_VERSION_DOT = (a, b, c) a ##.## b ##.## c; + // public static AV_VERSION_INT = a; + // public static AV_VERSION_MAJOR = (a)((a)(>>0x10)); + // public static AV_VERSION_MICRO = (a)((a)(&0xff)); + // public static AV_VERSION_MINOR = (a)((a)(&0xff00) >> 0x8); + // public static AVERROR = (e) (-(e)); + /// AVERROR_BSF_NOT_FOUND = FFERRTAG(0xF8,'B','S','F') + public static readonly int AVERROR_BSF_NOT_FOUND = FFERRTAG(0xf8, 'B', 'S', 'F'); + /// AVERROR_BUFFER_TOO_SMALL = FFERRTAG( 'B','U','F','S') + public static readonly int AVERROR_BUFFER_TOO_SMALL = FFERRTAG('B', 'U', 'F', 'S'); + /// AVERROR_BUG = FFERRTAG( 'B','U','G','!') + public static readonly int AVERROR_BUG = FFERRTAG('B', 'U', 'G', '!'); + /// AVERROR_BUG2 = FFERRTAG( 'B','U','G',' ') + public static readonly int AVERROR_BUG2 = FFERRTAG('B', 'U', 'G', ' '); + /// 
AVERROR_DECODER_NOT_FOUND = FFERRTAG(0xF8,'D','E','C') + public static readonly int AVERROR_DECODER_NOT_FOUND = FFERRTAG(0xf8, 'D', 'E', 'C'); + /// AVERROR_DEMUXER_NOT_FOUND = FFERRTAG(0xF8,'D','E','M') + public static readonly int AVERROR_DEMUXER_NOT_FOUND = FFERRTAG(0xf8, 'D', 'E', 'M'); + /// AVERROR_ENCODER_NOT_FOUND = FFERRTAG(0xF8,'E','N','C') + public static readonly int AVERROR_ENCODER_NOT_FOUND = FFERRTAG(0xf8, 'E', 'N', 'C'); + /// AVERROR_EOF = FFERRTAG( 'E','O','F',' ') + public static readonly int AVERROR_EOF = FFERRTAG('E', 'O', 'F', ' '); + /// AVERROR_EXIT = FFERRTAG( 'E','X','I','T') + public static readonly int AVERROR_EXIT = FFERRTAG('E', 'X', 'I', 'T'); + /// AVERROR_EXPERIMENTAL = (-0x2bb2afa8) + public const int AVERROR_EXPERIMENTAL = -0x2bb2afa8; + /// AVERROR_EXTERNAL = FFERRTAG( 'E','X','T',' ') + public static readonly int AVERROR_EXTERNAL = FFERRTAG('E', 'X', 'T', ' '); + /// AVERROR_FILTER_NOT_FOUND = FFERRTAG(0xF8,'F','I','L') + public static readonly int AVERROR_FILTER_NOT_FOUND = FFERRTAG(0xf8, 'F', 'I', 'L'); + /// AVERROR_HTTP_BAD_REQUEST = FFERRTAG(0xF8,'4','0','0') + public static readonly int AVERROR_HTTP_BAD_REQUEST = FFERRTAG(0xf8, '4', '0', '0'); + /// AVERROR_HTTP_FORBIDDEN = FFERRTAG(0xF8,'4','0','3') + public static readonly int AVERROR_HTTP_FORBIDDEN = FFERRTAG(0xf8, '4', '0', '3'); + /// AVERROR_HTTP_NOT_FOUND = FFERRTAG(0xF8,'4','0','4') + public static readonly int AVERROR_HTTP_NOT_FOUND = FFERRTAG(0xf8, '4', '0', '4'); + /// AVERROR_HTTP_OTHER_4XX = FFERRTAG(0xF8,'4','X','X') + public static readonly int AVERROR_HTTP_OTHER_4XX = FFERRTAG(0xf8, '4', 'X', 'X'); + /// AVERROR_HTTP_SERVER_ERROR = FFERRTAG(0xF8,'5','X','X') + public static readonly int AVERROR_HTTP_SERVER_ERROR = FFERRTAG(0xf8, '5', 'X', 'X'); + /// AVERROR_HTTP_UNAUTHORIZED = FFERRTAG(0xF8,'4','0','1') + public static readonly int AVERROR_HTTP_UNAUTHORIZED = FFERRTAG(0xf8, '4', '0', '1'); + /// AVERROR_INPUT_CHANGED = (-0x636e6701) + public const int 
AVERROR_INPUT_CHANGED = -0x636e6701; + /// AVERROR_INVALIDDATA = FFERRTAG( 'I','N','D','A') + public static readonly int AVERROR_INVALIDDATA = FFERRTAG('I', 'N', 'D', 'A'); + /// AVERROR_MUXER_NOT_FOUND = FFERRTAG(0xF8,'M','U','X') + public static readonly int AVERROR_MUXER_NOT_FOUND = FFERRTAG(0xf8, 'M', 'U', 'X'); + /// AVERROR_OPTION_NOT_FOUND = FFERRTAG(0xF8,'O','P','T') + public static readonly int AVERROR_OPTION_NOT_FOUND = FFERRTAG(0xf8, 'O', 'P', 'T'); + /// AVERROR_OUTPUT_CHANGED = (-0x636e6702) + public const int AVERROR_OUTPUT_CHANGED = -0x636e6702; + /// AVERROR_PATCHWELCOME = FFERRTAG( 'P','A','W','E') + public static readonly int AVERROR_PATCHWELCOME = FFERRTAG('P', 'A', 'W', 'E'); + /// AVERROR_PROTOCOL_NOT_FOUND = FFERRTAG(0xF8,'P','R','O') + public static readonly int AVERROR_PROTOCOL_NOT_FOUND = FFERRTAG(0xf8, 'P', 'R', 'O'); + /// AVERROR_STREAM_NOT_FOUND = FFERRTAG(0xF8,'S','T','R') + public static readonly int AVERROR_STREAM_NOT_FOUND = FFERRTAG(0xf8, 'S', 'T', 'R'); + /// AVERROR_UNKNOWN = FFERRTAG( 'U','N','K','N') + public static readonly int AVERROR_UNKNOWN = FFERRTAG('U', 'N', 'K', 'N'); + /// AVFILTER_CMD_FLAG_FAST = 0x2 + public const int AVFILTER_CMD_FLAG_FAST = 0x2; + /// AVFILTER_CMD_FLAG_ONE = 0x1 + public const int AVFILTER_CMD_FLAG_ONE = 0x1; + /// AVFILTER_FLAG_DYNAMIC_INPUTS = 0x1 << 0x0 + public const int AVFILTER_FLAG_DYNAMIC_INPUTS = 0x1 << 0x0; + /// AVFILTER_FLAG_DYNAMIC_OUTPUTS = 0x1 << 0x1 + public const int AVFILTER_FLAG_DYNAMIC_OUTPUTS = 0x1 << 0x1; + /// AVFILTER_FLAG_METADATA_ONLY = 0x1 << 0x3 + public const int AVFILTER_FLAG_METADATA_ONLY = 0x1 << 0x3; + /// AVFILTER_FLAG_SLICE_THREADS = 0x1 << 0x2 + public const int AVFILTER_FLAG_SLICE_THREADS = 0x1 << 0x2; + /// AVFILTER_FLAG_SUPPORT_TIMELINE = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL + public const int AVFILTER_FLAG_SUPPORT_TIMELINE = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL; + /// 
AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC = 0x1 << 0x10 + public const int AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC = 0x1 << 0x10; + /// AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL = 0x1 << 0x11 + public const int AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL = 0x1 << 0x11; + /// AVFILTER_THREAD_SLICE = 0x1 << 0x0 + public const int AVFILTER_THREAD_SLICE = 0x1 << 0x0; + /// AVFMT_ALLOW_FLUSH = 0x10000 + public const int AVFMT_ALLOW_FLUSH = 0x10000; + /// AVFMT_AVOID_NEG_TS_AUTO = -1 + public const int AVFMT_AVOID_NEG_TS_AUTO = -0x1; + /// AVFMT_AVOID_NEG_TS_DISABLED = 0 + public const int AVFMT_AVOID_NEG_TS_DISABLED = 0x0; + /// AVFMT_AVOID_NEG_TS_MAKE_NON_NEGATIVE = 1 + public const int AVFMT_AVOID_NEG_TS_MAKE_NON_NEGATIVE = 0x1; + /// AVFMT_AVOID_NEG_TS_MAKE_ZERO = 2 + public const int AVFMT_AVOID_NEG_TS_MAKE_ZERO = 0x2; + /// AVFMT_EVENT_FLAG_METADATA_UPDATED = 0x0001 + public const int AVFMT_EVENT_FLAG_METADATA_UPDATED = 0x1; + /// AVFMT_EXPERIMENTAL = 0x0004 + public const int AVFMT_EXPERIMENTAL = 0x4; + /// AVFMT_FLAG_AUTO_BSF = 0x200000 + public const int AVFMT_FLAG_AUTO_BSF = 0x200000; + /// AVFMT_FLAG_BITEXACT = 0x0400 + public const int AVFMT_FLAG_BITEXACT = 0x400; + /// AVFMT_FLAG_CUSTOM_IO = 0x0080 + public const int AVFMT_FLAG_CUSTOM_IO = 0x80; + /// AVFMT_FLAG_DISCARD_CORRUPT = 0x0100 + public const int AVFMT_FLAG_DISCARD_CORRUPT = 0x100; + /// AVFMT_FLAG_FAST_SEEK = 0x80000 + public const int AVFMT_FLAG_FAST_SEEK = 0x80000; + /// AVFMT_FLAG_FLUSH_PACKETS = 0x0200 + public const int AVFMT_FLAG_FLUSH_PACKETS = 0x200; + /// AVFMT_FLAG_GENPTS = 0x0001 + public const int AVFMT_FLAG_GENPTS = 0x1; + /// AVFMT_FLAG_IGNDTS = 0x0008 + public const int AVFMT_FLAG_IGNDTS = 0x8; + /// AVFMT_FLAG_IGNIDX = 0x0002 + public const int AVFMT_FLAG_IGNIDX = 0x2; + /// AVFMT_FLAG_NOBUFFER = 0x0040 + public const int AVFMT_FLAG_NOBUFFER = 0x40; + /// AVFMT_FLAG_NOFILLIN = 0x0010 + public const int AVFMT_FLAG_NOFILLIN = 0x10; + /// AVFMT_FLAG_NONBLOCK = 0x0004 + public const int 
AVFMT_FLAG_NONBLOCK = 0x4; + /// AVFMT_FLAG_NOPARSE = 0x0020 + public const int AVFMT_FLAG_NOPARSE = 0x20; + /// AVFMT_FLAG_PRIV_OPT = 0x20000 + public const int AVFMT_FLAG_PRIV_OPT = 0x20000; + /// AVFMT_FLAG_SHORTEST = 0x100000 + public const int AVFMT_FLAG_SHORTEST = 0x100000; + /// AVFMT_FLAG_SORT_DTS = 0x10000 + public const int AVFMT_FLAG_SORT_DTS = 0x10000; + /// AVFMT_GENERIC_INDEX = 0x0100 + public const int AVFMT_GENERIC_INDEX = 0x100; + /// AVFMT_GLOBALHEADER = 0x0040 + public const int AVFMT_GLOBALHEADER = 0x40; + /// AVFMT_NEEDNUMBER = 0x0002 + public const int AVFMT_NEEDNUMBER = 0x2; + /// AVFMT_NO_BYTE_SEEK = 0x8000 + public const int AVFMT_NO_BYTE_SEEK = 0x8000; + /// AVFMT_NOBINSEARCH = 0x2000 + public const int AVFMT_NOBINSEARCH = 0x2000; + /// AVFMT_NODIMENSIONS = 0x0800 + public const int AVFMT_NODIMENSIONS = 0x800; + /// AVFMT_NOFILE = 0x0001 + public const int AVFMT_NOFILE = 0x1; + /// AVFMT_NOGENSEARCH = 0x4000 + public const int AVFMT_NOGENSEARCH = 0x4000; + /// AVFMT_NOSTREAMS = 0x1000 + public const int AVFMT_NOSTREAMS = 0x1000; + /// AVFMT_NOTIMESTAMPS = 0x0080 + public const int AVFMT_NOTIMESTAMPS = 0x80; + /// AVFMT_SEEK_TO_PTS = 0x4000000 + public const int AVFMT_SEEK_TO_PTS = 0x4000000; + /// AVFMT_SHOW_IDS = 0x0008 + public const int AVFMT_SHOW_IDS = 0x8; + /// AVFMT_TS_DISCONT = 0x0200 + public const int AVFMT_TS_DISCONT = 0x200; + /// AVFMT_TS_NEGATIVE = 0x40000 + public const int AVFMT_TS_NEGATIVE = 0x40000; + /// AVFMT_TS_NONSTRICT = 0x20000 + public const int AVFMT_TS_NONSTRICT = 0x20000; + /// AVFMT_VARIABLE_FPS = 0x0400 + public const int AVFMT_VARIABLE_FPS = 0x400; + /// AVFMTCTX_NOHEADER = 0x0001 + public const int AVFMTCTX_NOHEADER = 0x1; + /// AVFMTCTX_UNSEEKABLE = 0x0002 + public const int AVFMTCTX_UNSEEKABLE = 0x2; + /// AVINDEX_DISCARD_FRAME = 0x0002 + public const int AVINDEX_DISCARD_FRAME = 0x2; + /// AVINDEX_KEYFRAME = 0x0001 + public const int AVINDEX_KEYFRAME = 0x1; + /// AVIO_FLAG_DIRECT = 0x8000 + public const 
int AVIO_FLAG_DIRECT = 0x8000; + /// AVIO_FLAG_NONBLOCK = 8 + public const int AVIO_FLAG_NONBLOCK = 0x8; + /// AVIO_FLAG_READ = 1 + public const int AVIO_FLAG_READ = 0x1; + /// AVIO_FLAG_READ_WRITE = (AVIO_FLAG_READ|AVIO_FLAG_WRITE) + public const int AVIO_FLAG_READ_WRITE = AVIO_FLAG_READ | AVIO_FLAG_WRITE; + /// AVIO_FLAG_WRITE = 2 + public const int AVIO_FLAG_WRITE = 0x2; + // public static avio_print = s; + /// AVIO_SEEKABLE_NORMAL = (1 << 0) + public const int AVIO_SEEKABLE_NORMAL = 0x1 << 0x0; + /// AVIO_SEEKABLE_TIME = (1 << 1) + public const int AVIO_SEEKABLE_TIME = 0x1 << 0x1; + /// AVPALETTE_COUNT = 256 + public const int AVPALETTE_COUNT = 0x100; + /// AVPALETTE_SIZE = 1024 + public const int AVPALETTE_SIZE = 0x400; + /// AVPROBE_PADDING_SIZE = 32 + public const int AVPROBE_PADDING_SIZE = 0x20; + /// AVPROBE_SCORE_EXTENSION = 50 + public const int AVPROBE_SCORE_EXTENSION = 0x32; + /// AVPROBE_SCORE_MAX = 100 + public const int AVPROBE_SCORE_MAX = 0x64; + /// AVPROBE_SCORE_MIME = 75 + public const int AVPROBE_SCORE_MIME = 0x4b; + /// AVPROBE_SCORE_RETRY = (AVPROBE_SCORE_MAX/4) + public const int AVPROBE_SCORE_RETRY = AVPROBE_SCORE_MAX / 0x4; + /// AVPROBE_SCORE_STREAM_RETRY = (AVPROBE_SCORE_MAX/4-1) + public const int AVPROBE_SCORE_STREAM_RETRY = AVPROBE_SCORE_MAX / 0x4 - 0x1; + /// AVSEEK_FLAG_ANY = 4 + public const int AVSEEK_FLAG_ANY = 0x4; + /// AVSEEK_FLAG_BACKWARD = 1 + public const int AVSEEK_FLAG_BACKWARD = 0x1; + /// AVSEEK_FLAG_BYTE = 2 + public const int AVSEEK_FLAG_BYTE = 0x2; + /// AVSEEK_FLAG_FRAME = 8 + public const int AVSEEK_FLAG_FRAME = 0x8; + /// AVSEEK_FORCE = 0x20000 + public const int AVSEEK_FORCE = 0x20000; + /// AVSEEK_SIZE = 0x10000 + public const int AVSEEK_SIZE = 0x10000; + /// AVSTREAM_EVENT_FLAG_METADATA_UPDATED = 0x0001 + public const int AVSTREAM_EVENT_FLAG_METADATA_UPDATED = 0x1; + /// AVSTREAM_EVENT_FLAG_NEW_PACKETS = (1 << 1) + public const int AVSTREAM_EVENT_FLAG_NEW_PACKETS = 0x1 << 0x1; + /// AVSTREAM_INIT_IN_INIT_OUTPUT 
= 1 + public const int AVSTREAM_INIT_IN_INIT_OUTPUT = 0x1; + /// AVSTREAM_INIT_IN_WRITE_HEADER = 0 + public const int AVSTREAM_INIT_IN_WRITE_HEADER = 0x0; + // public static AVUNERROR = (e) (-(e)); + // public static DECLARE_ALIGNED = n; + // public static DECLARE_ASM_ALIGNED = n; + // public static DECLARE_ASM_CONST = n; + /// FF_API_AUTO_THREADS = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_AUTO_THREADS = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_AV_FOPEN_UTF8 = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_AV_FOPEN_UTF8 = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_AV_MALLOCZ_ARRAY = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_AV_MALLOCZ_ARRAY = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_AVCTX_TIMEBASE = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_AVCTX_TIMEBASE = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_AVIOCONTEXT_WRITTEN = (LIBAVFORMAT_VERSION_MAJOR < 60) + public const bool FF_API_AVIOCONTEXT_WRITTEN = LIBAVFORMAT_VERSION_MAJOR < 0x3c; + /// FF_API_AVSTREAM_CLASS = (LIBAVFORMAT_VERSION_MAJOR > 59) + public const bool FF_API_AVSTREAM_CLASS = LIBAVFORMAT_VERSION_MAJOR > 0x3b; + /// FF_API_BUFFERSINK_ALLOC = LIBAVFILTER_VERSION_MAJOR < 0x9 + public const bool FF_API_BUFFERSINK_ALLOC = LIBAVFILTER_VERSION_MAJOR < 0x9; + /// FF_API_COLORSPACE_NAME = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_COLORSPACE_NAME = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_COMPUTE_PKT_FIELDS2 = (LIBAVFORMAT_VERSION_MAJOR < 60) + public const bool FF_API_COMPUTE_PKT_FIELDS2 = LIBAVFORMAT_VERSION_MAJOR < 0x3c; + /// FF_API_D2STR = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_D2STR = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_DEBUG_MV = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_DEBUG_MV = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_DECLARE_ALIGNED = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_DECLARE_ALIGNED = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// 
FF_API_DEVICE_CAPABILITIES = (LIBAVDEVICE_VERSION_MAJOR < 60) + public const bool FF_API_DEVICE_CAPABILITIES = LIBAVDEVICE_VERSION_MAJOR < 0x3c; + /// FF_API_FIFO_OLD_API = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_FIFO_OLD_API = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_FIFO_PEEK2 = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_FIFO_PEEK2 = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_FLAG_TRUNCATED = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_FLAG_TRUNCATED = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_GET_FRAME_CLASS = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_GET_FRAME_CLASS = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_IDCT_NONE = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_IDCT_NONE = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_INIT_PACKET = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_INIT_PACKET = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_LAVF_PRIV_OPT = (LIBAVFORMAT_VERSION_MAJOR < 60) + public const bool FF_API_LAVF_PRIV_OPT = LIBAVFORMAT_VERSION_MAJOR < 0x3c; + /// FF_API_OLD_CHANNEL_LAYOUT = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_OLD_CHANNEL_LAYOUT = LIBAVUTIL_VERSION_MAJOR < 0x3a; + /// FF_API_OPENH264_CABAC = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_OPENH264_CABAC = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_OPENH264_SLICE_MODE = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_OPENH264_SLICE_MODE = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_PAD_COUNT = LIBAVFILTER_VERSION_MAJOR < 0x9 + public const bool FF_API_PAD_COUNT = LIBAVFILTER_VERSION_MAJOR < 0x9; + /// FF_API_R_FRAME_RATE = 1 + public const int FF_API_R_FRAME_RATE = 0x1; + /// FF_API_SUB_TEXT_FORMAT = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_SUB_TEXT_FORMAT = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_SVTAV1_OPTS = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_SVTAV1_OPTS = LIBAVCODEC_VERSION_MAJOR < 0x3c; 
+ /// FF_API_SWS_PARAM_OPTION = LIBAVFILTER_VERSION_MAJOR < 0x9 + public const bool FF_API_SWS_PARAM_OPTION = LIBAVFILTER_VERSION_MAJOR < 0x9; + /// FF_API_THREAD_SAFE_CALLBACKS = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_THREAD_SAFE_CALLBACKS = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_UNUSED_CODEC_CAPS = (LIBAVCODEC_VERSION_MAJOR < 60) + public const bool FF_API_UNUSED_CODEC_CAPS = LIBAVCODEC_VERSION_MAJOR < 0x3c; + /// FF_API_XVMC = (LIBAVUTIL_VERSION_MAJOR < 58) + public const bool FF_API_XVMC = LIBAVUTIL_VERSION_MAJOR < 0x3a; + // public static FF_ARRAY_ELEMS = (a) (sizeof(a) / sizeof((a)[0])); + /// FF_BUG_AMV = 0x20 + public const int FF_BUG_AMV = 0x20; + /// FF_BUG_AUTODETECT = 0x1 + public const int FF_BUG_AUTODETECT = 0x1; + /// FF_BUG_DC_CLIP = 0x1000 + public const int FF_BUG_DC_CLIP = 0x1000; + /// FF_BUG_DIRECT_BLOCKSIZE = 0x200 + public const int FF_BUG_DIRECT_BLOCKSIZE = 0x200; + /// FF_BUG_EDGE = 0x400 + public const int FF_BUG_EDGE = 0x400; + /// FF_BUG_HPEL_CHROMA = 0x800 + public const int FF_BUG_HPEL_CHROMA = 0x800; + /// FF_BUG_IEDGE = 0x8000 + public const int FF_BUG_IEDGE = 0x8000; + /// FF_BUG_MS = 0x2000 + public const int FF_BUG_MS = 0x2000; + /// FF_BUG_NO_PADDING = 0x10 + public const int FF_BUG_NO_PADDING = 0x10; + /// FF_BUG_QPEL_CHROMA = 0x40 + public const int FF_BUG_QPEL_CHROMA = 0x40; + /// FF_BUG_QPEL_CHROMA2 = 0x100 + public const int FF_BUG_QPEL_CHROMA2 = 0x100; + /// FF_BUG_STD_QPEL = 0x80 + public const int FF_BUG_STD_QPEL = 0x80; + /// FF_BUG_TRUNCATED = 0x4000 + public const int FF_BUG_TRUNCATED = 0x4000; + /// FF_BUG_UMP4 = 0x8 + public const int FF_BUG_UMP4 = 0x8; + /// FF_BUG_XVID_ILACE = 0x4 + public const int FF_BUG_XVID_ILACE = 0x4; + // public static FF_CEIL_RSHIFT = AV_CEIL_RSHIFT; + /// FF_CMP_BIT = 0x5 + public const int FF_CMP_BIT = 0x5; + /// FF_CMP_CHROMA = 0x100 + public const int FF_CMP_CHROMA = 0x100; + /// FF_CMP_DCT = 0x3 + public const int FF_CMP_DCT = 0x3; + /// FF_CMP_DCT264 = 0xe + 
public const int FF_CMP_DCT264 = 0xe; + /// FF_CMP_DCTMAX = 0xd + public const int FF_CMP_DCTMAX = 0xd; + /// FF_CMP_MEDIAN_SAD = 0xf + public const int FF_CMP_MEDIAN_SAD = 0xf; + /// FF_CMP_NSSE = 0xa + public const int FF_CMP_NSSE = 0xa; + /// FF_CMP_PSNR = 0x4 + public const int FF_CMP_PSNR = 0x4; + /// FF_CMP_RD = 0x6 + public const int FF_CMP_RD = 0x6; + /// FF_CMP_SAD = 0x0 + public const int FF_CMP_SAD = 0x0; + /// FF_CMP_SATD = 0x2 + public const int FF_CMP_SATD = 0x2; + /// FF_CMP_SSE = 0x1 + public const int FF_CMP_SSE = 0x1; + /// FF_CMP_VSAD = 0x8 + public const int FF_CMP_VSAD = 0x8; + /// FF_CMP_VSSE = 0x9 + public const int FF_CMP_VSSE = 0x9; + /// FF_CMP_W53 = 0xb + public const int FF_CMP_W53 = 0xb; + /// FF_CMP_W97 = 0xc + public const int FF_CMP_W97 = 0xc; + /// FF_CMP_ZERO = 0x7 + public const int FF_CMP_ZERO = 0x7; + /// FF_CODEC_PROPERTY_CLOSED_CAPTIONS = 0x2 + public const int FF_CODEC_PROPERTY_CLOSED_CAPTIONS = 0x2; + /// FF_CODEC_PROPERTY_FILM_GRAIN = 0x4 + public const int FF_CODEC_PROPERTY_FILM_GRAIN = 0x4; + /// FF_CODEC_PROPERTY_LOSSLESS = 0x1 + public const int FF_CODEC_PROPERTY_LOSSLESS = 0x1; + /// FF_COMPLIANCE_EXPERIMENTAL = -0x2 + public const int FF_COMPLIANCE_EXPERIMENTAL = -0x2; + /// FF_COMPLIANCE_NORMAL = 0x0 + public const int FF_COMPLIANCE_NORMAL = 0x0; + /// FF_COMPLIANCE_STRICT = 0x1 + public const int FF_COMPLIANCE_STRICT = 0x1; + /// FF_COMPLIANCE_UNOFFICIAL = -0x1 + public const int FF_COMPLIANCE_UNOFFICIAL = -0x1; + /// FF_COMPLIANCE_VERY_STRICT = 0x2 + public const int FF_COMPLIANCE_VERY_STRICT = 0x2; + /// FF_COMPRESSION_DEFAULT = -0x1 + public const int FF_COMPRESSION_DEFAULT = -0x1; + /// FF_DCT_ALTIVEC = 0x5 + public const int FF_DCT_ALTIVEC = 0x5; + /// FF_DCT_AUTO = 0x0 + public const int FF_DCT_AUTO = 0x0; + /// FF_DCT_FAAN = 0x6 + public const int FF_DCT_FAAN = 0x6; + /// FF_DCT_FASTINT = 0x1 + public const int FF_DCT_FASTINT = 0x1; + /// FF_DCT_INT = 0x2 + public const int FF_DCT_INT = 0x2; + /// FF_DCT_MMX 
= 0x3 + public const int FF_DCT_MMX = 0x3; + /// FF_DEBUG_BITSTREAM = 0x4 + public const int FF_DEBUG_BITSTREAM = 0x4; + /// FF_DEBUG_BUFFERS = 0x8000 + public const int FF_DEBUG_BUFFERS = 0x8000; + /// FF_DEBUG_BUGS = 0x1000 + public const int FF_DEBUG_BUGS = 0x1000; + /// FF_DEBUG_DCT_COEFF = 0x40 + public const int FF_DEBUG_DCT_COEFF = 0x40; + /// FF_DEBUG_ER = 0x400 + public const int FF_DEBUG_ER = 0x400; + /// FF_DEBUG_GREEN_MD = 0x800000 + public const int FF_DEBUG_GREEN_MD = 0x800000; + /// FF_DEBUG_MB_TYPE = 0x8 + public const int FF_DEBUG_MB_TYPE = 0x8; + /// FF_DEBUG_MMCO = 0x800 + public const int FF_DEBUG_MMCO = 0x800; + /// FF_DEBUG_NOMC = 0x1000000 + public const int FF_DEBUG_NOMC = 0x1000000; + /// FF_DEBUG_PICT_INFO = 0x1 + public const int FF_DEBUG_PICT_INFO = 0x1; + /// FF_DEBUG_QP = 0x10 + public const int FF_DEBUG_QP = 0x10; + /// FF_DEBUG_RC = 0x2 + public const int FF_DEBUG_RC = 0x2; + /// FF_DEBUG_SKIP = 0x80 + public const int FF_DEBUG_SKIP = 0x80; + /// FF_DEBUG_STARTCODE = 0x100 + public const int FF_DEBUG_STARTCODE = 0x100; + /// FF_DEBUG_THREADS = 0x10000 + public const int FF_DEBUG_THREADS = 0x10000; + /// FF_DEBUG_VIS_MV_B_BACK = 0x4 + public const int FF_DEBUG_VIS_MV_B_BACK = 0x4; + /// FF_DEBUG_VIS_MV_B_FOR = 0x2 + public const int FF_DEBUG_VIS_MV_B_FOR = 0x2; + /// FF_DEBUG_VIS_MV_P_FOR = 0x1 + public const int FF_DEBUG_VIS_MV_P_FOR = 0x1; + /// FF_DECODE_ERROR_CONCEALMENT_ACTIVE = 4 + public const int FF_DECODE_ERROR_CONCEALMENT_ACTIVE = 0x4; + /// FF_DECODE_ERROR_DECODE_SLICES = 8 + public const int FF_DECODE_ERROR_DECODE_SLICES = 0x8; + /// FF_DECODE_ERROR_INVALID_BITSTREAM = 1 + public const int FF_DECODE_ERROR_INVALID_BITSTREAM = 0x1; + /// FF_DECODE_ERROR_MISSING_REFERENCE = 2 + public const int FF_DECODE_ERROR_MISSING_REFERENCE = 0x2; + /// FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO = 0x2 + public const int FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO = 0x2; + /// FF_DXVA2_WORKAROUND_SCALING_LIST_ZIGZAG = 0x1 + public const int 
FF_DXVA2_WORKAROUND_SCALING_LIST_ZIGZAG = 0x1; + /// FF_EC_DEBLOCK = 0x2 + public const int FF_EC_DEBLOCK = 0x2; + /// FF_EC_FAVOR_INTER = 0x100 + public const int FF_EC_FAVOR_INTER = 0x100; + /// FF_EC_GUESS_MVS = 0x1 + public const int FF_EC_GUESS_MVS = 0x1; + /// FF_FDEBUG_TS = 0x0001 + public const int FF_FDEBUG_TS = 0x1; + /// FF_HLS_TS_OPTIONS = (LIBAVFORMAT_VERSION_MAJOR < 60) + public const bool FF_HLS_TS_OPTIONS = LIBAVFORMAT_VERSION_MAJOR < 0x3c; + /// FF_IDCT_ALTIVEC = 0x8 + public const int FF_IDCT_ALTIVEC = 0x8; + /// FF_IDCT_ARM = 0x7 + public const int FF_IDCT_ARM = 0x7; + /// FF_IDCT_AUTO = 0x0 + public const int FF_IDCT_AUTO = 0x0; + /// FF_IDCT_FAAN = 0x14 + public const int FF_IDCT_FAAN = 0x14; + /// FF_IDCT_INT = 0x1 + public const int FF_IDCT_INT = 0x1; + /// FF_IDCT_NONE = 0x18 + public const int FF_IDCT_NONE = 0x18; + /// FF_IDCT_SIMPLE = 0x2 + public const int FF_IDCT_SIMPLE = 0x2; + /// FF_IDCT_SIMPLEARM = 0xa + public const int FF_IDCT_SIMPLEARM = 0xa; + /// FF_IDCT_SIMPLEARMV5TE = 0x10 + public const int FF_IDCT_SIMPLEARMV5TE = 0x10; + /// FF_IDCT_SIMPLEARMV6 = 0x11 + public const int FF_IDCT_SIMPLEARMV6 = 0x11; + /// FF_IDCT_SIMPLEAUTO = 0x80 + public const int FF_IDCT_SIMPLEAUTO = 0x80; + /// FF_IDCT_SIMPLEMMX = 0x3 + public const int FF_IDCT_SIMPLEMMX = 0x3; + /// FF_IDCT_SIMPLENEON = 0x16 + public const int FF_IDCT_SIMPLENEON = 0x16; + /// FF_IDCT_XVID = 0xe + public const int FF_IDCT_XVID = 0xe; + /// FF_LAMBDA_MAX = (256*128-1) + public const int FF_LAMBDA_MAX = 0x100 * 0x80 - 0x1; + /// FF_LAMBDA_SCALE = (1<<FF_LAMBDA_SHIFT) + public const int FF_LAMBDA_SCALE = 0x1 << FF_LAMBDA_SHIFT; + /// FF_LAMBDA_SHIFT = 7 + public const int FF_LAMBDA_SHIFT = 0x7; + /// FF_LEVEL_UNKNOWN = -0x63 + public const int FF_LEVEL_UNKNOWN = -0x63; + /// FF_LOSS_ALPHA = 0x8 + public const int FF_LOSS_ALPHA = 0x8; + /// FF_LOSS_CHROMA = 0x20 + public const int FF_LOSS_CHROMA = 0x20; + /// FF_LOSS_COLORQUANT = 0x10 + public const int FF_LOSS_COLORQUANT = 
0x10; + /// FF_LOSS_COLORSPACE = 0x4 + public const int FF_LOSS_COLORSPACE = 0x4; + /// FF_LOSS_DEPTH = 0x2 + public const int FF_LOSS_DEPTH = 0x2; + /// FF_LOSS_RESOLUTION = 0x1 + public const int FF_LOSS_RESOLUTION = 0x1; + /// FF_MB_DECISION_BITS = 0x1 + public const int FF_MB_DECISION_BITS = 0x1; + /// FF_MB_DECISION_RD = 0x2 + public const int FF_MB_DECISION_RD = 0x2; + /// FF_MB_DECISION_SIMPLE = 0x0 + public const int FF_MB_DECISION_SIMPLE = 0x0; + /// FF_PROFILE_AAC_ELD = 0x26 + public const int FF_PROFILE_AAC_ELD = 0x26; + /// FF_PROFILE_AAC_HE = 0x4 + public const int FF_PROFILE_AAC_HE = 0x4; + /// FF_PROFILE_AAC_HE_V2 = 0x1c + public const int FF_PROFILE_AAC_HE_V2 = 0x1c; + /// FF_PROFILE_AAC_LD = 0x16 + public const int FF_PROFILE_AAC_LD = 0x16; + /// FF_PROFILE_AAC_LOW = 0x1 + public const int FF_PROFILE_AAC_LOW = 0x1; + /// FF_PROFILE_AAC_LTP = 0x3 + public const int FF_PROFILE_AAC_LTP = 0x3; + /// FF_PROFILE_AAC_MAIN = 0x0 + public const int FF_PROFILE_AAC_MAIN = 0x0; + /// FF_PROFILE_AAC_SSR = 0x2 + public const int FF_PROFILE_AAC_SSR = 0x2; + /// FF_PROFILE_ARIB_PROFILE_A = 0x0 + public const int FF_PROFILE_ARIB_PROFILE_A = 0x0; + /// FF_PROFILE_ARIB_PROFILE_C = 0x1 + public const int FF_PROFILE_ARIB_PROFILE_C = 0x1; + /// FF_PROFILE_AV1_HIGH = 0x1 + public const int FF_PROFILE_AV1_HIGH = 0x1; + /// FF_PROFILE_AV1_MAIN = 0x0 + public const int FF_PROFILE_AV1_MAIN = 0x0; + /// FF_PROFILE_AV1_PROFESSIONAL = 0x2 + public const int FF_PROFILE_AV1_PROFESSIONAL = 0x2; + /// FF_PROFILE_DNXHD = 0x0 + public const int FF_PROFILE_DNXHD = 0x0; + /// FF_PROFILE_DNXHR_444 = 0x5 + public const int FF_PROFILE_DNXHR_444 = 0x5; + /// FF_PROFILE_DNXHR_HQ = 0x3 + public const int FF_PROFILE_DNXHR_HQ = 0x3; + /// FF_PROFILE_DNXHR_HQX = 0x4 + public const int FF_PROFILE_DNXHR_HQX = 0x4; + /// FF_PROFILE_DNXHR_LB = 0x1 + public const int FF_PROFILE_DNXHR_LB = 0x1; + /// FF_PROFILE_DNXHR_SQ = 0x2 + public const int FF_PROFILE_DNXHR_SQ = 0x2; + /// FF_PROFILE_DTS = 0x14 + 
public const int FF_PROFILE_DTS = 0x14; + /// FF_PROFILE_DTS_96_24 = 0x28 + public const int FF_PROFILE_DTS_96_24 = 0x28; + /// FF_PROFILE_DTS_ES = 0x1e + public const int FF_PROFILE_DTS_ES = 0x1e; + /// FF_PROFILE_DTS_EXPRESS = 0x46 + public const int FF_PROFILE_DTS_EXPRESS = 0x46; + /// FF_PROFILE_DTS_HD_HRA = 0x32 + public const int FF_PROFILE_DTS_HD_HRA = 0x32; + /// FF_PROFILE_DTS_HD_MA = 0x3c + public const int FF_PROFILE_DTS_HD_MA = 0x3c; + /// FF_PROFILE_H264_BASELINE = 0x42 + public const int FF_PROFILE_H264_BASELINE = 0x42; + /// FF_PROFILE_H264_CAVLC_444 = 0x2c + public const int FF_PROFILE_H264_CAVLC_444 = 0x2c; + /// FF_PROFILE_H264_CONSTRAINED = 0x1 << 0x9 + public const int FF_PROFILE_H264_CONSTRAINED = 0x1 << 0x9; + /// FF_PROFILE_H264_CONSTRAINED_BASELINE = 0x42 | FF_PROFILE_H264_CONSTRAINED + public const int FF_PROFILE_H264_CONSTRAINED_BASELINE = 0x42 | FF_PROFILE_H264_CONSTRAINED; + /// FF_PROFILE_H264_EXTENDED = 0x58 + public const int FF_PROFILE_H264_EXTENDED = 0x58; + /// FF_PROFILE_H264_HIGH = 0x64 + public const int FF_PROFILE_H264_HIGH = 0x64; + /// FF_PROFILE_H264_HIGH_10 = 0x6e + public const int FF_PROFILE_H264_HIGH_10 = 0x6e; + /// FF_PROFILE_H264_HIGH_10_INTRA = 0x6e | FF_PROFILE_H264_INTRA + public const int FF_PROFILE_H264_HIGH_10_INTRA = 0x6e | FF_PROFILE_H264_INTRA; + /// FF_PROFILE_H264_HIGH_422 = 0x7a + public const int FF_PROFILE_H264_HIGH_422 = 0x7a; + /// FF_PROFILE_H264_HIGH_422_INTRA = 0x7a | FF_PROFILE_H264_INTRA + public const int FF_PROFILE_H264_HIGH_422_INTRA = 0x7a | FF_PROFILE_H264_INTRA; + /// FF_PROFILE_H264_HIGH_444 = 0x90 + public const int FF_PROFILE_H264_HIGH_444 = 0x90; + /// FF_PROFILE_H264_HIGH_444_INTRA = 0xf4 | FF_PROFILE_H264_INTRA + public const int FF_PROFILE_H264_HIGH_444_INTRA = 0xf4 | FF_PROFILE_H264_INTRA; + /// FF_PROFILE_H264_HIGH_444_PREDICTIVE = 0xf4 + public const int FF_PROFILE_H264_HIGH_444_PREDICTIVE = 0xf4; + /// FF_PROFILE_H264_INTRA = 0x1 << 0xb + public const int FF_PROFILE_H264_INTRA = 
0x1 << 0xb; + /// FF_PROFILE_H264_MAIN = 0x4d + public const int FF_PROFILE_H264_MAIN = 0x4d; + /// FF_PROFILE_H264_MULTIVIEW_HIGH = 0x76 + public const int FF_PROFILE_H264_MULTIVIEW_HIGH = 0x76; + /// FF_PROFILE_H264_STEREO_HIGH = 0x80 + public const int FF_PROFILE_H264_STEREO_HIGH = 0x80; + /// FF_PROFILE_HEVC_MAIN = 0x1 + public const int FF_PROFILE_HEVC_MAIN = 0x1; + /// FF_PROFILE_HEVC_MAIN_10 = 0x2 + public const int FF_PROFILE_HEVC_MAIN_10 = 0x2; + /// FF_PROFILE_HEVC_MAIN_STILL_PICTURE = 0x3 + public const int FF_PROFILE_HEVC_MAIN_STILL_PICTURE = 0x3; + /// FF_PROFILE_HEVC_REXT = 0x4 + public const int FF_PROFILE_HEVC_REXT = 0x4; + /// FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION = 0x8000 + public const int FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION = 0x8000; + /// FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0 = 0x1 + public const int FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0 = 0x1; + /// FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1 = 0x2 + public const int FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1 = 0x2; + /// FF_PROFILE_JPEG2000_DCINEMA_2K = 0x3 + public const int FF_PROFILE_JPEG2000_DCINEMA_2K = 0x3; + /// FF_PROFILE_JPEG2000_DCINEMA_4K = 0x4 + public const int FF_PROFILE_JPEG2000_DCINEMA_4K = 0x4; + /// FF_PROFILE_KLVA_ASYNC = 0x1 + public const int FF_PROFILE_KLVA_ASYNC = 0x1; + /// FF_PROFILE_KLVA_SYNC = 0x0 + public const int FF_PROFILE_KLVA_SYNC = 0x0; + /// FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT = 0xc0 + public const int FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT = 0xc0; + /// FF_PROFILE_MJPEG_HUFFMAN_EXTENDED_SEQUENTIAL_DCT = 0xc1 + public const int FF_PROFILE_MJPEG_HUFFMAN_EXTENDED_SEQUENTIAL_DCT = 0xc1; + /// FF_PROFILE_MJPEG_HUFFMAN_LOSSLESS = 0xc3 + public const int FF_PROFILE_MJPEG_HUFFMAN_LOSSLESS = 0xc3; + /// FF_PROFILE_MJPEG_HUFFMAN_PROGRESSIVE_DCT = 0xc2 + public const int FF_PROFILE_MJPEG_HUFFMAN_PROGRESSIVE_DCT = 0xc2; + /// FF_PROFILE_MJPEG_JPEG_LS = 0xf7 + public const int FF_PROFILE_MJPEG_JPEG_LS = 0xf7; + /// FF_PROFILE_MPEG2_422 = 0x0 + public 
const int FF_PROFILE_MPEG2_422 = 0x0; + /// FF_PROFILE_MPEG2_AAC_HE = 0x83 + public const int FF_PROFILE_MPEG2_AAC_HE = 0x83; + /// FF_PROFILE_MPEG2_AAC_LOW = 0x80 + public const int FF_PROFILE_MPEG2_AAC_LOW = 0x80; + /// FF_PROFILE_MPEG2_HIGH = 0x1 + public const int FF_PROFILE_MPEG2_HIGH = 0x1; + /// FF_PROFILE_MPEG2_MAIN = 0x4 + public const int FF_PROFILE_MPEG2_MAIN = 0x4; + /// FF_PROFILE_MPEG2_SIMPLE = 0x5 + public const int FF_PROFILE_MPEG2_SIMPLE = 0x5; + /// FF_PROFILE_MPEG2_SNR_SCALABLE = 0x3 + public const int FF_PROFILE_MPEG2_SNR_SCALABLE = 0x3; + /// FF_PROFILE_MPEG2_SS = 0x2 + public const int FF_PROFILE_MPEG2_SS = 0x2; + /// FF_PROFILE_MPEG4_ADVANCED_CODING = 0xb + public const int FF_PROFILE_MPEG4_ADVANCED_CODING = 0xb; + /// FF_PROFILE_MPEG4_ADVANCED_CORE = 0xc + public const int FF_PROFILE_MPEG4_ADVANCED_CORE = 0xc; + /// FF_PROFILE_MPEG4_ADVANCED_REAL_TIME = 0x9 + public const int FF_PROFILE_MPEG4_ADVANCED_REAL_TIME = 0x9; + /// FF_PROFILE_MPEG4_ADVANCED_SCALABLE_TEXTURE = 0xd + public const int FF_PROFILE_MPEG4_ADVANCED_SCALABLE_TEXTURE = 0xd; + /// FF_PROFILE_MPEG4_ADVANCED_SIMPLE = 0xf + public const int FF_PROFILE_MPEG4_ADVANCED_SIMPLE = 0xf; + /// FF_PROFILE_MPEG4_BASIC_ANIMATED_TEXTURE = 0x7 + public const int FF_PROFILE_MPEG4_BASIC_ANIMATED_TEXTURE = 0x7; + /// FF_PROFILE_MPEG4_CORE = 0x2 + public const int FF_PROFILE_MPEG4_CORE = 0x2; + /// FF_PROFILE_MPEG4_CORE_SCALABLE = 0xa + public const int FF_PROFILE_MPEG4_CORE_SCALABLE = 0xa; + /// FF_PROFILE_MPEG4_HYBRID = 0x8 + public const int FF_PROFILE_MPEG4_HYBRID = 0x8; + /// FF_PROFILE_MPEG4_MAIN = 0x3 + public const int FF_PROFILE_MPEG4_MAIN = 0x3; + /// FF_PROFILE_MPEG4_N_BIT = 0x4 + public const int FF_PROFILE_MPEG4_N_BIT = 0x4; + /// FF_PROFILE_MPEG4_SCALABLE_TEXTURE = 0x5 + public const int FF_PROFILE_MPEG4_SCALABLE_TEXTURE = 0x5; + /// FF_PROFILE_MPEG4_SIMPLE = 0x0 + public const int FF_PROFILE_MPEG4_SIMPLE = 0x0; + /// FF_PROFILE_MPEG4_SIMPLE_FACE_ANIMATION = 0x6 + public const int 
FF_PROFILE_MPEG4_SIMPLE_FACE_ANIMATION = 0x6; + /// FF_PROFILE_MPEG4_SIMPLE_SCALABLE = 0x1 + public const int FF_PROFILE_MPEG4_SIMPLE_SCALABLE = 0x1; + /// FF_PROFILE_MPEG4_SIMPLE_STUDIO = 0xe + public const int FF_PROFILE_MPEG4_SIMPLE_STUDIO = 0xe; + /// FF_PROFILE_PRORES_4444 = 0x4 + public const int FF_PROFILE_PRORES_4444 = 0x4; + /// FF_PROFILE_PRORES_HQ = 0x3 + public const int FF_PROFILE_PRORES_HQ = 0x3; + /// FF_PROFILE_PRORES_LT = 0x1 + public const int FF_PROFILE_PRORES_LT = 0x1; + /// FF_PROFILE_PRORES_PROXY = 0x0 + public const int FF_PROFILE_PRORES_PROXY = 0x0; + /// FF_PROFILE_PRORES_STANDARD = 0x2 + public const int FF_PROFILE_PRORES_STANDARD = 0x2; + /// FF_PROFILE_PRORES_XQ = 0x5 + public const int FF_PROFILE_PRORES_XQ = 0x5; + /// FF_PROFILE_RESERVED = -0x64 + public const int FF_PROFILE_RESERVED = -0x64; + /// FF_PROFILE_SBC_MSBC = 0x1 + public const int FF_PROFILE_SBC_MSBC = 0x1; + /// FF_PROFILE_UNKNOWN = -0x63 + public const int FF_PROFILE_UNKNOWN = -0x63; + /// FF_PROFILE_VC1_ADVANCED = 0x3 + public const int FF_PROFILE_VC1_ADVANCED = 0x3; + /// FF_PROFILE_VC1_COMPLEX = 0x2 + public const int FF_PROFILE_VC1_COMPLEX = 0x2; + /// FF_PROFILE_VC1_MAIN = 0x1 + public const int FF_PROFILE_VC1_MAIN = 0x1; + /// FF_PROFILE_VC1_SIMPLE = 0x0 + public const int FF_PROFILE_VC1_SIMPLE = 0x0; + /// FF_PROFILE_VP9_0 = 0x0 + public const int FF_PROFILE_VP9_0 = 0x0; + /// FF_PROFILE_VP9_1 = 0x1 + public const int FF_PROFILE_VP9_1 = 0x1; + /// FF_PROFILE_VP9_2 = 0x2 + public const int FF_PROFILE_VP9_2 = 0x2; + /// FF_PROFILE_VP9_3 = 0x3 + public const int FF_PROFILE_VP9_3 = 0x3; + /// FF_PROFILE_VVC_MAIN_10 = 0x1 + public const int FF_PROFILE_VVC_MAIN_10 = 0x1; + /// FF_PROFILE_VVC_MAIN_10_444 = 0x21 + public const int FF_PROFILE_VVC_MAIN_10_444 = 0x21; + /// FF_QP2LAMBDA = 118 + public const int FF_QP2LAMBDA = 0x76; + /// FF_QUALITY_SCALE = FF_LAMBDA_SCALE + public const int FF_QUALITY_SCALE = FF_LAMBDA_SCALE; + /// FF_SUB_CHARENC_MODE_AUTOMATIC = 0x0 + public 
const int FF_SUB_CHARENC_MODE_AUTOMATIC = 0x0; + /// FF_SUB_CHARENC_MODE_DO_NOTHING = -0x1 + public const int FF_SUB_CHARENC_MODE_DO_NOTHING = -0x1; + /// FF_SUB_CHARENC_MODE_IGNORE = 0x2 + public const int FF_SUB_CHARENC_MODE_IGNORE = 0x2; + /// FF_SUB_CHARENC_MODE_PRE_DECODER = 0x1 + public const int FF_SUB_CHARENC_MODE_PRE_DECODER = 0x1; + /// FF_SUB_TEXT_FMT_ASS = 0x0 + public const int FF_SUB_TEXT_FMT_ASS = 0x0; + /// FF_THREAD_FRAME = 0x1 + public const int FF_THREAD_FRAME = 0x1; + /// FF_THREAD_SLICE = 0x2 + public const int FF_THREAD_SLICE = 0x2; + // public static FFABS = (a) ((a) >= 0 ? (a) : (-(a))); + // public static FFABS64U = (a) ((a) <= 0 ? -(uint64_t)(a) : (uint64_t)(a)); + // public static FFABSU = (a) ((a) <= 0 ? -(unsigned)(a) : (unsigned)(a)); + // public static FFALIGN = x; + // public static FFDIFFSIGN = x; + // public static FFERRTAG = a; + // public static FFMAX = (a,b) ((a) > (b) ? (a) : (b)); + // public static FFMAX3 = a; + // public static FFMIN = (a,b) ((a) > (b) ? (b) : (a)); + // public static FFMIN3 = a; + // public static FFNABS = (a) ((a) <= 0 ? (a) : (-(a))); + // public static FFSIGN = (a) ((a) > 0 ? 
1 : -1); + // public static FFSWAP = (type,a,b) do{type SWAP_tmp= b; b= a; a= SWAP_tmp;}while(0); + // public static FFUDIV = (a,b) (((a)>0 ?(a):(a)-(b)+1) / (b)); + // public static FFUMOD = a; + // public static GET_UTF16 = (val, GET_16BIT, ERROR)val = (GET_16BIT);{unsigned int hi = val - 0xD800;if (hi < 0x800) {val = (GET_16BIT) - 0xDC00;if (val > 0x3FFU || hi > 0x3FFU){ERROR}val += (hi<<10) + 0x10000;}}; + // public static GET_UTF8 = (val, GET_BYTE, ERROR)val= (GET_BYTE);{uint32_t top = (val & 128) >> 1;if ((val & 0xc0) == 0x80 || val >= 0xFE){ERROR}while (val & top) {unsigned int tmp = (GET_BYTE) - 128;if(tmp>>6){ERROR}val= (val<<6) + tmp;top <<= 5;}val &= (top << 1) - 1;}; + /// LIBAVCODEC_BUILD = LIBAVCODEC_VERSION_INT + public static readonly int LIBAVCODEC_BUILD = LIBAVCODEC_VERSION_INT; + /// LIBAVCODEC_IDENT = "Lavc" + public const string LIBAVCODEC_IDENT = "Lavc"; + /// LIBAVCODEC_VERSION = AV_VERSION(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO) + public static readonly string LIBAVCODEC_VERSION = AV_VERSION(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO); + /// LIBAVCODEC_VERSION_INT = AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO) + public static readonly int LIBAVCODEC_VERSION_INT = AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO); + /// LIBAVCODEC_VERSION_MAJOR = 59 + public const int LIBAVCODEC_VERSION_MAJOR = 0x3b; + /// LIBAVCODEC_VERSION_MICRO = 0x64 + public const int LIBAVCODEC_VERSION_MICRO = 0x64; + /// LIBAVCODEC_VERSION_MINOR = 0x25 + public const int LIBAVCODEC_VERSION_MINOR = 0x25; + /// LIBAVDEVICE_BUILD = LIBAVDEVICE_VERSION_INT + public static readonly int LIBAVDEVICE_BUILD = LIBAVDEVICE_VERSION_INT; + /// LIBAVDEVICE_IDENT = "Lavd" AV_STRINGIFY(LIBAVDEVICE_VERSION) + public const string LIBAVDEVICE_IDENT = "Lavd"; + /// LIBAVDEVICE_VERSION = AV_VERSION(LIBAVDEVICE_VERSION_MAJOR, 
LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO) + public static readonly string LIBAVDEVICE_VERSION = AV_VERSION(LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO); + /// LIBAVDEVICE_VERSION_INT = AV_VERSION_INT(LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO) + public static readonly int LIBAVDEVICE_VERSION_INT = AV_VERSION_INT(LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO); + /// LIBAVDEVICE_VERSION_MAJOR = 59 + public const int LIBAVDEVICE_VERSION_MAJOR = 0x3b; + /// LIBAVDEVICE_VERSION_MICRO = 100 + public const int LIBAVDEVICE_VERSION_MICRO = 0x64; + /// LIBAVDEVICE_VERSION_MINOR = 7 + public const int LIBAVDEVICE_VERSION_MINOR = 0x7; + /// LIBAVFILTER_BUILD = LIBAVFILTER_VERSION_INT + public static readonly int LIBAVFILTER_BUILD = LIBAVFILTER_VERSION_INT; + /// LIBAVFILTER_IDENT = "Lavfi" + public const string LIBAVFILTER_IDENT = "Lavfi"; + /// LIBAVFILTER_VERSION = AV_VERSION(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO) + public static readonly string LIBAVFILTER_VERSION = AV_VERSION(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO); + /// LIBAVFILTER_VERSION_INT = AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO) + public static readonly int LIBAVFILTER_VERSION_INT = AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, LIBAVFILTER_VERSION_MINOR, LIBAVFILTER_VERSION_MICRO); + /// LIBAVFILTER_VERSION_MAJOR = 0x8 + public const int LIBAVFILTER_VERSION_MAJOR = 0x8; + /// LIBAVFILTER_VERSION_MICRO = 0x64 + public const int LIBAVFILTER_VERSION_MICRO = 0x64; + /// LIBAVFILTER_VERSION_MINOR = 0x2c + public const int LIBAVFILTER_VERSION_MINOR = 0x2c; + /// LIBAVFORMAT_BUILD = LIBAVFORMAT_VERSION_INT + public static readonly int LIBAVFORMAT_BUILD = LIBAVFORMAT_VERSION_INT; + /// LIBAVFORMAT_IDENT = "Lavf" AV_STRINGIFY(LIBAVFORMAT_VERSION) + public const string 
LIBAVFORMAT_IDENT = "Lavf"; + /// LIBAVFORMAT_VERSION = AV_VERSION(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO) + public static readonly string LIBAVFORMAT_VERSION = AV_VERSION(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO); + /// LIBAVFORMAT_VERSION_INT = AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO) + public static readonly int LIBAVFORMAT_VERSION_INT = AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, LIBAVFORMAT_VERSION_MINOR, LIBAVFORMAT_VERSION_MICRO); + /// LIBAVFORMAT_VERSION_MAJOR = 59 + public const int LIBAVFORMAT_VERSION_MAJOR = 0x3b; + /// LIBAVFORMAT_VERSION_MICRO = 100 + public const int LIBAVFORMAT_VERSION_MICRO = 0x64; + /// LIBAVFORMAT_VERSION_MINOR = 27 + public const int LIBAVFORMAT_VERSION_MINOR = 0x1b; + /// LIBAVUTIL_BUILD = LIBAVUTIL_VERSION_INT + public static readonly int LIBAVUTIL_BUILD = LIBAVUTIL_VERSION_INT; + /// LIBAVUTIL_IDENT = "Lavu" AV_STRINGIFY(LIBAVUTIL_VERSION) + public const string LIBAVUTIL_IDENT = "Lavu"; + /// LIBAVUTIL_VERSION = AV_VERSION(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO) + public static readonly string LIBAVUTIL_VERSION = AV_VERSION(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO); + /// LIBAVUTIL_VERSION_INT = AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO) + public static readonly int LIBAVUTIL_VERSION_INT = AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, LIBAVUTIL_VERSION_MINOR, LIBAVUTIL_VERSION_MICRO); + /// LIBAVUTIL_VERSION_MAJOR = 57 + public const int LIBAVUTIL_VERSION_MAJOR = 0x39; + /// LIBAVUTIL_VERSION_MICRO = 100 + public const int LIBAVUTIL_VERSION_MICRO = 0x64; + /// LIBAVUTIL_VERSION_MINOR = 28 + public const int LIBAVUTIL_VERSION_MINOR = 0x1c; + /// LIBPOSTPROC_BUILD = LIBPOSTPROC_VERSION_INT + public static readonly int LIBPOSTPROC_BUILD = LIBPOSTPROC_VERSION_INT; + /// LIBPOSTPROC_IDENT = 
"postproc" + public const string LIBPOSTPROC_IDENT = "postproc"; + /// LIBPOSTPROC_VERSION = AV_VERSION(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO) + public static readonly string LIBPOSTPROC_VERSION = AV_VERSION(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO); + /// LIBPOSTPROC_VERSION_INT = AV_VERSION_INT(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO) + public static readonly int LIBPOSTPROC_VERSION_INT = AV_VERSION_INT(LIBPOSTPROC_VERSION_MAJOR, LIBPOSTPROC_VERSION_MINOR, LIBPOSTPROC_VERSION_MICRO); + /// LIBPOSTPROC_VERSION_MAJOR = 0x38 + public const int LIBPOSTPROC_VERSION_MAJOR = 0x38; + /// LIBPOSTPROC_VERSION_MICRO = 0x64 + public const int LIBPOSTPROC_VERSION_MICRO = 0x64; + /// LIBPOSTPROC_VERSION_MINOR = 0x6 + public const int LIBPOSTPROC_VERSION_MINOR = 0x6; + /// LIBSWRESAMPLE_BUILD = LIBSWRESAMPLE_VERSION_INT + public static readonly int LIBSWRESAMPLE_BUILD = LIBSWRESAMPLE_VERSION_INT; + /// LIBSWRESAMPLE_IDENT = "SwR" + public const string LIBSWRESAMPLE_IDENT = "SwR"; + /// LIBSWRESAMPLE_VERSION = AV_VERSION(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO) + public static readonly string LIBSWRESAMPLE_VERSION = AV_VERSION(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO); + /// LIBSWRESAMPLE_VERSION_INT = AV_VERSION_INT(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO) + public static readonly int LIBSWRESAMPLE_VERSION_INT = AV_VERSION_INT(LIBSWRESAMPLE_VERSION_MAJOR, LIBSWRESAMPLE_VERSION_MINOR, LIBSWRESAMPLE_VERSION_MICRO); + /// LIBSWRESAMPLE_VERSION_MAJOR = 0x4 + public const int LIBSWRESAMPLE_VERSION_MAJOR = 0x4; + /// LIBSWRESAMPLE_VERSION_MICRO = 0x64 + public const int LIBSWRESAMPLE_VERSION_MICRO = 0x64; + /// LIBSWRESAMPLE_VERSION_MINOR = 0x7 + public const int LIBSWRESAMPLE_VERSION_MINOR = 0x7; + /// LIBSWSCALE_BUILD = 
LIBSWSCALE_VERSION_INT + public static readonly int LIBSWSCALE_BUILD = LIBSWSCALE_VERSION_INT; + /// LIBSWSCALE_IDENT = "SwS" + public const string LIBSWSCALE_IDENT = "SwS"; + /// LIBSWSCALE_VERSION = AV_VERSION(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO) + public static readonly string LIBSWSCALE_VERSION = AV_VERSION(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO); + /// LIBSWSCALE_VERSION_INT = AV_VERSION_INT(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO) + public static readonly int LIBSWSCALE_VERSION_INT = AV_VERSION_INT(LIBSWSCALE_VERSION_MAJOR, LIBSWSCALE_VERSION_MINOR, LIBSWSCALE_VERSION_MICRO); + /// LIBSWSCALE_VERSION_MAJOR = 0x6 + public const int LIBSWSCALE_VERSION_MAJOR = 0x6; + /// LIBSWSCALE_VERSION_MICRO = 0x64 + public const int LIBSWSCALE_VERSION_MICRO = 0x64; + /// LIBSWSCALE_VERSION_MINOR = 0x7 + public const int LIBSWSCALE_VERSION_MINOR = 0x7; + /// M_E = 2.7182818284590452354 + public const double M_E = 2.718281828459045D; + /// M_LN10 = 2.30258509299404568402 + public const double M_LN10 = 2.302585092994046D; + /// M_LN2 = 0.69314718055994530942 + public const double M_LN2 = 0.6931471805599453D; + /// M_LOG2_10 = 3.32192809488736234787 + public const double M_LOG2_10 = 3.321928094887362D; + /// M_PHI = 1.61803398874989484820 + public const double M_PHI = 1.618033988749895D; + /// M_PI = 3.14159265358979323846 + public const double M_PI = 3.141592653589793D; + /// M_PI_2 = 1.57079632679489661923 + public const double M_PI_2 = 1.5707963267948966D; + /// M_SQRT1_2 = 0.70710678118654752440 + public const double M_SQRT1_2 = 0.7071067811865476D; + /// M_SQRT2 = 1.41421356237309504880 + public const double M_SQRT2 = 1.4142135623730951D; + // public static MKBETAG = a; + // public static MKTAG = a; + /// PARSER_FLAG_COMPLETE_FRAMES = 0x1 + public const int PARSER_FLAG_COMPLETE_FRAMES = 0x1; + /// PARSER_FLAG_FETCHED_OFFSET = 0x4 + public const int 
PARSER_FLAG_FETCHED_OFFSET = 0x4; + /// PARSER_FLAG_ONCE = 0x2 + public const int PARSER_FLAG_ONCE = 0x2; + /// PARSER_FLAG_USE_CODEC_TS = 0x1000 + public const int PARSER_FLAG_USE_CODEC_TS = 0x1000; + /// PP_CPU_CAPS_3DNOW = 0x40000000 + public const int PP_CPU_CAPS_3DNOW = 0x40000000; + /// PP_CPU_CAPS_ALTIVEC = 0x10000000 + public const int PP_CPU_CAPS_ALTIVEC = 0x10000000; + /// PP_CPU_CAPS_AUTO = 0x80000 + public const int PP_CPU_CAPS_AUTO = 0x80000; + /// PP_CPU_CAPS_MMX = 0x80000000U + public const uint PP_CPU_CAPS_MMX = 0x80000000U; + /// PP_CPU_CAPS_MMX2 = 0x20000000 + public const int PP_CPU_CAPS_MMX2 = 0x20000000; + /// PP_FORMAT = 0x8 + public const int PP_FORMAT = 0x8; + /// PP_FORMAT_411 = 0x2 | PP_FORMAT + public const int PP_FORMAT_411 = 0x2 | PP_FORMAT; + /// PP_FORMAT_420 = 0x11 | PP_FORMAT + public const int PP_FORMAT_420 = 0x11 | PP_FORMAT; + /// PP_FORMAT_422 = 0x1 | PP_FORMAT + public const int PP_FORMAT_422 = 0x1 | PP_FORMAT; + /// PP_FORMAT_440 = 0x10 | PP_FORMAT + public const int PP_FORMAT_440 = 0x10 | PP_FORMAT; + /// PP_FORMAT_444 = 0x0 | PP_FORMAT + public const int PP_FORMAT_444 = 0x0 | PP_FORMAT; + /// PP_PICT_TYPE_QP2 = 0x10 + public const int PP_PICT_TYPE_QP2 = 0x10; + /// PP_QUALITY_MAX = 0x6 + public const int PP_QUALITY_MAX = 0x6; + // public static PUT_UTF16 = (val, tmp, PUT_16BIT){uint32_t in = val;if (in < 0x10000) {tmp = in;PUT_16BIT} else {tmp = 0xD800 | ((in - 0x10000) >> 10);PUT_16BITtmp = 0xDC00 | ((in - 0x10000) & 0x3FF);PUT_16BIT}}; + // public static PUT_UTF8 = (val, tmp, PUT_BYTE){int bytes, shift;uint32_t in = val;if (in < 0x80) {tmp = in;PUT_BYTE} else {bytes = (av_log2(in) + 4) / 5;shift = (bytes - 1) * 6;tmp = (256 - (256 >> bytes)) | (in >> shift);PUT_BYTEwhile (shift >= 6) {shift -= 6;tmp = 0x80 | ((in >> shift) & 0x3f);PUT_BYTE}}}; + // public static ROUNDED_DIV = (a,b) (((a)>=0 ? (a) + ((b)>>1) : (a) - ((b)>>1))/(b)); + // public static RSHIFT = (a,b) ((a) > 0 ? 
((a) + ((1<<(b))>>1))>>(b) : ((a) + ((1<<(b))>>1)-1)>>(b)); + /// SLICE_FLAG_ALLOW_FIELD = 0x2 + public const int SLICE_FLAG_ALLOW_FIELD = 0x2; + /// SLICE_FLAG_ALLOW_PLANE = 0x4 + public const int SLICE_FLAG_ALLOW_PLANE = 0x4; + /// SLICE_FLAG_CODED_ORDER = 0x1 + public const int SLICE_FLAG_CODED_ORDER = 0x1; + /// SWR_FLAG_RESAMPLE = 0x1 + public const int SWR_FLAG_RESAMPLE = 0x1; + /// SWS_ACCURATE_RND = 0x40000 + public const int SWS_ACCURATE_RND = 0x40000; + /// SWS_AREA = 0x20 + public const int SWS_AREA = 0x20; + /// SWS_BICUBIC = 0x4 + public const int SWS_BICUBIC = 0x4; + /// SWS_BICUBLIN = 0x40 + public const int SWS_BICUBLIN = 0x40; + /// SWS_BILINEAR = 0x2 + public const int SWS_BILINEAR = 0x2; + /// SWS_BITEXACT = 0x80000 + public const int SWS_BITEXACT = 0x80000; + /// SWS_CS_BT2020 = 0x9 + public const int SWS_CS_BT2020 = 0x9; + /// SWS_CS_DEFAULT = 0x5 + public const int SWS_CS_DEFAULT = 0x5; + /// SWS_CS_FCC = 0x4 + public const int SWS_CS_FCC = 0x4; + /// SWS_CS_ITU601 = 0x5 + public const int SWS_CS_ITU601 = 0x5; + /// SWS_CS_ITU624 = 0x5 + public const int SWS_CS_ITU624 = 0x5; + /// SWS_CS_ITU709 = 0x1 + public const int SWS_CS_ITU709 = 0x1; + /// SWS_CS_SMPTE170M = 0x5 + public const int SWS_CS_SMPTE170M = 0x5; + /// SWS_CS_SMPTE240M = 0x7 + public const int SWS_CS_SMPTE240M = 0x7; + /// SWS_DIRECT_BGR = 0x8000 + public const int SWS_DIRECT_BGR = 0x8000; + /// SWS_ERROR_DIFFUSION = 0x800000 + public const int SWS_ERROR_DIFFUSION = 0x800000; + /// SWS_FAST_BILINEAR = 0x1 + public const int SWS_FAST_BILINEAR = 0x1; + /// SWS_FULL_CHR_H_INP = 0x4000 + public const int SWS_FULL_CHR_H_INP = 0x4000; + /// SWS_FULL_CHR_H_INT = 0x2000 + public const int SWS_FULL_CHR_H_INT = 0x2000; + /// SWS_GAUSS = 0x80 + public const int SWS_GAUSS = 0x80; + /// SWS_LANCZOS = 0x200 + public const int SWS_LANCZOS = 0x200; + /// SWS_MAX_REDUCE_CUTOFF = 0.002D + public const double SWS_MAX_REDUCE_CUTOFF = 0.002D; + /// SWS_PARAM_DEFAULT = 0x1e240 + public const int 
SWS_PARAM_DEFAULT = 0x1e240; + /// SWS_POINT = 0x10 + public const int SWS_POINT = 0x10; + /// SWS_PRINT_INFO = 0x1000 + public const int SWS_PRINT_INFO = 0x1000; + /// SWS_SINC = 0x100 + public const int SWS_SINC = 0x100; + /// SWS_SPLINE = 0x400 + public const int SWS_SPLINE = 0x400; + /// SWS_SRC_V_CHR_DROP_MASK = 0x30000 + public const int SWS_SRC_V_CHR_DROP_MASK = 0x30000; + /// SWS_SRC_V_CHR_DROP_SHIFT = 0x10 + public const int SWS_SRC_V_CHR_DROP_SHIFT = 0x10; + /// SWS_X = 0x8 + public const int SWS_X = 0x8; +} diff --git a/FFmpeg.AutoGen/generated/vectors.g.cs b/FFmpeg.AutoGen/generated/vectors.g.cs new file mode 100644 index 00000000..9c307833 --- /dev/null +++ b/FFmpeg.AutoGen/generated/vectors.g.cs @@ -0,0 +1,3725 @@ +using System; +using System.Runtime.InteropServices; + +namespace FFmpeg.AutoGen; + +public static unsafe partial class vectors +{ + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVABufferSinkParams* av_abuffersink_params_alloc_delegate(); + public static av_abuffersink_params_alloc_delegate av_abuffersink_params_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_add_index_entry_delegate(AVStream* @st, long @pos, long @timestamp, int @size, int @distance, int @flags); + public static av_add_index_entry_delegate av_add_index_entry; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_add_q_delegate(AVRational @b, AVRational @c); + public static av_add_q_delegate av_add_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_add_stable_delegate(AVRational @ts_tb, long @ts, AVRational @inc_tb, long @inc); + public static av_add_stable_delegate av_add_stable; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_append_packet_delegate(AVIOContext* @s, AVPacket* @pkt, int @size); + public static av_append_packet_delegate av_append_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + 
public delegate AVAudioFifo* av_audio_fifo_alloc_delegate(AVSampleFormat @sample_fmt, int @channels, int @nb_samples); + public static av_audio_fifo_alloc_delegate av_audio_fifo_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_drain_delegate(AVAudioFifo* @af, int @nb_samples); + public static av_audio_fifo_drain_delegate av_audio_fifo_drain; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_audio_fifo_free_delegate(AVAudioFifo* @af); + public static av_audio_fifo_free_delegate av_audio_fifo_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_peek_delegate(AVAudioFifo* @af, void** @data, int @nb_samples); + public static av_audio_fifo_peek_delegate av_audio_fifo_peek; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_peek_at_delegate(AVAudioFifo* @af, void** @data, int @nb_samples, int @offset); + public static av_audio_fifo_peek_at_delegate av_audio_fifo_peek_at; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_read_delegate(AVAudioFifo* @af, void** @data, int @nb_samples); + public static av_audio_fifo_read_delegate av_audio_fifo_read; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_realloc_delegate(AVAudioFifo* @af, int @nb_samples); + public static av_audio_fifo_realloc_delegate av_audio_fifo_realloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_audio_fifo_reset_delegate(AVAudioFifo* @af); + public static av_audio_fifo_reset_delegate av_audio_fifo_reset; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_size_delegate(AVAudioFifo* @af); + public static av_audio_fifo_size_delegate av_audio_fifo_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_space_delegate(AVAudioFifo* @af); + public 
static av_audio_fifo_space_delegate av_audio_fifo_space; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_audio_fifo_write_delegate(AVAudioFifo* @af, void** @data, int @nb_samples); + public static av_audio_fifo_write_delegate av_audio_fifo_write; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_bprint_channel_layout_delegate(AVBPrint* @bp, int @nb_channels, ulong @channel_layout); + public static av_bprint_channel_layout_delegate av_bprint_channel_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_alloc_delegate(AVBitStreamFilter* @filter, AVBSFContext** @ctx); + public static av_bsf_alloc_delegate av_bsf_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_bsf_flush_delegate(AVBSFContext* @ctx); + public static av_bsf_flush_delegate av_bsf_flush; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_bsf_free_delegate(AVBSFContext** @ctx); + public static av_bsf_free_delegate av_bsf_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBitStreamFilter* av_bsf_get_by_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_bsf_get_by_name_delegate av_bsf_get_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* av_bsf_get_class_delegate(); + public static av_bsf_get_class_delegate av_bsf_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_get_null_filter_delegate(AVBSFContext** @bsf); + public static av_bsf_get_null_filter_delegate av_bsf_get_null_filter; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_init_delegate(AVBSFContext* @ctx); + public static av_bsf_init_delegate av_bsf_init; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBitStreamFilter* av_bsf_iterate_delegate(void** @opaque); + public static av_bsf_iterate_delegate av_bsf_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBSFList* av_bsf_list_alloc_delegate(); + public static av_bsf_list_alloc_delegate av_bsf_list_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_list_append_delegate(AVBSFList* @lst, AVBSFContext* @bsf); + public static av_bsf_list_append_delegate av_bsf_list_append; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_list_append2_delegate(AVBSFList* @lst, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @bsf_name, AVDictionary** @options); + public static av_bsf_list_append2_delegate av_bsf_list_append2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_list_finalize_delegate(AVBSFList** @lst, AVBSFContext** @bsf); + public static av_bsf_list_finalize_delegate av_bsf_list_finalize; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_bsf_list_free_delegate(AVBSFList** @lst); + public static av_bsf_list_free_delegate av_bsf_list_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_list_parse_str_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str, AVBSFContext** @bsf); + public static av_bsf_list_parse_str_delegate av_bsf_list_parse_str; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_receive_packet_delegate(AVBSFContext* @ctx, AVPacket* @pkt); + public static av_bsf_receive_packet_delegate av_bsf_receive_packet; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_bsf_send_packet_delegate(AVBSFContext* @ctx, AVPacket* @pkt); + public static av_bsf_send_packet_delegate av_bsf_send_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_buffer_alloc_delegate(ulong @size); + public static av_buffer_alloc_delegate av_buffer_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_buffer_allocz_delegate(ulong @size); + public static av_buffer_allocz_delegate av_buffer_allocz; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_buffer_create_delegate(byte* @data, ulong @size, av_buffer_create_free_func @free, void* @opaque, int @flags); + public static av_buffer_create_delegate av_buffer_create; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_buffer_default_free_delegate(void* @opaque, byte* @data); + public static av_buffer_default_free_delegate av_buffer_default_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_buffer_get_opaque_delegate(AVBufferRef* @buf); + public static av_buffer_get_opaque_delegate av_buffer_get_opaque; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffer_get_ref_count_delegate(AVBufferRef* @buf); + public static av_buffer_get_ref_count_delegate av_buffer_get_ref_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffer_is_writable_delegate(AVBufferRef* @buf); + public static av_buffer_is_writable_delegate av_buffer_is_writable; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffer_make_writable_delegate(AVBufferRef** @buf); + public static av_buffer_make_writable_delegate av_buffer_make_writable; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_buffer_pool_buffer_get_opaque_delegate(AVBufferRef* @ref); + 
public static av_buffer_pool_buffer_get_opaque_delegate av_buffer_pool_buffer_get_opaque; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_buffer_pool_get_delegate(AVBufferPool* @pool); + public static av_buffer_pool_get_delegate av_buffer_pool_get; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferPool* av_buffer_pool_init_delegate(ulong @size, av_buffer_pool_init_alloc_func @alloc); + public static av_buffer_pool_init_delegate av_buffer_pool_init; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferPool* av_buffer_pool_init2_delegate(ulong @size, void* @opaque, av_buffer_pool_init2_alloc_func @alloc, av_buffer_pool_init2_pool_free_func @pool_free); + public static av_buffer_pool_init2_delegate av_buffer_pool_init2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_buffer_pool_uninit_delegate(AVBufferPool** @pool); + public static av_buffer_pool_uninit_delegate av_buffer_pool_uninit; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffer_realloc_delegate(AVBufferRef** @buf, ulong @size); + public static av_buffer_realloc_delegate av_buffer_realloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_buffer_ref_delegate(AVBufferRef* @buf); + public static av_buffer_ref_delegate av_buffer_ref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffer_replace_delegate(AVBufferRef** @dst, AVBufferRef* @src); + public static av_buffer_replace_delegate av_buffer_replace; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_buffer_unref_delegate(AVBufferRef** @buf); + public static av_buffer_unref_delegate av_buffer_unref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_ch_layout_delegate(AVFilterContext* @ctx, AVChannelLayout* @ch_layout); + public static 
av_buffersink_get_ch_layout_delegate av_buffersink_get_ch_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong av_buffersink_get_channel_layout_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_channel_layout_delegate av_buffersink_get_channel_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_channels_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_channels_delegate av_buffersink_get_channels; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_format_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_format_delegate av_buffersink_get_format; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_frame_delegate(AVFilterContext* @ctx, AVFrame* @frame); + public static av_buffersink_get_frame_delegate av_buffersink_get_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_frame_flags_delegate(AVFilterContext* @ctx, AVFrame* @frame, int @flags); + public static av_buffersink_get_frame_flags_delegate av_buffersink_get_frame_flags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_buffersink_get_frame_rate_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_frame_rate_delegate av_buffersink_get_frame_rate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_h_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_h_delegate av_buffersink_get_h; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_buffersink_get_hw_frames_ctx_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_hw_frames_ctx_delegate av_buffersink_get_hw_frames_ctx; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational 
av_buffersink_get_sample_aspect_ratio_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_sample_aspect_ratio_delegate av_buffersink_get_sample_aspect_ratio; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_sample_rate_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_sample_rate_delegate av_buffersink_get_sample_rate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_samples_delegate(AVFilterContext* @ctx, AVFrame* @frame, int @nb_samples); + public static av_buffersink_get_samples_delegate av_buffersink_get_samples; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_buffersink_get_time_base_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_time_base_delegate av_buffersink_get_time_base; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVMediaType av_buffersink_get_type_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_type_delegate av_buffersink_get_type; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersink_get_w_delegate(AVFilterContext* @ctx); + public static av_buffersink_get_w_delegate av_buffersink_get_w; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferSinkParams* av_buffersink_params_alloc_delegate(); + public static av_buffersink_params_alloc_delegate av_buffersink_params_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_buffersink_set_frame_size_delegate(AVFilterContext* @ctx, uint @frame_size); + public static av_buffersink_set_frame_size_delegate av_buffersink_set_frame_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersrc_add_frame_delegate(AVFilterContext* @ctx, AVFrame* @frame); + public static av_buffersrc_add_frame_delegate av_buffersrc_add_frame; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersrc_add_frame_flags_delegate(AVFilterContext* @buffer_src, AVFrame* @frame, int @flags); + public static av_buffersrc_add_frame_flags_delegate av_buffersrc_add_frame_flags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersrc_close_delegate(AVFilterContext* @ctx, long @pts, uint @flags); + public static av_buffersrc_close_delegate av_buffersrc_close; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint av_buffersrc_get_nb_failed_requests_delegate(AVFilterContext* @buffer_src); + public static av_buffersrc_get_nb_failed_requests_delegate av_buffersrc_get_nb_failed_requests; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferSrcParameters* av_buffersrc_parameters_alloc_delegate(); + public static av_buffersrc_parameters_alloc_delegate av_buffersrc_parameters_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersrc_parameters_set_delegate(AVFilterContext* @ctx, AVBufferSrcParameters* @param); + public static av_buffersrc_parameters_set_delegate av_buffersrc_parameters_set; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_buffersrc_write_frame_delegate(AVFilterContext* @ctx, AVFrame* @frame); + public static av_buffersrc_write_frame_delegate av_buffersrc_write_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_calloc_delegate(ulong @nmemb, ulong @size); + public static av_calloc_delegate av_calloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_description_delegate(byte* @buf, ulong @buf_size, AVChannel @channel); + public static av_channel_description_delegate av_channel_description; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_channel_description_bprint_delegate(AVBPrint* @bp, AVChannel @channel_id); + 
public static av_channel_description_bprint_delegate av_channel_description_bprint; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVChannel av_channel_from_string_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_channel_from_string_delegate av_channel_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVChannel av_channel_layout_channel_from_index_delegate(AVChannelLayout* @channel_layout, uint @idx); + public static av_channel_layout_channel_from_index_delegate av_channel_layout_channel_from_index; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVChannel av_channel_layout_channel_from_string_delegate(AVChannelLayout* @channel_layout, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_channel_layout_channel_from_string_delegate av_channel_layout_channel_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_check_delegate(AVChannelLayout* @channel_layout); + public static av_channel_layout_check_delegate av_channel_layout_check; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_compare_delegate(AVChannelLayout* @chl, AVChannelLayout* @chl1); + public static av_channel_layout_compare_delegate av_channel_layout_compare; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_copy_delegate(AVChannelLayout* @dst, AVChannelLayout* @src); + public static av_channel_layout_copy_delegate av_channel_layout_copy; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void 
av_channel_layout_default_delegate(AVChannelLayout* @ch_layout, int @nb_channels); + public static av_channel_layout_default_delegate av_channel_layout_default; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_describe_delegate(AVChannelLayout* @channel_layout, byte* @buf, ulong @buf_size); + public static av_channel_layout_describe_delegate av_channel_layout_describe; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_describe_bprint_delegate(AVChannelLayout* @channel_layout, AVBPrint* @bp); + public static av_channel_layout_describe_bprint_delegate av_channel_layout_describe_bprint; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong av_channel_layout_extract_channel_delegate(ulong @channel_layout, int @index); + public static av_channel_layout_extract_channel_delegate av_channel_layout_extract_channel; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_from_mask_delegate(AVChannelLayout* @channel_layout, ulong @mask); + public static av_channel_layout_from_mask_delegate av_channel_layout_from_mask; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_from_string_delegate(AVChannelLayout* @channel_layout, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + public static av_channel_layout_from_string_delegate av_channel_layout_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_layout_index_from_channel_delegate(AVChannelLayout* @channel_layout, AVChannel @channel); + public static av_channel_layout_index_from_channel_delegate av_channel_layout_index_from_channel; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
av_channel_layout_index_from_string_delegate(AVChannelLayout* @channel_layout, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_channel_layout_index_from_string_delegate av_channel_layout_index_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVChannelLayout* av_channel_layout_standard_delegate(void** @opaque); + public static av_channel_layout_standard_delegate av_channel_layout_standard; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong av_channel_layout_subset_delegate(AVChannelLayout* @channel_layout, ulong @mask); + public static av_channel_layout_subset_delegate av_channel_layout_subset; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_channel_layout_uninit_delegate(AVChannelLayout* @channel_layout); + public static av_channel_layout_uninit_delegate av_channel_layout_uninit; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_channel_name_delegate(byte* @buf, ulong @buf_size, AVChannel @channel); + public static av_channel_name_delegate av_channel_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_channel_name_bprint_delegate(AVBPrint* @bp, AVChannel @channel_id); + public static av_channel_name_bprint_delegate av_channel_name_bprint; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_chroma_location_from_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_chroma_location_from_name_delegate av_chroma_location_from_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef 
= typeof(ConstCharPtrMarshaler))] + public delegate string av_chroma_location_name_delegate(AVChromaLocation @location); + public static av_chroma_location_name_delegate av_chroma_location_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecID av_codec_get_id_delegate(AVCodecTag** @tags, uint @tag); + public static av_codec_get_id_delegate av_codec_get_id; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint av_codec_get_tag_delegate(AVCodecTag** @tags, AVCodecID @id); + public static av_codec_get_tag_delegate av_codec_get_tag; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_codec_get_tag2_delegate(AVCodecTag** @tags, AVCodecID @id, uint* @tag); + public static av_codec_get_tag2_delegate av_codec_get_tag2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_codec_is_decoder_delegate(AVCodec* @codec); + public static av_codec_is_decoder_delegate av_codec_is_decoder; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_codec_is_encoder_delegate(AVCodec* @codec); + public static av_codec_is_encoder_delegate av_codec_is_encoder; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodec* av_codec_iterate_delegate(void** @opaque); + public static av_codec_iterate_delegate av_codec_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_color_primaries_from_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_color_primaries_from_name_delegate av_color_primaries_from_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string 
av_color_primaries_name_delegate(AVColorPrimaries @primaries); + public static av_color_primaries_name_delegate av_color_primaries_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_color_range_from_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_color_range_from_name_delegate av_color_range_from_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_color_range_name_delegate(AVColorRange @range); + public static av_color_range_name_delegate av_color_range_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_color_space_from_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_color_space_from_name_delegate av_color_space_from_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_color_space_name_delegate(AVColorSpace @space); + public static av_color_space_name_delegate av_color_space_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_color_transfer_from_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_color_transfer_from_name_delegate av_color_transfer_from_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, 
MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_color_transfer_name_delegate(AVColorTransferCharacteristic @transfer); + public static av_color_transfer_name_delegate av_color_transfer_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_compare_mod_delegate(ulong @a, ulong @b, ulong @mod); + public static av_compare_mod_delegate av_compare_mod; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_compare_ts_delegate(long @ts_a, AVRational @tb_a, long @ts_b, AVRational @tb_b); + public static av_compare_ts_delegate av_compare_ts; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVContentLightMetadata* av_content_light_metadata_alloc_delegate(ulong* @size); + public static av_content_light_metadata_alloc_delegate av_content_light_metadata_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVContentLightMetadata* av_content_light_metadata_create_side_data_delegate(AVFrame* @frame); + public static av_content_light_metadata_create_side_data_delegate av_content_light_metadata_create_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCPBProperties* av_cpb_properties_alloc_delegate(ulong* @size); + public static av_cpb_properties_alloc_delegate av_cpb_properties_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_cpu_count_delegate(); + public static av_cpu_count_delegate av_cpu_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_cpu_force_count_delegate(int @count); + public static av_cpu_force_count_delegate av_cpu_force_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong av_cpu_max_align_delegate(); + public static av_cpu_max_align_delegate av_cpu_max_align; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_d2q_delegate(double @d, int @max); + 
public static av_d2q_delegate av_d2q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVD3D11VAContext* av_d3d11va_alloc_context_delegate(); + public static av_d3d11va_alloc_context_delegate av_d3d11va_alloc_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClassCategory av_default_get_category_delegate(void* @ptr); + public static av_default_get_category_delegate av_default_get_category; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_default_item_name_delegate(void* @ctx); + public static av_default_item_name_delegate av_default_item_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVInputFormat* av_demuxer_iterate_delegate(void** @opaque); + public static av_demuxer_iterate_delegate av_demuxer_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_dict_copy_delegate(AVDictionary** @dst, AVDictionary* @src, int @flags); + public static av_dict_copy_delegate av_dict_copy; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_dict_count_delegate(AVDictionary* @m); + public static av_dict_count_delegate av_dict_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_dict_free_delegate(AVDictionary** @m); + public static av_dict_free_delegate av_dict_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVDictionaryEntry* av_dict_get_delegate(AVDictionary* @m, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, AVDictionaryEntry* @prev, int @flags); + public static av_dict_get_delegate av_dict_get; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
av_dict_get_string_delegate(AVDictionary* @m, byte** @buffer, byte @key_val_sep, byte @pairs_sep); + public static av_dict_get_string_delegate av_dict_get_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_dict_parse_string_delegate(AVDictionary** @pm, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @pairs_sep, int @flags); + public static av_dict_parse_string_delegate av_dict_parse_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_dict_set_delegate(AVDictionary** @pm, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @value, int @flags); + public static av_dict_set_delegate av_dict_set; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_dict_set_int_delegate(AVDictionary** @pm, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, long @value, int @flags); + public static av_dict_set_int_delegate av_dict_set_int; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
av_disposition_from_string_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @disp); + public static av_disposition_from_string_delegate av_disposition_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_disposition_to_string_delegate(int @disposition); + public static av_disposition_to_string_delegate av_disposition_to_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_div_q_delegate(AVRational @b, AVRational @c); + public static av_div_q_delegate av_div_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_dump_format_delegate(AVFormatContext* @ic, int @index, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @is_output); + public static av_dump_format_delegate av_dump_format; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVDynamicHDRPlus* av_dynamic_hdr_plus_alloc_delegate(ulong* @size); + public static av_dynamic_hdr_plus_alloc_delegate av_dynamic_hdr_plus_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVDynamicHDRPlus* av_dynamic_hdr_plus_create_side_data_delegate(AVFrame* @frame); + public static av_dynamic_hdr_plus_create_side_data_delegate av_dynamic_hdr_plus_create_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_dynarray_add_delegate(void* @tab_ptr, int* @nb_ptr, void* @elem); + public static av_dynarray_add_delegate av_dynarray_add; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_dynarray_add_nofree_delegate(void* 
@tab_ptr, int* @nb_ptr, void* @elem); + public static av_dynarray_add_nofree_delegate av_dynarray_add_nofree; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_dynarray2_add_delegate(void** @tab_ptr, int* @nb_ptr, ulong @elem_size, byte* @elem_data); + public static av_dynarray2_add_delegate av_dynarray2_add; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_fast_malloc_delegate(void* @ptr, uint* @size, ulong @min_size); + public static av_fast_malloc_delegate av_fast_malloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_fast_mallocz_delegate(void* @ptr, uint* @size, ulong @min_size); + public static av_fast_mallocz_delegate av_fast_mallocz; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_fast_padded_malloc_delegate(void* @ptr, uint* @size, ulong @min_size); + public static av_fast_padded_malloc_delegate av_fast_padded_malloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_fast_padded_mallocz_delegate(void* @ptr, uint* @size, ulong @min_size); + public static av_fast_padded_mallocz_delegate av_fast_padded_mallocz; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_fast_realloc_delegate(void* @ptr, uint* @size, ulong @min_size); + public static av_fast_realloc_delegate av_fast_realloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_file_map_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename, byte** @bufptr, ulong* @size, int @log_offset, void* @log_ctx); + public static av_file_map_delegate av_file_map; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_file_unmap_delegate(byte* @bufptr, ulong @size); + public static av_file_unmap_delegate av_file_unmap; + 
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_filename_number_test_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename); + public static av_filename_number_test_delegate av_filename_number_test; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFilter* av_filter_iterate_delegate(void** @opaque); + public static av_filter_iterate_delegate av_filter_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixelFormat av_find_best_pix_fmt_of_2_delegate(AVPixelFormat @dst_pix_fmt1, AVPixelFormat @dst_pix_fmt2, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr); + public static av_find_best_pix_fmt_of_2_delegate av_find_best_pix_fmt_of_2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_find_best_stream_delegate(AVFormatContext* @ic, AVMediaType @type, int @wanted_stream_nb, int @related_stream, AVCodec** @decoder_ret, int @flags); + public static av_find_best_stream_delegate av_find_best_stream; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_find_default_stream_index_delegate(AVFormatContext* @s); + public static av_find_default_stream_index_delegate av_find_default_stream_index; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVInputFormat* av_find_input_format_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @short_name); + public static av_find_input_format_delegate av_find_input_format; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_find_nearest_q_idx_delegate(AVRational @q, AVRational* @q_list); + public static av_find_nearest_q_idx_delegate 
av_find_nearest_q_idx; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVProgram* av_find_program_from_stream_delegate(AVFormatContext* @ic, AVProgram* @last, int @s); + public static av_find_program_from_stream_delegate av_find_program_from_stream; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVDurationEstimationMethod av_fmt_ctx_get_duration_estimation_method_delegate(AVFormatContext* @ctx); + public static av_fmt_ctx_get_duration_estimation_method_delegate av_fmt_ctx_get_duration_estimation_method; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate _iobuf* av_fopen_utf8_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @path, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @mode); + public static av_fopen_utf8_delegate av_fopen_utf8; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_force_cpu_flags_delegate(int @flags); + public static av_force_cpu_flags_delegate av_force_cpu_flags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_format_inject_global_side_data_delegate(AVFormatContext* @s); + public static av_format_inject_global_side_data_delegate av_format_inject_global_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_fourcc_make_string_delegate(byte* @buf, uint @fourcc); + public static av_fourcc_make_string_delegate av_fourcc_make_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFrame* av_frame_alloc_delegate(); + public static av_frame_alloc_delegate av_frame_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
av_frame_apply_cropping_delegate(AVFrame* @frame, int @flags); + public static av_frame_apply_cropping_delegate av_frame_apply_cropping; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFrame* av_frame_clone_delegate(AVFrame* @src); + public static av_frame_clone_delegate av_frame_clone; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_frame_copy_delegate(AVFrame* @dst, AVFrame* @src); + public static av_frame_copy_delegate av_frame_copy; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_frame_copy_props_delegate(AVFrame* @dst, AVFrame* @src); + public static av_frame_copy_props_delegate av_frame_copy_props; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_frame_free_delegate(AVFrame** @frame); + public static av_frame_free_delegate av_frame_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_frame_get_buffer_delegate(AVFrame* @frame, int @align); + public static av_frame_get_buffer_delegate av_frame_get_buffer; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_frame_get_plane_buffer_delegate(AVFrame* @frame, int @plane); + public static av_frame_get_plane_buffer_delegate av_frame_get_plane_buffer; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFrameSideData* av_frame_get_side_data_delegate(AVFrame* @frame, AVFrameSideDataType @type); + public static av_frame_get_side_data_delegate av_frame_get_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_frame_is_writable_delegate(AVFrame* @frame); + public static av_frame_is_writable_delegate av_frame_is_writable; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_frame_make_writable_delegate(AVFrame* @frame); + public static av_frame_make_writable_delegate av_frame_make_writable; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_frame_move_ref_delegate(AVFrame* @dst, AVFrame* @src); + public static av_frame_move_ref_delegate av_frame_move_ref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFrameSideData* av_frame_new_side_data_delegate(AVFrame* @frame, AVFrameSideDataType @type, ulong @size); + public static av_frame_new_side_data_delegate av_frame_new_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFrameSideData* av_frame_new_side_data_from_buf_delegate(AVFrame* @frame, AVFrameSideDataType @type, AVBufferRef* @buf); + public static av_frame_new_side_data_from_buf_delegate av_frame_new_side_data_from_buf; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_frame_ref_delegate(AVFrame* @dst, AVFrame* @src); + public static av_frame_ref_delegate av_frame_ref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_frame_remove_side_data_delegate(AVFrame* @frame, AVFrameSideDataType @type); + public static av_frame_remove_side_data_delegate av_frame_remove_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_frame_side_data_name_delegate(AVFrameSideDataType @type); + public static av_frame_side_data_name_delegate av_frame_side_data_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_frame_unref_delegate(AVFrame* @frame); + public static av_frame_unref_delegate av_frame_unref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_free_delegate(void* @ptr); + public static av_free_delegate av_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_freep_delegate(void* @ptr); + public static av_freep_delegate av_freep; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_gcd_delegate(long @a, long @b); + public static av_gcd_delegate av_gcd; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_gcd_q_delegate(AVRational @a, AVRational @b, int @max_den, AVRational @def); + public static av_gcd_q_delegate av_gcd_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVSampleFormat av_get_alt_sample_fmt_delegate(AVSampleFormat @sample_fmt, int @planar); + public static av_get_alt_sample_fmt_delegate av_get_alt_sample_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_audio_frame_duration_delegate(AVCodecContext* @avctx, int @frame_bytes); + public static av_get_audio_frame_duration_delegate av_get_audio_frame_duration; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_audio_frame_duration2_delegate(AVCodecParameters* @par, int @frame_bytes); + public static av_get_audio_frame_duration2_delegate av_get_audio_frame_duration2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_bits_per_pixel_delegate(AVPixFmtDescriptor* @pixdesc); + public static av_get_bits_per_pixel_delegate av_get_bits_per_pixel; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_bits_per_sample_delegate(AVCodecID @codec_id); + public static av_get_bits_per_sample_delegate av_get_bits_per_sample; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_bytes_per_sample_delegate(AVSampleFormat @sample_fmt); + public static av_get_bytes_per_sample_delegate av_get_bytes_per_sample; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_get_channel_description_delegate(ulong @channel); + public static av_get_channel_description_delegate 
av_get_channel_description; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong av_get_channel_layout_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_get_channel_layout_delegate av_get_channel_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_channel_layout_channel_index_delegate(ulong @channel_layout, ulong @channel); + public static av_get_channel_layout_channel_index_delegate av_get_channel_layout_channel_index; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_channel_layout_nb_channels_delegate(ulong @channel_layout); + public static av_get_channel_layout_nb_channels_delegate av_get_channel_layout_nb_channels; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_get_channel_layout_string_delegate(byte* @buf, int @buf_size, int @nb_channels, ulong @channel_layout); + public static av_get_channel_layout_string_delegate av_get_channel_layout_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_get_channel_name_delegate(ulong @channel); + public static av_get_channel_name_delegate av_get_channel_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_get_colorspace_name_delegate(AVColorSpace @val); + public static av_get_colorspace_name_delegate av_get_colorspace_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_cpu_flags_delegate(); + public static av_get_cpu_flags_delegate av_get_cpu_flags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + 
public delegate long av_get_default_channel_layout_delegate(int @nb_channels); + public static av_get_default_channel_layout_delegate av_get_default_channel_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_exact_bits_per_sample_delegate(AVCodecID @codec_id); + public static av_get_exact_bits_per_sample_delegate av_get_exact_bits_per_sample; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_extended_channel_layout_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, ulong* @channel_layout, int* @nb_channels); + public static av_get_extended_channel_layout_delegate av_get_extended_channel_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_frame_filename_delegate(byte* @buf, int @buf_size, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @path, int @number); + public static av_get_frame_filename_delegate av_get_frame_filename; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_frame_filename2_delegate(byte* @buf, int @buf_size, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @path, int @number, int @flags); + public static av_get_frame_filename2_delegate av_get_frame_filename2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_get_media_type_string_delegate(AVMediaType @media_type); + public static av_get_media_type_string_delegate av_get_media_type_string; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_output_timestamp_delegate(AVFormatContext* @s, int @stream, long* @dts, long* @wall); + public static av_get_output_timestamp_delegate av_get_output_timestamp; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVSampleFormat av_get_packed_sample_fmt_delegate(AVSampleFormat @sample_fmt); + public static av_get_packed_sample_fmt_delegate av_get_packed_sample_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_packet_delegate(AVIOContext* @s, AVPacket* @pkt, int @size); + public static av_get_packet_delegate av_get_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_padded_bits_per_pixel_delegate(AVPixFmtDescriptor* @pixdesc); + public static av_get_padded_bits_per_pixel_delegate av_get_padded_bits_per_pixel; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecID av_get_pcm_codec_delegate(AVSampleFormat @fmt, int @be); + public static av_get_pcm_codec_delegate av_get_pcm_codec; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte av_get_picture_type_char_delegate(AVPictureType @pict_type); + public static av_get_picture_type_char_delegate av_get_picture_type_char; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixelFormat av_get_pix_fmt_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_get_pix_fmt_delegate av_get_pix_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_get_pix_fmt_loss_delegate(AVPixelFormat @dst_pix_fmt, AVPixelFormat @src_pix_fmt, int @has_alpha); + public static av_get_pix_fmt_loss_delegate av_get_pix_fmt_loss; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: 
MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_get_pix_fmt_name_delegate(AVPixelFormat @pix_fmt); + public static av_get_pix_fmt_name_delegate av_get_pix_fmt_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_get_pix_fmt_string_delegate(byte* @buf, int @buf_size, AVPixelFormat @pix_fmt); + public static av_get_pix_fmt_string_delegate av_get_pix_fmt_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVSampleFormat av_get_planar_sample_fmt_delegate(AVSampleFormat @sample_fmt); + public static av_get_planar_sample_fmt_delegate av_get_planar_sample_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_get_profile_name_delegate(AVCodec* @codec, int @profile); + public static av_get_profile_name_delegate av_get_profile_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVSampleFormat av_get_sample_fmt_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_get_sample_fmt_delegate av_get_sample_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_get_sample_fmt_name_delegate(AVSampleFormat @sample_fmt); + public static av_get_sample_fmt_name_delegate av_get_sample_fmt_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_get_sample_fmt_string_delegate(byte* @buf, int @buf_size, AVSampleFormat @sample_fmt); + public static av_get_sample_fmt_string_delegate av_get_sample_fmt_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public 
delegate int av_get_standard_channel_layout_delegate(uint @index, ulong* @layout, byte** @name); + public static av_get_standard_channel_layout_delegate av_get_standard_channel_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_get_time_base_q_delegate(); + public static av_get_time_base_q_delegate av_get_time_base_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_gettime_delegate(); + public static av_gettime_delegate av_gettime; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_gettime_relative_delegate(); + public static av_gettime_relative_delegate av_gettime_relative; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_gettime_relative_is_monotonic_delegate(); + public static av_gettime_relative_is_monotonic_delegate av_gettime_relative_is_monotonic; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_grow_packet_delegate(AVPacket* @pkt, int @grow_by); + public static av_grow_packet_delegate av_grow_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecID av_guess_codec_delegate(AVOutputFormat* @fmt, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @short_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @mime_type, AVMediaType @type); + public static av_guess_codec_delegate av_guess_codec; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOutputFormat* av_guess_format_delegate( 
+ #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @short_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @mime_type); + public static av_guess_format_delegate av_guess_format; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_guess_frame_rate_delegate(AVFormatContext* @ctx, AVStream* @stream, AVFrame* @frame); + public static av_guess_frame_rate_delegate av_guess_frame_rate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_guess_sample_aspect_ratio_delegate(AVFormatContext* @format, AVStream* @stream, AVFrame* @frame); + public static av_guess_sample_aspect_ratio_delegate av_guess_sample_aspect_ratio; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_hex_dump_delegate(_iobuf* @f, byte* @buf, int @size); + public static av_hex_dump_delegate av_hex_dump; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_hex_dump_log_delegate(void* @avcl, int @level, byte* @buf, int @size); + public static av_hex_dump_log_delegate av_hex_dump_log; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_hwdevice_ctx_alloc_delegate(AVHWDeviceType @type); + public static av_hwdevice_ctx_alloc_delegate av_hwdevice_ctx_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_hwdevice_ctx_create_delegate(AVBufferRef** @device_ctx, AVHWDeviceType @type, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + 
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @device, AVDictionary* @opts, int @flags); + public static av_hwdevice_ctx_create_delegate av_hwdevice_ctx_create; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_hwdevice_ctx_create_derived_delegate(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, int @flags); + public static av_hwdevice_ctx_create_derived_delegate av_hwdevice_ctx_create_derived; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_hwdevice_ctx_create_derived_opts_delegate(AVBufferRef** @dst_ctx, AVHWDeviceType @type, AVBufferRef* @src_ctx, AVDictionary* @options, int @flags); + public static av_hwdevice_ctx_create_derived_opts_delegate av_hwdevice_ctx_create_derived_opts; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_hwdevice_ctx_init_delegate(AVBufferRef* @ref); + public static av_hwdevice_ctx_init_delegate av_hwdevice_ctx_init; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVHWDeviceType av_hwdevice_find_type_by_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_hwdevice_find_type_by_name_delegate av_hwdevice_find_type_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVHWFramesConstraints* av_hwdevice_get_hwframe_constraints_delegate(AVBufferRef* @ref, void* @hwconfig); + public static av_hwdevice_get_hwframe_constraints_delegate av_hwdevice_get_hwframe_constraints; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_hwdevice_get_type_name_delegate(AVHWDeviceType @type); + public static av_hwdevice_get_type_name_delegate 
av_hwdevice_get_type_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_hwdevice_hwconfig_alloc_delegate(AVBufferRef* @device_ctx); + public static av_hwdevice_hwconfig_alloc_delegate av_hwdevice_hwconfig_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVHWDeviceType av_hwdevice_iterate_types_delegate(AVHWDeviceType @prev); + public static av_hwdevice_iterate_types_delegate av_hwdevice_iterate_types; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_hwframe_constraints_free_delegate(AVHWFramesConstraints** @constraints); + public static av_hwframe_constraints_free_delegate av_hwframe_constraints_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVBufferRef* av_hwframe_ctx_alloc_delegate(AVBufferRef* @device_ctx); + public static av_hwframe_ctx_alloc_delegate av_hwframe_ctx_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_hwframe_ctx_create_derived_delegate(AVBufferRef** @derived_frame_ctx, AVPixelFormat @format, AVBufferRef* @derived_device_ctx, AVBufferRef* @source_frame_ctx, int @flags); + public static av_hwframe_ctx_create_derived_delegate av_hwframe_ctx_create_derived; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_hwframe_ctx_init_delegate(AVBufferRef* @ref); + public static av_hwframe_ctx_init_delegate av_hwframe_ctx_init; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_hwframe_get_buffer_delegate(AVBufferRef* @hwframe_ctx, AVFrame* @frame, int @flags); + public static av_hwframe_get_buffer_delegate av_hwframe_get_buffer; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_hwframe_map_delegate(AVFrame* @dst, AVFrame* @src, int @flags); + public static av_hwframe_map_delegate av_hwframe_map; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
av_hwframe_transfer_data_delegate(AVFrame* @dst, AVFrame* @src, int @flags); + public static av_hwframe_transfer_data_delegate av_hwframe_transfer_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_hwframe_transfer_get_formats_delegate(AVBufferRef* @hwframe_ctx, AVHWFrameTransferDirection @dir, AVPixelFormat** @formats, int @flags); + public static av_hwframe_transfer_get_formats_delegate av_hwframe_transfer_get_formats; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_image_alloc_delegate(ref byte_ptrArray4 @pointers, ref int_array4 @linesizes, int @w, int @h, AVPixelFormat @pix_fmt, int @align); + public static av_image_alloc_delegate av_image_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_image_check_sar_delegate(uint @w, uint @h, AVRational @sar); + public static av_image_check_sar_delegate av_image_check_sar; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_image_check_size_delegate(uint @w, uint @h, int @log_offset, void* @log_ctx); + public static av_image_check_size_delegate av_image_check_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_image_check_size2_delegate(uint @w, uint @h, long @max_pixels, AVPixelFormat @pix_fmt, int @log_offset, void* @log_ctx); + public static av_image_check_size2_delegate av_image_check_size2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_image_copy_delegate(ref byte_ptrArray4 @dst_data, ref int_array4 @dst_linesizes, in byte_ptrArray4 @src_data, in int_array4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height); + public static av_image_copy_delegate av_image_copy; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_image_copy_plane_delegate(byte* @dst, int @dst_linesize, byte* @src, int @src_linesize, int @bytewidth, int @height); + public static 
av_image_copy_plane_delegate av_image_copy_plane; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_image_copy_plane_uc_from_delegate(byte* @dst, long @dst_linesize, byte* @src, long @src_linesize, long @bytewidth, int @height); + public static av_image_copy_plane_uc_from_delegate av_image_copy_plane_uc_from; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_image_copy_to_buffer_delegate(byte* @dst, int @dst_size, in byte_ptrArray4 @src_data, in int_array4 @src_linesize, AVPixelFormat @pix_fmt, int @width, int @height, int @align); + public static av_image_copy_to_buffer_delegate av_image_copy_to_buffer; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_image_copy_uc_from_delegate(ref byte_ptrArray4 @dst_data, in long_array4 @dst_linesizes, in byte_ptrArray4 @src_data, in long_array4 @src_linesizes, AVPixelFormat @pix_fmt, int @width, int @height); + public static av_image_copy_uc_from_delegate av_image_copy_uc_from; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_image_fill_arrays_delegate(ref byte_ptrArray4 @dst_data, ref int_array4 @dst_linesize, byte* @src, AVPixelFormat @pix_fmt, int @width, int @height, int @align); + public static av_image_fill_arrays_delegate av_image_fill_arrays; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_image_fill_black_delegate(ref byte_ptrArray4 @dst_data, in long_array4 @dst_linesize, AVPixelFormat @pix_fmt, AVColorRange @range, int @width, int @height); + public static av_image_fill_black_delegate av_image_fill_black; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_image_fill_linesizes_delegate(ref int_array4 @linesizes, AVPixelFormat @pix_fmt, int @width); + public static av_image_fill_linesizes_delegate av_image_fill_linesizes; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void 
av_image_fill_max_pixsteps_delegate(ref int_array4 @max_pixsteps, ref int_array4 @max_pixstep_comps, AVPixFmtDescriptor* @pixdesc); + public static av_image_fill_max_pixsteps_delegate av_image_fill_max_pixsteps; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_image_fill_plane_sizes_delegate(ref ulong_array4 @size, AVPixelFormat @pix_fmt, int @height, in long_array4 @linesizes); + public static av_image_fill_plane_sizes_delegate av_image_fill_plane_sizes; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_image_fill_pointers_delegate(ref byte_ptrArray4 @data, AVPixelFormat @pix_fmt, int @height, byte* @ptr, in int_array4 @linesizes); + public static av_image_fill_pointers_delegate av_image_fill_pointers; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_image_get_buffer_size_delegate(AVPixelFormat @pix_fmt, int @width, int @height, int @align); + public static av_image_get_buffer_size_delegate av_image_get_buffer_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_image_get_linesize_delegate(AVPixelFormat @pix_fmt, int @width, int @plane); + public static av_image_get_linesize_delegate av_image_get_linesize; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_index_search_timestamp_delegate(AVStream* @st, long @timestamp, int @flags); + public static av_index_search_timestamp_delegate av_index_search_timestamp; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_init_packet_delegate(AVPacket* @pkt); + public static av_init_packet_delegate av_init_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVInputFormat* av_input_audio_device_next_delegate(AVInputFormat* @d); + public static av_input_audio_device_next_delegate av_input_audio_device_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVInputFormat* 
av_input_video_device_next_delegate(AVInputFormat* @d); + public static av_input_video_device_next_delegate av_input_video_device_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint av_int_list_length_for_size_delegate(uint @elsize, void* @list, ulong @term); + public static av_int_list_length_for_size_delegate av_int_list_length_for_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_interleaved_write_frame_delegate(AVFormatContext* @s, AVPacket* @pkt); + public static av_interleaved_write_frame_delegate av_interleaved_write_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_interleaved_write_uncoded_frame_delegate(AVFormatContext* @s, int @stream_index, AVFrame* @frame); + public static av_interleaved_write_uncoded_frame_delegate av_interleaved_write_uncoded_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_log_delegate(void* @avcl, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt); + public static av_log_delegate av_log; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_log_default_callback_delegate(void* @avcl, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl); + public static av_log_default_callback_delegate av_log_default_callback; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_log_format_line_delegate(void* @ptr, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl, byte* @line, int 
@line_size, int* @print_prefix); + public static av_log_format_line_delegate av_log_format_line; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_log_format_line2_delegate(void* @ptr, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl, byte* @line, int @line_size, int* @print_prefix); + public static av_log_format_line2_delegate av_log_format_line2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_log_get_flags_delegate(); + public static av_log_get_flags_delegate av_log_get_flags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_log_get_level_delegate(); + public static av_log_get_level_delegate av_log_get_level; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_log_once_delegate(void* @avcl, int @initial_level, int @subsequent_level, int* @state, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt); + public static av_log_once_delegate av_log_once; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_log_set_callback_delegate(av_log_set_callback_callback_func @callback); + public static av_log_set_callback_delegate av_log_set_callback; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_log_set_flags_delegate(int @arg); + public static av_log_set_flags_delegate av_log_set_flags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_log_set_level_delegate(int @level); + public static av_log_set_level_delegate av_log_set_level; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_log2_delegate(uint @v); + public static av_log2_delegate 
av_log2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_log2_16bit_delegate(uint @v); + public static av_log2_16bit_delegate av_log2_16bit; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_malloc_delegate(ulong @size); + public static av_malloc_delegate av_malloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_malloc_array_delegate(ulong @nmemb, ulong @size); + public static av_malloc_array_delegate av_malloc_array; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_mallocz_delegate(ulong @size); + public static av_mallocz_delegate av_mallocz; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_mallocz_array_delegate(ulong @nmemb, ulong @size); + public static av_mallocz_array_delegate av_mallocz_array; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVMasteringDisplayMetadata* av_mastering_display_metadata_alloc_delegate(); + public static av_mastering_display_metadata_alloc_delegate av_mastering_display_metadata_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVMasteringDisplayMetadata* av_mastering_display_metadata_create_side_data_delegate(AVFrame* @frame); + public static av_mastering_display_metadata_create_side_data_delegate av_mastering_display_metadata_create_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_match_ext_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @extensions); + public static av_match_ext_delegate av_match_ext; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_max_alloc_delegate(ulong @max); + public static av_max_alloc_delegate av_max_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_memcpy_backptr_delegate(byte* @dst, int @back, int @cnt); + public static av_memcpy_backptr_delegate av_memcpy_backptr; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_memdup_delegate(void* @p, ulong @size); + public static av_memdup_delegate av_memdup; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_mul_q_delegate(AVRational @b, AVRational @c); + public static av_mul_q_delegate av_mul_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOutputFormat* av_muxer_iterate_delegate(void** @opaque); + public static av_muxer_iterate_delegate av_muxer_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_nearer_q_delegate(AVRational @q, AVRational @q1, AVRational @q2); + public static av_nearer_q_delegate av_nearer_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_new_packet_delegate(AVPacket* @pkt, int @size); + public static av_new_packet_delegate av_new_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVProgram* av_new_program_delegate(AVFormatContext* @s, int @id); + public static av_new_program_delegate av_new_program; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* av_opt_child_class_iterate_delegate(AVClass* @parent, void** @iter); + public static av_opt_child_class_iterate_delegate av_opt_child_class_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_opt_child_next_delegate(void* @obj, void* @prev); + public static av_opt_child_next_delegate av_opt_child_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
av_opt_copy_delegate(void* @dest, void* @src); + public static av_opt_copy_delegate av_opt_copy; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_eval_double_delegate(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, double* @double_out); + public static av_opt_eval_double_delegate av_opt_eval_double; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_eval_flags_delegate(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, int* @flags_out); + public static av_opt_eval_flags_delegate av_opt_eval_flags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_eval_float_delegate(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, float* @float_out); + public static av_opt_eval_float_delegate av_opt_eval_float; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_eval_int_delegate(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, int* @int_out); + public static av_opt_eval_int_delegate av_opt_eval_int; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_eval_int64_delegate(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + 
string @val, long* @int64_out); + public static av_opt_eval_int64_delegate av_opt_eval_int64; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_eval_q_delegate(void* @obj, AVOption* @o, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, AVRational* @q_out); + public static av_opt_eval_q_delegate av_opt_eval_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOption* av_opt_find_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @unit, int @opt_flags, int @search_flags); + public static av_opt_find_delegate av_opt_find; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOption* av_opt_find2_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @unit, int @opt_flags, int @search_flags, void** @target_obj); + public static av_opt_find2_delegate av_opt_find2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_flag_is_set_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @field_name, + #if 
NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @flag_name); + public static av_opt_flag_is_set_delegate av_opt_flag_is_set; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_opt_free_delegate(void* @obj); + public static av_opt_free_delegate av_opt_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_opt_freep_ranges_delegate(AVOptionRanges** @ranges); + public static av_opt_freep_ranges_delegate av_opt_freep_ranges; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, byte** @out_val); + public static av_opt_get_delegate av_opt_get; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_channel_layout_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, long* @ch_layout); + public static av_opt_get_channel_layout_delegate av_opt_get_channel_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_chlayout_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVChannelLayout* @layout); + public static av_opt_get_chlayout_delegate av_opt_get_chlayout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_dict_val_delegate(void* @obj, + #if 
NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVDictionary** @out_val); + public static av_opt_get_dict_val_delegate av_opt_get_dict_val; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_double_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, double* @out_val); + public static av_opt_get_double_delegate av_opt_get_double; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_image_size_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, int* @w_out, int* @h_out); + public static av_opt_get_image_size_delegate av_opt_get_image_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_int_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, long* @out_val); + public static av_opt_get_int_delegate av_opt_get_int; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_key_value_delegate(byte** @ropts, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = 
typeof(UTF8Marshaler))] + #endif + string @pairs_sep, uint @flags, byte** @rkey, byte** @rval); + public static av_opt_get_key_value_delegate av_opt_get_key_value; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_pixel_fmt_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVPixelFormat* @out_fmt); + public static av_opt_get_pixel_fmt_delegate av_opt_get_pixel_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_q_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVRational* @out_val); + public static av_opt_get_q_delegate av_opt_get_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_sample_fmt_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVSampleFormat* @out_fmt); + public static av_opt_get_sample_fmt_delegate av_opt_get_sample_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_get_video_rate_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags, AVRational* @out_val); + public static av_opt_get_video_rate_delegate av_opt_get_video_rate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_is_set_to_default_delegate(void* @obj, AVOption* @o); + public static 
av_opt_is_set_to_default_delegate av_opt_is_set_to_default; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_is_set_to_default_by_name_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @search_flags); + public static av_opt_is_set_to_default_by_name_delegate av_opt_is_set_to_default_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOption* av_opt_next_delegate(void* @obj, AVOption* @prev); + public static av_opt_next_delegate av_opt_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_opt_ptr_delegate(AVClass* @avclass, void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static av_opt_ptr_delegate av_opt_ptr; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_query_ranges_delegate(AVOptionRanges** @p0, void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, int @flags); + public static av_opt_query_ranges_delegate av_opt_query_ranges; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_query_ranges_default_delegate(AVOptionRanges** @p0, void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key, int @flags); + public static av_opt_query_ranges_default_delegate av_opt_query_ranges_default; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_serialize_delegate(void* 
@obj, int @opt_flags, int @flags, byte** @buffer, byte @key_val_sep, byte @pairs_sep); + public static av_opt_serialize_delegate av_opt_serialize; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @val, int @search_flags); + public static av_opt_set_delegate av_opt_set; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_bin_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, byte* @val, int @size, int @search_flags); + public static av_opt_set_bin_delegate av_opt_set_bin; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_channel_layout_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, long @ch_layout, int @search_flags); + public static av_opt_set_channel_layout_delegate av_opt_set_channel_layout; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_chlayout_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVChannelLayout* @layout, int @search_flags); + public static av_opt_set_chlayout_delegate av_opt_set_chlayout; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_opt_set_defaults_delegate(void* @s); + public static av_opt_set_defaults_delegate av_opt_set_defaults; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_opt_set_defaults2_delegate(void* @s, int @mask, int @flags); + public static av_opt_set_defaults2_delegate av_opt_set_defaults2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_dict_delegate(void* @obj, AVDictionary** @options); + public static av_opt_set_dict_delegate av_opt_set_dict; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_dict_val_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVDictionary* @val, int @search_flags); + public static av_opt_set_dict_val_delegate av_opt_set_dict_val; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_dict2_delegate(void* @obj, AVDictionary** @options, int @search_flags); + public static av_opt_set_dict2_delegate av_opt_set_dict2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_double_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, double @val, int @search_flags); + public static av_opt_set_double_delegate av_opt_set_double; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_from_string_delegate(void* @ctx, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @opts, byte** @shorthand, + #if NETSTANDARD2_1_OR_GREATER + 
[MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @pairs_sep); + public static av_opt_set_from_string_delegate av_opt_set_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_image_size_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, int @w, int @h, int @search_flags); + public static av_opt_set_image_size_delegate av_opt_set_image_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_int_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, long @val, int @search_flags); + public static av_opt_set_int_delegate av_opt_set_int; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_pixel_fmt_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVPixelFormat @fmt, int @search_flags); + public static av_opt_set_pixel_fmt_delegate av_opt_set_pixel_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_q_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVRational @val, int @search_flags); + public 
static av_opt_set_q_delegate av_opt_set_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_sample_fmt_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVSampleFormat @fmt, int @search_flags); + public static av_opt_set_sample_fmt_delegate av_opt_set_sample_fmt; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_set_video_rate_delegate(void* @obj, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, AVRational @val, int @search_flags); + public static av_opt_set_video_rate_delegate av_opt_set_video_rate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_opt_show2_delegate(void* @obj, void* @av_log_obj, int @req_flags, int @rej_flags); + public static av_opt_show2_delegate av_opt_show2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOutputFormat* av_output_audio_device_next_delegate(AVOutputFormat* @d); + public static av_output_audio_device_next_delegate av_output_audio_device_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVOutputFormat* av_output_video_device_next_delegate(AVOutputFormat* @d); + public static av_output_video_device_next_delegate av_output_video_device_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_add_side_data_delegate(AVPacket* @pkt, AVPacketSideDataType @type, byte* @data, ulong @size); + public static av_packet_add_side_data_delegate av_packet_add_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPacket* av_packet_alloc_delegate(); + public static av_packet_alloc_delegate av_packet_alloc; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPacket* av_packet_clone_delegate(AVPacket* @src); + public static av_packet_clone_delegate av_packet_clone; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_copy_props_delegate(AVPacket* @dst, AVPacket* @src); + public static av_packet_copy_props_delegate av_packet_copy_props; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_packet_free_delegate(AVPacket** @pkt); + public static av_packet_free_delegate av_packet_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_packet_free_side_data_delegate(AVPacket* @pkt); + public static av_packet_free_side_data_delegate av_packet_free_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_from_data_delegate(AVPacket* @pkt, byte* @data, int @size); + public static av_packet_from_data_delegate av_packet_from_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_packet_get_side_data_delegate(AVPacket* @pkt, AVPacketSideDataType @type, ulong* @size); + public static av_packet_get_side_data_delegate av_packet_get_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_make_refcounted_delegate(AVPacket* @pkt); + public static av_packet_make_refcounted_delegate av_packet_make_refcounted; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_make_writable_delegate(AVPacket* @pkt); + public static av_packet_make_writable_delegate av_packet_make_writable; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_packet_move_ref_delegate(AVPacket* @dst, AVPacket* @src); + public static av_packet_move_ref_delegate av_packet_move_ref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_packet_new_side_data_delegate(AVPacket* @pkt, AVPacketSideDataType 
@type, ulong @size); + public static av_packet_new_side_data_delegate av_packet_new_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_packet_pack_dictionary_delegate(AVDictionary* @dict, ulong* @size); + public static av_packet_pack_dictionary_delegate av_packet_pack_dictionary; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_ref_delegate(AVPacket* @dst, AVPacket* @src); + public static av_packet_ref_delegate av_packet_ref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_packet_rescale_ts_delegate(AVPacket* @pkt, AVRational @tb_src, AVRational @tb_dst); + public static av_packet_rescale_ts_delegate av_packet_rescale_ts; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_shrink_side_data_delegate(AVPacket* @pkt, AVPacketSideDataType @type, ulong @size); + public static av_packet_shrink_side_data_delegate av_packet_shrink_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string av_packet_side_data_name_delegate(AVPacketSideDataType @type); + public static av_packet_side_data_name_delegate av_packet_side_data_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_packet_unpack_dictionary_delegate(byte* @data, ulong @size, AVDictionary** @dict); + public static av_packet_unpack_dictionary_delegate av_packet_unpack_dictionary; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_packet_unref_delegate(AVPacket* @pkt); + public static av_packet_unref_delegate av_packet_unref; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_parse_cpu_caps_delegate(uint* @flags, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, 
MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @s); + public static av_parse_cpu_caps_delegate av_parse_cpu_caps; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_parser_close_delegate(AVCodecParserContext* @s); + public static av_parser_close_delegate av_parser_close; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecParserContext* av_parser_init_delegate(int @codec_id); + public static av_parser_init_delegate av_parser_init; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecParser* av_parser_iterate_delegate(void** @opaque); + public static av_parser_iterate_delegate av_parser_iterate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_parser_parse2_delegate(AVCodecParserContext* @s, AVCodecContext* @avctx, byte** @poutbuf, int* @poutbuf_size, byte* @buf, int @buf_size, long @pts, long @dts, long @pos); + public static av_parser_parse2_delegate av_parser_parse2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_pix_fmt_count_planes_delegate(AVPixelFormat @pix_fmt); + public static av_pix_fmt_count_planes_delegate av_pix_fmt_count_planes; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixFmtDescriptor* av_pix_fmt_desc_get_delegate(AVPixelFormat @pix_fmt); + public static av_pix_fmt_desc_get_delegate av_pix_fmt_desc_get; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixelFormat av_pix_fmt_desc_get_id_delegate(AVPixFmtDescriptor* @desc); + public static av_pix_fmt_desc_get_id_delegate av_pix_fmt_desc_get_id; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixFmtDescriptor* av_pix_fmt_desc_next_delegate(AVPixFmtDescriptor* @prev); + public static av_pix_fmt_desc_next_delegate av_pix_fmt_desc_next; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
av_pix_fmt_get_chroma_sub_sample_delegate(AVPixelFormat @pix_fmt, int* @h_shift, int* @v_shift); + public static av_pix_fmt_get_chroma_sub_sample_delegate av_pix_fmt_get_chroma_sub_sample; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixelFormat av_pix_fmt_swap_endianness_delegate(AVPixelFormat @pix_fmt); + public static av_pix_fmt_swap_endianness_delegate av_pix_fmt_swap_endianness; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_pkt_dump_log2_delegate(void* @avcl, int @level, AVPacket* @pkt, int @dump_payload, AVStream* @st); + public static av_pkt_dump_log2_delegate av_pkt_dump_log2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_pkt_dump2_delegate(_iobuf* @f, AVPacket* @pkt, int @dump_payload, AVStream* @st); + public static av_pkt_dump2_delegate av_pkt_dump2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_probe_input_buffer_delegate(AVIOContext* @pb, AVInputFormat** @fmt, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, void* @logctx, uint @offset, uint @max_probe_size); + public static av_probe_input_buffer_delegate av_probe_input_buffer; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_probe_input_buffer2_delegate(AVIOContext* @pb, AVInputFormat** @fmt, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, void* @logctx, uint @offset, uint @max_probe_size); + public static av_probe_input_buffer2_delegate av_probe_input_buffer2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVInputFormat* av_probe_input_format_delegate(AVProbeData* @pd, int @is_opened); + public static av_probe_input_format_delegate 
av_probe_input_format; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVInputFormat* av_probe_input_format2_delegate(AVProbeData* @pd, int @is_opened, int* @score_max); + public static av_probe_input_format2_delegate av_probe_input_format2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVInputFormat* av_probe_input_format3_delegate(AVProbeData* @pd, int @is_opened, int* @score_ret); + public static av_probe_input_format3_delegate av_probe_input_format3; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_program_add_stream_index_delegate(AVFormatContext* @ac, int @progid, uint @idx); + public static av_program_add_stream_index_delegate av_program_add_stream_index; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint av_q2intfloat_delegate(AVRational @q); + public static av_q2intfloat_delegate av_q2intfloat; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_read_frame_delegate(AVFormatContext* @s, AVPacket* @pkt); + public static av_read_frame_delegate av_read_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_read_image_line_delegate(ushort* @dst, in byte_ptrArray4 @data, in int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component); + public static av_read_image_line_delegate av_read_image_line; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_read_image_line2_delegate(void* @dst, in byte_ptrArray4 @data, in int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @read_pal_component, int @dst_element_size); + public static av_read_image_line2_delegate av_read_image_line2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_read_pause_delegate(AVFormatContext* @s); + public static av_read_pause_delegate av_read_pause; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_read_play_delegate(AVFormatContext* @s); + public static av_read_play_delegate av_read_play; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_realloc_delegate(void* @ptr, ulong @size); + public static av_realloc_delegate av_realloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_realloc_array_delegate(void* @ptr, ulong @nmemb, ulong @size); + public static av_realloc_array_delegate av_realloc_array; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_realloc_f_delegate(void* @ptr, ulong @nelem, ulong @elsize); + public static av_realloc_f_delegate av_realloc_f; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_reallocp_delegate(void* @ptr, ulong @size); + public static av_reallocp_delegate av_reallocp; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_reallocp_array_delegate(void* @ptr, ulong @nmemb, ulong @size); + public static av_reallocp_array_delegate av_reallocp_array; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_reduce_delegate(int* @dst_num, int* @dst_den, long @num, long @den, long @max); + public static av_reduce_delegate av_reduce; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_rescale_delegate(long @a, long @b, long @c); + public static av_rescale_delegate av_rescale; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_rescale_delta_delegate(AVRational @in_tb, long @in_ts, AVRational @fs_tb, int @duration, long* @last, AVRational @out_tb); + public static av_rescale_delta_delegate av_rescale_delta; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_rescale_q_delegate(long @a, AVRational @bq, AVRational @cq); + public static av_rescale_q_delegate av_rescale_q; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_rescale_q_rnd_delegate(long @a, AVRational @bq, AVRational @cq, AVRounding @rnd); + public static av_rescale_q_rnd_delegate av_rescale_q_rnd; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_rescale_rnd_delegate(long @a, long @b, long @c, AVRounding @rnd); + public static av_rescale_rnd_delegate av_rescale_rnd; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_sample_fmt_is_planar_delegate(AVSampleFormat @sample_fmt); + public static av_sample_fmt_is_planar_delegate av_sample_fmt_is_planar; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_samples_alloc_delegate(byte** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + public static av_samples_alloc_delegate av_samples_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_samples_alloc_array_and_samples_delegate(byte*** @audio_data, int* @linesize, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + public static av_samples_alloc_array_and_samples_delegate av_samples_alloc_array_and_samples; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_samples_copy_delegate(byte** @dst, byte** @src, int @dst_offset, int @src_offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt); + public static av_samples_copy_delegate av_samples_copy; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_samples_fill_arrays_delegate(byte** @audio_data, int* @linesize, byte* @buf, int @nb_channels, int @nb_samples, AVSampleFormat @sample_fmt, int @align); + public static av_samples_fill_arrays_delegate av_samples_fill_arrays; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_samples_get_buffer_size_delegate(int* @linesize, int @nb_channels, int 
@nb_samples, AVSampleFormat @sample_fmt, int @align); + public static av_samples_get_buffer_size_delegate av_samples_get_buffer_size; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_samples_set_silence_delegate(byte** @audio_data, int @offset, int @nb_samples, int @nb_channels, AVSampleFormat @sample_fmt); + public static av_samples_set_silence_delegate av_samples_set_silence; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_sdp_create_delegate(AVFormatContext** @ac, int @n_files, byte* @buf, int @size); + public static av_sdp_create_delegate av_sdp_create; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_seek_frame_delegate(AVFormatContext* @s, int @stream_index, long @timestamp, int @flags); + public static av_seek_frame_delegate av_seek_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_set_options_string_delegate(void* @ctx, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @opts, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @key_val_sep, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @pairs_sep); + public static av_set_options_string_delegate av_set_options_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_shrink_packet_delegate(AVPacket* @pkt, int @size); + public static av_shrink_packet_delegate av_shrink_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_size_mult_delegate(ulong @a, ulong @b, ulong* @r); + public static av_size_mult_delegate 
av_size_mult; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_strdup_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @s); + public static av_strdup_delegate av_strdup; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_stream_add_side_data_delegate(AVStream* @st, AVPacketSideDataType @type, byte* @data, ulong @size); + public static av_stream_add_side_data_delegate av_stream_add_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* av_stream_get_class_delegate(); + public static av_stream_get_class_delegate av_stream_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_stream_get_codec_timebase_delegate(AVStream* @st); + public static av_stream_get_codec_timebase_delegate av_stream_get_codec_timebase; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long av_stream_get_end_pts_delegate(AVStream* @st); + public static av_stream_get_end_pts_delegate av_stream_get_end_pts; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecParserContext* av_stream_get_parser_delegate(AVStream* @s); + public static av_stream_get_parser_delegate av_stream_get_parser; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_stream_get_side_data_delegate(AVStream* @stream, AVPacketSideDataType @type, ulong* @size); + public static av_stream_get_side_data_delegate av_stream_get_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_stream_new_side_data_delegate(AVStream* @stream, AVPacketSideDataType @type, ulong @size); + public static av_stream_new_side_data_delegate av_stream_new_side_data; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
av_strerror_delegate(int @errnum, byte* @errbuf, ulong @errbuf_size); + public static av_strerror_delegate av_strerror; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_strndup_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @s, ulong @len); + public static av_strndup_delegate av_strndup; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVRational av_sub_q_delegate(AVRational @b, AVRational @c); + public static av_sub_q_delegate av_sub_q; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_tempfile_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @prefix, byte** @filename, int @log_offset, void* @log_ctx); + public static av_tempfile_delegate av_tempfile; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_timecode_adjust_ntsc_framenum2_delegate(int @framenum, int @fps); + public static av_timecode_adjust_ntsc_framenum2_delegate av_timecode_adjust_ntsc_framenum2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_timecode_check_frame_rate_delegate(AVRational @rate); + public static av_timecode_check_frame_rate_delegate av_timecode_check_frame_rate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint av_timecode_get_smpte_delegate(AVRational @rate, int @drop, int @hh, int @mm, int @ss, int @ff); + public static av_timecode_get_smpte_delegate av_timecode_get_smpte; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint av_timecode_get_smpte_from_framenum_delegate(AVTimecode* @tc, int @framenum); + public static av_timecode_get_smpte_from_framenum_delegate av_timecode_get_smpte_from_framenum; 
+ + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_timecode_init_delegate(AVTimecode* @tc, AVRational @rate, int @flags, int @frame_start, void* @log_ctx); + public static av_timecode_init_delegate av_timecode_init; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_timecode_init_from_components_delegate(AVTimecode* @tc, AVRational @rate, int @flags, int @hh, int @mm, int @ss, int @ff, void* @log_ctx); + public static av_timecode_init_from_components_delegate av_timecode_init_from_components; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_timecode_init_from_string_delegate(AVTimecode* @tc, AVRational @rate, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str, void* @log_ctx); + public static av_timecode_init_from_string_delegate av_timecode_init_from_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_timecode_make_mpeg_tc_string_delegate(byte* @buf, uint @tc25bit); + public static av_timecode_make_mpeg_tc_string_delegate av_timecode_make_mpeg_tc_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_timecode_make_smpte_tc_string_delegate(byte* @buf, uint @tcsmpte, int @prevent_df); + public static av_timecode_make_smpte_tc_string_delegate av_timecode_make_smpte_tc_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_timecode_make_smpte_tc_string2_delegate(byte* @buf, AVRational @rate, uint @tcsmpte, int @prevent_df, int @skip_field); + public static av_timecode_make_smpte_tc_string2_delegate av_timecode_make_smpte_tc_string2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* av_timecode_make_string_delegate(AVTimecode* @tc, byte* @buf, int @framenum); + public static av_timecode_make_string_delegate 
av_timecode_make_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_tree_destroy_delegate(AVTreeNode* @t); + public static av_tree_destroy_delegate av_tree_destroy; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_tree_enumerate_delegate(AVTreeNode* @t, void* @opaque, av_tree_enumerate_cmp_func @cmp, av_tree_enumerate_enu_func @enu); + public static av_tree_enumerate_delegate av_tree_enumerate; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_tree_find_delegate(AVTreeNode* @root, void* @key, av_tree_find_cmp_func @cmp, ref void_ptrArray2 @next); + public static av_tree_find_delegate av_tree_find; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void* av_tree_insert_delegate(AVTreeNode** @rootp, void* @key, av_tree_insert_cmp_func @cmp, AVTreeNode** @next); + public static av_tree_insert_delegate av_tree_insert; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVTreeNode* av_tree_node_alloc_delegate(); + public static av_tree_node_alloc_delegate av_tree_node_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_url_split_delegate(byte* @proto, int @proto_size, byte* @authorization, int @authorization_size, byte* @hostname, int @hostname_size, int* @port_ptr, byte* @path, int @path_size, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url); + public static av_url_split_delegate av_url_split; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_usleep_delegate(uint @usec); + public static av_usleep_delegate av_usleep; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string 
av_version_info_delegate(); + public static av_version_info_delegate av_version_info; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_vlog_delegate(void* @avcl, int @level, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt, byte* @vl); + public static av_vlog_delegate av_vlog; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_write_frame_delegate(AVFormatContext* @s, AVPacket* @pkt); + public static av_write_frame_delegate av_write_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_write_image_line_delegate(ushort* @src, ref byte_ptrArray4 @data, in int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w); + public static av_write_image_line_delegate av_write_image_line; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void av_write_image_line2_delegate(void* @src, ref byte_ptrArray4 @data, in int_array4 @linesize, AVPixFmtDescriptor* @desc, int @x, int @y, int @c, int @w, int @src_element_size); + public static av_write_image_line2_delegate av_write_image_line2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_write_trailer_delegate(AVFormatContext* @s); + public static av_write_trailer_delegate av_write_trailer; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_write_uncoded_frame_delegate(AVFormatContext* @s, int @stream_index, AVFrame* @frame); + public static av_write_uncoded_frame_delegate av_write_uncoded_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int av_write_uncoded_frame_query_delegate(AVFormatContext* @s, int @stream_index); + public static av_write_uncoded_frame_query_delegate av_write_uncoded_frame_query; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public 
delegate uint av_xiphlacing_delegate(byte* @s, uint @v); + public static av_xiphlacing_delegate av_xiphlacing; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avcodec_align_dimensions_delegate(AVCodecContext* @s, int* @width, int* @height); + public static avcodec_align_dimensions_delegate avcodec_align_dimensions; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avcodec_align_dimensions2_delegate(AVCodecContext* @s, int* @width, int* @height, ref int_array8 @linesize_align); + public static avcodec_align_dimensions2_delegate avcodec_align_dimensions2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecContext* avcodec_alloc_context3_delegate(AVCodec* @codec); + public static avcodec_alloc_context3_delegate avcodec_alloc_context3; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVChromaLocation avcodec_chroma_pos_to_enum_delegate(int @xpos, int @ypos); + public static avcodec_chroma_pos_to_enum_delegate avcodec_chroma_pos_to_enum; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_close_delegate(AVCodecContext* @avctx); + public static avcodec_close_delegate avcodec_close; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avcodec_configuration_delegate(); + public static avcodec_configuration_delegate avcodec_configuration; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_decode_subtitle2_delegate(AVCodecContext* @avctx, AVSubtitle* @sub, int* @got_sub_ptr, AVPacket* @avpkt); + public static avcodec_decode_subtitle2_delegate avcodec_decode_subtitle2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_default_execute_delegate(AVCodecContext* @c, avcodec_default_execute_func_func @func, void* @arg, int* @ret, int 
@count, int @size); + public static avcodec_default_execute_delegate avcodec_default_execute; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_default_execute2_delegate(AVCodecContext* @c, avcodec_default_execute2_func_func @func, void* @arg, int* @ret, int @count); + public static avcodec_default_execute2_delegate avcodec_default_execute2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_default_get_buffer2_delegate(AVCodecContext* @s, AVFrame* @frame, int @flags); + public static avcodec_default_get_buffer2_delegate avcodec_default_get_buffer2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_default_get_encode_buffer_delegate(AVCodecContext* @s, AVPacket* @pkt, int @flags); + public static avcodec_default_get_encode_buffer_delegate avcodec_default_get_encode_buffer; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixelFormat avcodec_default_get_format_delegate(AVCodecContext* @s, AVPixelFormat* @fmt); + public static avcodec_default_get_format_delegate avcodec_default_get_format; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecDescriptor* avcodec_descriptor_get_delegate(AVCodecID @id); + public static avcodec_descriptor_get_delegate avcodec_descriptor_get; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecDescriptor* avcodec_descriptor_get_by_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avcodec_descriptor_get_by_name_delegate avcodec_descriptor_get_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecDescriptor* avcodec_descriptor_next_delegate(AVCodecDescriptor* @prev); + public static avcodec_descriptor_next_delegate avcodec_descriptor_next; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_encode_subtitle_delegate(AVCodecContext* @avctx, byte* @buf, int @buf_size, AVSubtitle* @sub); + public static avcodec_encode_subtitle_delegate avcodec_encode_subtitle; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_enum_to_chroma_pos_delegate(int* @xpos, int* @ypos, AVChromaLocation @pos); + public static avcodec_enum_to_chroma_pos_delegate avcodec_enum_to_chroma_pos; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_fill_audio_frame_delegate(AVFrame* @frame, int @nb_channels, AVSampleFormat @sample_fmt, byte* @buf, int @buf_size, int @align); + public static avcodec_fill_audio_frame_delegate avcodec_fill_audio_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVPixelFormat avcodec_find_best_pix_fmt_of_list_delegate(AVPixelFormat* @pix_fmt_list, AVPixelFormat @src_pix_fmt, int @has_alpha, int* @loss_ptr); + public static avcodec_find_best_pix_fmt_of_list_delegate avcodec_find_best_pix_fmt_of_list; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodec* avcodec_find_decoder_delegate(AVCodecID @id); + public static avcodec_find_decoder_delegate avcodec_find_decoder; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodec* avcodec_find_decoder_by_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avcodec_find_decoder_by_name_delegate avcodec_find_decoder_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodec* avcodec_find_encoder_delegate(AVCodecID @id); + public static avcodec_find_encoder_delegate avcodec_find_encoder; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodec* 
avcodec_find_encoder_by_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avcodec_find_encoder_by_name_delegate avcodec_find_encoder_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avcodec_flush_buffers_delegate(AVCodecContext* @avctx); + public static avcodec_flush_buffers_delegate avcodec_flush_buffers; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avcodec_free_context_delegate(AVCodecContext** @avctx); + public static avcodec_free_context_delegate avcodec_free_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* avcodec_get_class_delegate(); + public static avcodec_get_class_delegate avcodec_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* avcodec_get_frame_class_delegate(); + public static avcodec_get_frame_class_delegate avcodec_get_frame_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecHWConfig* avcodec_get_hw_config_delegate(AVCodec* @codec, int @index); + public static avcodec_get_hw_config_delegate avcodec_get_hw_config; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_get_hw_frames_parameters_delegate(AVCodecContext* @avctx, AVBufferRef* @device_ref, AVPixelFormat @hw_pix_fmt, AVBufferRef** @out_frames_ref); + public static avcodec_get_hw_frames_parameters_delegate avcodec_get_hw_frames_parameters; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avcodec_get_name_delegate(AVCodecID @id); + public static avcodec_get_name_delegate avcodec_get_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* 
avcodec_get_subtitle_rect_class_delegate(); + public static avcodec_get_subtitle_rect_class_delegate avcodec_get_subtitle_rect_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVMediaType avcodec_get_type_delegate(AVCodecID @codec_id); + public static avcodec_get_type_delegate avcodec_get_type; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_is_open_delegate(AVCodecContext* @s); + public static avcodec_is_open_delegate avcodec_is_open; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avcodec_license_delegate(); + public static avcodec_license_delegate avcodec_license; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_open2_delegate(AVCodecContext* @avctx, AVCodec* @codec, AVDictionary** @options); + public static avcodec_open2_delegate avcodec_open2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecParameters* avcodec_parameters_alloc_delegate(); + public static avcodec_parameters_alloc_delegate avcodec_parameters_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_parameters_copy_delegate(AVCodecParameters* @dst, AVCodecParameters* @src); + public static avcodec_parameters_copy_delegate avcodec_parameters_copy; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avcodec_parameters_free_delegate(AVCodecParameters** @par); + public static avcodec_parameters_free_delegate avcodec_parameters_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_parameters_from_context_delegate(AVCodecParameters* @par, AVCodecContext* @codec); + public static avcodec_parameters_from_context_delegate avcodec_parameters_from_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
avcodec_parameters_to_context_delegate(AVCodecContext* @codec, AVCodecParameters* @par); + public static avcodec_parameters_to_context_delegate avcodec_parameters_to_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avcodec_pix_fmt_to_codec_tag_delegate(AVPixelFormat @pix_fmt); + public static avcodec_pix_fmt_to_codec_tag_delegate avcodec_pix_fmt_to_codec_tag; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avcodec_profile_name_delegate(AVCodecID @codec_id, int @profile); + public static avcodec_profile_name_delegate avcodec_profile_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_receive_frame_delegate(AVCodecContext* @avctx, AVFrame* @frame); + public static avcodec_receive_frame_delegate avcodec_receive_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_receive_packet_delegate(AVCodecContext* @avctx, AVPacket* @avpkt); + public static avcodec_receive_packet_delegate avcodec_receive_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_send_frame_delegate(AVCodecContext* @avctx, AVFrame* @frame); + public static avcodec_send_frame_delegate avcodec_send_frame; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avcodec_send_packet_delegate(AVCodecContext* @avctx, AVPacket* @avpkt); + public static avcodec_send_packet_delegate avcodec_send_packet; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avcodec_string_delegate(byte* @buf, int @buf_size, AVCodecContext* @enc, int @encode); + public static avcodec_string_delegate avcodec_string; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avcodec_version_delegate(); + public static avcodec_version_delegate avcodec_version; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avdevice_app_to_dev_control_message_delegate(AVFormatContext* @s, AVAppToDevMessageType @type, void* @data, ulong @data_size); + public static avdevice_app_to_dev_control_message_delegate avdevice_app_to_dev_control_message; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avdevice_capabilities_create_delegate(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s, AVDictionary** @device_options); + public static avdevice_capabilities_create_delegate avdevice_capabilities_create; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avdevice_capabilities_free_delegate(AVDeviceCapabilitiesQuery** @caps, AVFormatContext* @s); + public static avdevice_capabilities_free_delegate avdevice_capabilities_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avdevice_configuration_delegate(); + public static avdevice_configuration_delegate avdevice_configuration; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avdevice_dev_to_app_control_message_delegate(AVFormatContext* @s, AVDevToAppMessageType @type, void* @data, ulong @data_size); + public static avdevice_dev_to_app_control_message_delegate avdevice_dev_to_app_control_message; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avdevice_free_list_devices_delegate(AVDeviceInfoList** @device_list); + public static avdevice_free_list_devices_delegate avdevice_free_list_devices; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avdevice_license_delegate(); + public static avdevice_license_delegate avdevice_license; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public 
delegate int avdevice_list_devices_delegate(AVFormatContext* @s, AVDeviceInfoList** @device_list); + public static avdevice_list_devices_delegate avdevice_list_devices; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avdevice_list_input_sources_delegate(AVInputFormat* @device, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list); + public static avdevice_list_input_sources_delegate avdevice_list_input_sources; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avdevice_list_output_sinks_delegate(AVOutputFormat* @device, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @device_name, AVDictionary* @device_options, AVDeviceInfoList** @device_list); + public static avdevice_list_output_sinks_delegate avdevice_list_output_sinks; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avdevice_register_all_delegate(); + public static avdevice_register_all_delegate avdevice_register_all; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avdevice_version_delegate(); + public static avdevice_version_delegate avdevice_version; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_config_links_delegate(AVFilterContext* @filter); + public static avfilter_config_links_delegate avfilter_config_links; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avfilter_configuration_delegate(); + public static avfilter_configuration_delegate avfilter_configuration; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avfilter_filter_pad_count_delegate(AVFilter* @filter, int @is_output); + public static avfilter_filter_pad_count_delegate avfilter_filter_pad_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avfilter_free_delegate(AVFilterContext* @filter); + public static avfilter_free_delegate avfilter_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFilter* avfilter_get_by_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avfilter_get_by_name_delegate avfilter_get_by_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* avfilter_get_class_delegate(); + public static avfilter_get_class_delegate avfilter_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFilterGraph* avfilter_graph_alloc_delegate(); + public static avfilter_graph_alloc_delegate avfilter_graph_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFilterContext* avfilter_graph_alloc_filter_delegate(AVFilterGraph* @graph, AVFilter* @filter, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avfilter_graph_alloc_filter_delegate avfilter_graph_alloc_filter; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_config_delegate(AVFilterGraph* @graphctx, void* @log_ctx); + public static avfilter_graph_config_delegate avfilter_graph_config; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_create_filter_delegate(AVFilterContext** @filt_ctx, AVFilter* @filt, + #if 
NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @args, void* @opaque, AVFilterGraph* @graph_ctx); + public static avfilter_graph_create_filter_delegate avfilter_graph_create_filter; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate byte* avfilter_graph_dump_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @options); + public static avfilter_graph_dump_delegate avfilter_graph_dump; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avfilter_graph_free_delegate(AVFilterGraph** @graph); + public static avfilter_graph_free_delegate avfilter_graph_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFilterContext* avfilter_graph_get_filter_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avfilter_graph_get_filter_delegate avfilter_graph_get_filter; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_parse_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filters, AVFilterInOut* @inputs, AVFilterInOut* @outputs, void* @log_ctx); + public static avfilter_graph_parse_delegate avfilter_graph_parse; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_parse_ptr_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs, void* @log_ctx); + public static avfilter_graph_parse_ptr_delegate avfilter_graph_parse_ptr; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_parse2_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filters, AVFilterInOut** @inputs, AVFilterInOut** @outputs); + public static avfilter_graph_parse2_delegate avfilter_graph_parse2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_queue_command_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @target, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, int @flags, double @ts); + public static avfilter_graph_queue_command_delegate avfilter_graph_queue_command; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_request_oldest_delegate(AVFilterGraph* @graph); + public static avfilter_graph_request_oldest_delegate avfilter_graph_request_oldest; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_graph_send_command_delegate(AVFilterGraph* @graph, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @target, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, byte* @res, int @res_len, int @flags); + public static avfilter_graph_send_command_delegate avfilter_graph_send_command; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avfilter_graph_set_auto_convert_delegate(AVFilterGraph* @graph, uint @flags); + public static avfilter_graph_set_auto_convert_delegate avfilter_graph_set_auto_convert; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_init_dict_delegate(AVFilterContext* @ctx, AVDictionary** @options); + public static avfilter_init_dict_delegate avfilter_init_dict; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_init_str_delegate(AVFilterContext* @ctx, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @args); + public static avfilter_init_str_delegate avfilter_init_str; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFilterInOut* avfilter_inout_alloc_delegate(); + public static avfilter_inout_alloc_delegate avfilter_inout_alloc; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avfilter_inout_free_delegate(AVFilterInOut** @inout); + public 
static avfilter_inout_free_delegate avfilter_inout_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_insert_filter_delegate(AVFilterLink* @link, AVFilterContext* @filt, uint @filt_srcpad_idx, uint @filt_dstpad_idx); + public static avfilter_insert_filter_delegate avfilter_insert_filter; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avfilter_license_delegate(); + public static avfilter_license_delegate avfilter_license; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_link_delegate(AVFilterContext* @src, uint @srcpad, AVFilterContext* @dst, uint @dstpad); + public static avfilter_link_delegate avfilter_link; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avfilter_link_free_delegate(AVFilterLink** @link); + public static avfilter_link_free_delegate avfilter_link_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_pad_count_delegate(AVFilterPad* @pads); + public static avfilter_pad_count_delegate avfilter_pad_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avfilter_pad_get_name_delegate(AVFilterPad* @pads, int @pad_idx); + public static avfilter_pad_get_name_delegate avfilter_pad_get_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVMediaType avfilter_pad_get_type_delegate(AVFilterPad* @pads, int @pad_idx); + public static avfilter_pad_get_type_delegate avfilter_pad_get_type; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avfilter_process_command_delegate(AVFilterContext* @filter, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + 
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @cmd, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @arg, byte* @res, int @res_len, int @flags); + public static avfilter_process_command_delegate avfilter_process_command; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avfilter_version_delegate(); + public static avfilter_version_delegate avfilter_version; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVFormatContext* avformat_alloc_context_delegate(); + public static avformat_alloc_context_delegate avformat_alloc_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_alloc_output_context2_delegate(AVFormatContext** @ctx, AVOutputFormat* @oformat, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @format_name, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @filename); + public static avformat_alloc_output_context2_delegate avformat_alloc_output_context2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avformat_close_input_delegate(AVFormatContext** @s); + public static avformat_close_input_delegate avformat_close_input; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avformat_configuration_delegate(); + public static avformat_configuration_delegate avformat_configuration; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
avformat_find_stream_info_delegate(AVFormatContext* @ic, AVDictionary** @options); + public static avformat_find_stream_info_delegate avformat_find_stream_info; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_flush_delegate(AVFormatContext* @s); + public static avformat_flush_delegate avformat_flush; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avformat_free_context_delegate(AVFormatContext* @s); + public static avformat_free_context_delegate avformat_free_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* avformat_get_class_delegate(); + public static avformat_get_class_delegate avformat_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecTag* avformat_get_mov_audio_tags_delegate(); + public static avformat_get_mov_audio_tags_delegate avformat_get_mov_audio_tags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecTag* avformat_get_mov_video_tags_delegate(); + public static avformat_get_mov_video_tags_delegate avformat_get_mov_video_tags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecTag* avformat_get_riff_audio_tags_delegate(); + public static avformat_get_riff_audio_tags_delegate avformat_get_riff_audio_tags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVCodecTag* avformat_get_riff_video_tags_delegate(); + public static avformat_get_riff_video_tags_delegate avformat_get_riff_video_tags; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_index_get_entries_count_delegate(AVStream* @st); + public static avformat_index_get_entries_count_delegate avformat_index_get_entries_count; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVIndexEntry* avformat_index_get_entry_delegate(AVStream* @st, int @idx); + public static avformat_index_get_entry_delegate 
avformat_index_get_entry; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVIndexEntry* avformat_index_get_entry_from_timestamp_delegate(AVStream* @st, long @wanted_timestamp, int @flags); + public static avformat_index_get_entry_from_timestamp_delegate avformat_index_get_entry_from_timestamp; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_init_output_delegate(AVFormatContext* @s, AVDictionary** @options); + public static avformat_init_output_delegate avformat_init_output; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avformat_license_delegate(); + public static avformat_license_delegate avformat_license; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_match_stream_specifier_delegate(AVFormatContext* @s, AVStream* @st, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @spec); + public static avformat_match_stream_specifier_delegate avformat_match_stream_specifier; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_network_deinit_delegate(); + public static avformat_network_deinit_delegate avformat_network_deinit; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_network_init_delegate(); + public static avformat_network_init_delegate avformat_network_init; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVStream* avformat_new_stream_delegate(AVFormatContext* @s, AVCodec* @c); + public static avformat_new_stream_delegate avformat_new_stream; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_open_input_delegate(AVFormatContext** @ps, + #if NETSTANDARD2_1_OR_GREATER + 
[MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, AVInputFormat* @fmt, AVDictionary** @options); + public static avformat_open_input_delegate avformat_open_input; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_query_codec_delegate(AVOutputFormat* @ofmt, AVCodecID @codec_id, int @std_compliance); + public static avformat_query_codec_delegate avformat_query_codec; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_queue_attached_pictures_delegate(AVFormatContext* @s); + public static avformat_queue_attached_pictures_delegate avformat_queue_attached_pictures; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_seek_file_delegate(AVFormatContext* @s, int @stream_index, long @min_ts, long @ts, long @max_ts, int @flags); + public static avformat_seek_file_delegate avformat_seek_file; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_transfer_internal_stream_timing_info_delegate(AVOutputFormat* @ofmt, AVStream* @ost, AVStream* @ist, AVTimebaseSource @copy_tb); + public static avformat_transfer_internal_stream_timing_info_delegate avformat_transfer_internal_stream_timing_info; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avformat_version_delegate(); + public static avformat_version_delegate avformat_version; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avformat_write_header_delegate(AVFormatContext* @s, AVDictionary** @options); + public static avformat_write_header_delegate avformat_write_header; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_accept_delegate(AVIOContext* @s, AVIOContext** @c); + public static avio_accept_delegate avio_accept; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate 
AVIOContext* avio_alloc_context_delegate(byte* @buffer, int @buffer_size, int @write_flag, void* @opaque, avio_alloc_context_read_packet_func @read_packet, avio_alloc_context_write_packet_func @write_packet, avio_alloc_context_seek_func @seek); + public static avio_alloc_context_delegate avio_alloc_context; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_check_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @flags); + public static avio_check_delegate avio_check; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_close_delegate(AVIOContext* @s); + public static avio_close_delegate avio_close; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_close_dir_delegate(AVIODirContext** @s); + public static avio_close_dir_delegate avio_close_dir; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_close_dyn_buf_delegate(AVIOContext* @s, byte** @pbuffer); + public static avio_close_dyn_buf_delegate avio_close_dyn_buf; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_closep_delegate(AVIOContext** @s); + public static avio_closep_delegate avio_closep; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_context_free_delegate(AVIOContext** @s); + public static avio_context_free_delegate avio_context_free; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avio_enum_protocols_delegate(void** @opaque, int @output); + public static avio_enum_protocols_delegate avio_enum_protocols; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_feof_delegate(AVIOContext* @s); + public static 
avio_feof_delegate avio_feof; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + [return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))] + public delegate string avio_find_protocol_name_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url); + public static avio_find_protocol_name_delegate avio_find_protocol_name; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_flush_delegate(AVIOContext* @s); + public static avio_flush_delegate avio_flush; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_free_directory_entry_delegate(AVIODirEntry** @entry); + public static avio_free_directory_entry_delegate avio_free_directory_entry; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_get_dyn_buf_delegate(AVIOContext* @s, byte** @pbuffer); + public static avio_get_dyn_buf_delegate avio_get_dyn_buf; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_get_str_delegate(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen); + public static avio_get_str_delegate avio_get_str; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_get_str16be_delegate(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen); + public static avio_get_str16be_delegate avio_get_str16be; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_get_str16le_delegate(AVIOContext* @pb, int @maxlen, byte* @buf, int @buflen); + public static avio_get_str16le_delegate avio_get_str16le; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_handshake_delegate(AVIOContext* @c); + public static avio_handshake_delegate avio_handshake; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int 
avio_open_delegate(AVIOContext** @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @flags); + public static avio_open_delegate avio_open; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_open_dir_delegate(AVIODirContext** @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, AVDictionary** @options); + public static avio_open_dir_delegate avio_open_dir; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_open_dyn_buf_delegate(AVIOContext** @s); + public static avio_open_dyn_buf_delegate avio_open_dyn_buf; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_open2_delegate(AVIOContext** @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @url, int @flags, AVIOInterruptCB* @int_cb, AVDictionary** @options); + public static avio_open2_delegate avio_open2; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_pause_delegate(AVIOContext* @h, int @pause); + public static avio_pause_delegate avio_pause; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate void avio_print_string_array_delegate(AVIOContext* @s, byte*[] @strings); + public static avio_print_string_array_delegate avio_print_string_array; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_printf_delegate(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @fmt); + public static 
avio_printf_delegate avio_printf; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate AVClass* avio_protocol_get_class_delegate( + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @name); + public static avio_protocol_get_class_delegate avio_protocol_get_class; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_put_str_delegate(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + public static avio_put_str_delegate avio_put_str; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_put_str16be_delegate(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + public static avio_put_str16be_delegate avio_put_str16be; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_put_str16le_delegate(AVIOContext* @s, + #if NETSTANDARD2_1_OR_GREATER + [MarshalAs(UnmanagedType.LPUTF8Str)] + #else + [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))] + #endif + string @str); + public static avio_put_str16le_delegate avio_put_str16le; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_r8_delegate(AVIOContext* @s); + public static avio_r8_delegate avio_r8; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avio_rb16_delegate(AVIOContext* @s); + public static avio_rb16_delegate avio_rb16; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avio_rb24_delegate(AVIOContext* @s); + public static avio_rb24_delegate avio_rb24; + + 
[UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avio_rb32_delegate(AVIOContext* @s); + public static avio_rb32_delegate avio_rb32; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong avio_rb64_delegate(AVIOContext* @s); + public static avio_rb64_delegate avio_rb64; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_read_delegate(AVIOContext* @s, byte* @buf, int @size); + public static avio_read_delegate avio_read; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_read_dir_delegate(AVIODirContext* @s, AVIODirEntry** @next); + public static avio_read_dir_delegate avio_read_dir; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_read_partial_delegate(AVIOContext* @s, byte* @buf, int @size); + public static avio_read_partial_delegate avio_read_partial; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate int avio_read_to_bprint_delegate(AVIOContext* @h, AVBPrint* @pb, ulong @max_size); + public static avio_read_to_bprint_delegate avio_read_to_bprint; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avio_rl16_delegate(AVIOContext* @s); + public static avio_rl16_delegate avio_rl16; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avio_rl24_delegate(AVIOContext* @s); + public static avio_rl24_delegate avio_rl24; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate uint avio_rl32_delegate(AVIOContext* @s); + public static avio_rl32_delegate avio_rl32; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ulong avio_rl64_delegate(AVIOContext* @s); + public static avio_rl64_delegate avio_rl64; + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate long avio_seek_delegate(AVIOContext* @s, long @offset, int @whence); + public static avio_seek_delegate avio_seek; + + 
// AVIO timestamp seek, size/skip, vprintf, and big-endian writers (wb*).

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate long avio_seek_time_delegate(AVIOContext* @h, int @stream_index, long @timestamp, int @flags);
public static avio_seek_time_delegate avio_seek_time;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate long avio_size_delegate(AVIOContext* @s);
public static avio_size_delegate avio_size;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate long avio_skip_delegate(AVIOContext* @s, long @offset);
public static avio_skip_delegate avio_skip;

// Format string is marshaled as UTF-8; @ap is the raw va_list pointer —
// callers must build it natively, it is not marshaled here.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int avio_vprintf_delegate(AVIOContext* @s,
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @fmt, byte* @ap);
public static avio_vprintf_delegate avio_vprintf;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void avio_w8_delegate(AVIOContext* @s, int @b);
public static avio_w8_delegate avio_w8;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void avio_wb16_delegate(AVIOContext* @s, uint @val);
public static avio_wb16_delegate avio_wb16;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void avio_wb24_delegate(AVIOContext* @s, uint @val);
public static avio_wb24_delegate avio_wb24;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void avio_wb32_delegate(AVIOContext* @s, uint @val);
public static avio_wb32_delegate avio_wb32;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void avio_wb64_delegate(AVIOContext* @s, ulong @val);
public static avio_wb64_delegate avio_wb64;

// First of the little-endian writers (wl*); the rest follow below.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void avio_wl16_delegate(AVIOContext* @s, uint @val);
public static avio_wl16_delegate avio_wl16;

// Remaining AVIO little-endian writers and raw writes, followed by the
// libavutil / libpostproc version-info vectors and the libswresample
// allocation/conversion vectors.

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void avio_wl24_delegate(AVIOContext* @s, uint @val);
public static avio_wl24_delegate avio_wl24;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void avio_wl32_delegate(AVIOContext* @s, uint @val);
public static avio_wl32_delegate avio_wl32;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void avio_wl64_delegate(AVIOContext* @s, ulong @val);
public static avio_wl64_delegate avio_wl64;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void avio_write_delegate(AVIOContext* @s, byte* @buf, int @size);
public static avio_write_delegate avio_write;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void avio_write_marker_delegate(AVIOContext* @s, long @time, AVIODataMarkerType @type);
public static avio_write_marker_delegate avio_write_marker;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void avsubtitle_free_delegate(AVSubtitle* @sub);
public static avsubtitle_free_delegate avsubtitle_free;

// The native functions below return a const char* owned by the library;
// ConstCharPtrMarshaler copies it to a managed string without freeing it.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string avutil_configuration_delegate();
public static avutil_configuration_delegate avutil_configuration;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string avutil_license_delegate();
public static avutil_license_delegate avutil_license;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate uint avutil_version_delegate();
public static avutil_version_delegate avutil_version;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string postproc_configuration_delegate();
public static postproc_configuration_delegate postproc_configuration;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string postproc_license_delegate();
public static postproc_license_delegate postproc_license;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate uint postproc_version_delegate();
public static postproc_version_delegate postproc_version;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void pp_free_context_delegate(void* @ppContext);
public static pp_free_context_delegate pp_free_context;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void pp_free_mode_delegate(void* @mode);
public static pp_free_mode_delegate pp_free_mode;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void* pp_get_context_delegate(int @width, int @height, int @flags);
public static pp_get_context_delegate pp_get_context;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void* pp_get_mode_by_name_and_quality_delegate(
#if NETSTANDARD2_1_OR_GREATER
[MarshalAs(UnmanagedType.LPUTF8Str)]
#else
[MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(UTF8Marshaler))]
#endif
string @name, int @quality);
public static pp_get_mode_by_name_and_quality_delegate pp_get_mode_by_name_and_quality;

// Fixed-size array structs (byte_ptrArray3 / int_array3) are passed by
// readonly reference (`in`) except @dst, which the native side writes.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void pp_postprocess_delegate(in byte_ptrArray3 @src, in int_array3 @srcStride, ref byte_ptrArray3 @dst, in int_array3 @dstStride, int @horizontalSize, int @verticalSize, sbyte* @QP_store, int @QP_stride, void* @mode, void* @ppContext, int @pict_type);
public static pp_postprocess_delegate pp_postprocess;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate SwrContext* swr_alloc_delegate();
public static swr_alloc_delegate swr_alloc;

// Legacy ulong/long channel-layout variant; the *2 variant below takes the
// newer AVChannelLayout struct introduced in FFmpeg 5.1.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate SwrContext* swr_alloc_set_opts_delegate(SwrContext* @s, long @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, long @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx);
public static swr_alloc_set_opts_delegate swr_alloc_set_opts;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_alloc_set_opts2_delegate(SwrContext** @ps, AVChannelLayout* @out_ch_layout, AVSampleFormat @out_sample_fmt, int @out_sample_rate, AVChannelLayout* @in_ch_layout, AVSampleFormat @in_sample_fmt, int @in_sample_rate, int @log_offset, void* @log_ctx);
public static swr_alloc_set_opts2_delegate swr_alloc_set_opts2;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_build_matrix_delegate(ulong @in_layout, ulong @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @rematrix_maxval, double @rematrix_volume, double* @matrix, int @stride, AVMatrixEncoding @matrix_encoding, void* @log_ctx);
public static swr_build_matrix_delegate swr_build_matrix;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_build_matrix2_delegate(AVChannelLayout* @in_layout, AVChannelLayout* @out_layout, double @center_mix_level, double @surround_mix_level, double @lfe_mix_level, double @maxval, double @rematrix_volume, double* @matrix, long @stride, AVMatrixEncoding @matrix_encoding, void* @log_context);
public static swr_build_matrix2_delegate swr_build_matrix2;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void swr_close_delegate(SwrContext* @s);
public static swr_close_delegate swr_close;

// @out / @in are C# keywords escaped with '@' — generated verbatim from the
// native parameter names.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_config_frame_delegate(SwrContext* @swr, AVFrame* @out, AVFrame* @in);
public static swr_config_frame_delegate swr_config_frame;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_convert_delegate(SwrContext* @s, byte** @out, int @out_count, byte** @in, int @in_count);
public static swr_convert_delegate swr_convert;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_convert_frame_delegate(SwrContext* @swr, AVFrame* @output, AVFrame* @input);
public static swr_convert_frame_delegate swr_convert_frame;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_drop_output_delegate(SwrContext* @s, int @count);
public static swr_drop_output_delegate swr_drop_output;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void swr_free_delegate(SwrContext** @s);
public static swr_free_delegate swr_free;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVClass* swr_get_class_delegate();
public static swr_get_class_delegate swr_get_class;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate long swr_get_delay_delegate(SwrContext* @s, long @base);
public static swr_get_delay_delegate swr_get_delay;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_get_out_samples_delegate(SwrContext* @s, int @in_samples);
public static swr_get_out_samples_delegate swr_get_out_samples;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_init_delegate(SwrContext* @s);
public static swr_init_delegate swr_init;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_inject_silence_delegate(SwrContext* @s, int @count);
public static swr_inject_silence_delegate swr_inject_silence;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_is_initialized_delegate(SwrContext* @s);
public static swr_is_initialized_delegate swr_is_initialized;

// Remaining libswresample setters and version-info vectors, followed by
// libswscale context/filter/vector management.

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate long swr_next_pts_delegate(SwrContext* @s, long @pts);
public static swr_next_pts_delegate swr_next_pts;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_set_channel_mapping_delegate(SwrContext* @s, int* @channel_map);
public static swr_set_channel_mapping_delegate swr_set_channel_mapping;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_set_compensation_delegate(SwrContext* @s, int @sample_delta, int @compensation_distance);
public static swr_set_compensation_delegate swr_set_compensation;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int swr_set_matrix_delegate(SwrContext* @s, double* @matrix, int @stride);
public static swr_set_matrix_delegate swr_set_matrix;

// Library-owned const char* returns, copied (not freed) by the marshaler.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string swresample_configuration_delegate();
public static swresample_configuration_delegate swresample_configuration;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string swresample_license_delegate();
public static swresample_license_delegate swresample_license;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate uint swresample_version_delegate();
public static swresample_version_delegate swresample_version;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate SwsContext* sws_alloc_context_delegate();
public static sws_alloc_context_delegate sws_alloc_context;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate SwsVector* sws_allocVec_delegate(int @length);
public static sws_allocVec_delegate sws_allocVec;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void sws_convertPalette8ToPacked24_delegate(byte* @src, byte* @dst, int @num_pixels, byte* @palette);
public static sws_convertPalette8ToPacked24_delegate sws_convertPalette8ToPacked24;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void sws_convertPalette8ToPacked32_delegate(byte* @src, byte* @dst, int @num_pixels, byte* @palette);
public static sws_convertPalette8ToPacked32_delegate sws_convertPalette8ToPacked32;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void sws_frame_end_delegate(SwsContext* @c);
public static sws_frame_end_delegate sws_frame_end;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int sws_frame_start_delegate(SwsContext* @c, AVFrame* @dst, AVFrame* @src);
public static sws_frame_start_delegate sws_frame_start;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void sws_freeContext_delegate(SwsContext* @swsContext);
public static sws_freeContext_delegate sws_freeContext;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void sws_freeFilter_delegate(SwsFilter* @filter);
public static sws_freeFilter_delegate sws_freeFilter;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void sws_freeVec_delegate(SwsVector* @a);
public static sws_freeVec_delegate sws_freeVec;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate AVClass* sws_get_class_delegate();
public static sws_get_class_delegate sws_get_class;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate SwsContext* sws_getCachedContext_delegate(SwsContext* @context, int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param);
public static sws_getCachedContext_delegate sws_getCachedContext;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int* sws_getCoefficients_delegate(int @colorspace);
public static sws_getCoefficients_delegate sws_getCoefficients;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int sws_getColorspaceDetails_delegate(SwsContext* @c, int** @inv_table, int* @srcRange, int** @table, int* @dstRange, int* @brightness, int* @contrast, int* @saturation);
public static sws_getColorspaceDetails_delegate sws_getColorspaceDetails;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate SwsContext* sws_getContext_delegate(int @srcW, int @srcH, AVPixelFormat @srcFormat, int @dstW, int @dstH, AVPixelFormat @dstFormat, int @flags, SwsFilter* @srcFilter, SwsFilter* @dstFilter, double* @param);
public static sws_getContext_delegate sws_getContext;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate SwsFilter* sws_getDefaultFilter_delegate(float @lumaGBlur, float @chromaGBlur, float @lumaSharpen, float @chromaSharpen, float @chromaHShift, float @chromaVShift, int @verbose);
public static sws_getDefaultFilter_delegate sws_getDefaultFilter;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate SwsVector* sws_getGaussianVec_delegate(double @variance, double @quality);
public static sws_getGaussianVec_delegate sws_getGaussianVec;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int sws_init_context_delegate(SwsContext* @sws_context, SwsFilter* @srcFilter, SwsFilter* @dstFilter);
public static sws_init_context_delegate sws_init_context;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int sws_isSupportedEndiannessConversion_delegate(AVPixelFormat @pix_fmt);
public static sws_isSupportedEndiannessConversion_delegate sws_isSupportedEndiannessConversion;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int sws_isSupportedInput_delegate(AVPixelFormat @pix_fmt);
public static sws_isSupportedInput_delegate sws_isSupportedInput;

// Final libswscale vectors: scaling, slice API, colorspace details, and the
// swscale version-info functions; closes the enclosing vectors class.

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int sws_isSupportedOutput_delegate(AVPixelFormat @pix_fmt);
public static sws_isSupportedOutput_delegate sws_isSupportedOutput;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void sws_normalizeVec_delegate(SwsVector* @a, double @height);
public static sws_normalizeVec_delegate sws_normalizeVec;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int sws_receive_slice_delegate(SwsContext* @c, uint @slice_start, uint @slice_height);
public static sws_receive_slice_delegate sws_receive_slice;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate uint sws_receive_slice_alignment_delegate(SwsContext* @c);
public static sws_receive_slice_alignment_delegate sws_receive_slice_alignment;

// Plane pointers/strides are passed as managed arrays (byte*[] / int[]);
// the default marshaler pins them for the duration of the call.
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int sws_scale_delegate(SwsContext* @c, byte*[] @srcSlice, int[] @srcStride, int @srcSliceY, int @srcSliceH, byte*[] @dst, int[] @dstStride);
public static sws_scale_delegate sws_scale;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int sws_scale_frame_delegate(SwsContext* @c, AVFrame* @dst, AVFrame* @src);
public static sws_scale_frame_delegate sws_scale_frame;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate void sws_scaleVec_delegate(SwsVector* @a, double @scalar);
public static sws_scaleVec_delegate sws_scaleVec;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int sws_send_slice_delegate(SwsContext* @c, uint @slice_start, uint @slice_height);
public static sws_send_slice_delegate sws_send_slice;

// int_array4 fixed-size tables passed by readonly reference (`in`).
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int sws_setColorspaceDetails_delegate(SwsContext* @c, in int_array4 @inv_table, int @srcRange, in int_array4 @table, int @dstRange, int @brightness, int @contrast, int @saturation);
public static sws_setColorspaceDetails_delegate sws_setColorspaceDetails;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string swscale_configuration_delegate();
public static swscale_configuration_delegate swscale_configuration;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
[return: MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(ConstCharPtrMarshaler))]
public delegate string swscale_license_delegate();
public static swscale_license_delegate swscale_license;

[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate uint swscale_version_delegate();
public static swscale_version_delegate swscale_version;

}