// ------------------------------------------------------------------------------ // // This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // // ------------------------------------------------------------------------------ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using HexaGen.Runtime; using System.Numerics; using Dalamud.Bindings.ImGui; namespace Dalamud.Bindings.ImPlot { public unsafe partial class ImPlot { /// /// To be documented. /// public static bool ImOverlaps(int minA, int maxA, int minB, int maxB) { byte ret = ImOverlapsNative(minA, maxA, minB, maxB); return ret != 0; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte ImOverlapsNative(uint minA, uint maxA, uint minB, uint maxB) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[476])(minA, maxA, minB, maxB); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[476])(minA, maxA, minB, maxB); #endif } /// /// To be documented. /// public static bool ImOverlaps(uint minA, uint maxA, uint minB, uint maxB) { byte ret = ImOverlapsNative(minA, maxA, minB, maxB); return ret != 0; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte ImOverlapsNative(long minA, long maxA, long minB, long maxB) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[477])(minA, maxA, minB, maxB); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[477])(minA, maxA, minB, maxB); #endif } /// /// To be documented. /// public static bool ImOverlaps(long minA, long maxA, long minB, long maxB) { byte ret = ImOverlapsNative(minA, maxA, minB, maxB); return ret != 0; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte ImOverlapsNative(ulong minA, ulong maxA, ulong minB, ulong maxB) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[478])(minA, maxA, minB, maxB); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[478])(minA, maxA, minB, maxB); #endif } /// /// To be documented. /// public static bool ImOverlaps(ulong minA, ulong maxA, ulong minB, ulong maxB) { byte ret = ImOverlapsNative(minA, maxA, minB, maxB); return ret != 0; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotDateTimeSpec* ImPlotDateTimeSpecNative() { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[479])(); #else return (ImPlotDateTimeSpec*)((delegate* unmanaged[Cdecl])funcTable[479])(); #endif } /// /// To be documented. /// public static ImPlotDateTimeSpecPtr ImPlotDateTimeSpec() { ImPlotDateTimeSpecPtr ret = ImPlotDateTimeSpecNative(); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotDateTimeSpec* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[480])(self); #else ((delegate* unmanaged[Cdecl])funcTable[480])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotDateTimeSpecPtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotDateTimeSpec self) { fixed (ImPlotDateTimeSpec* pself = &self) { DestroyNative((ImPlotDateTimeSpec*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotDateTimeSpec* ImPlotDateTimeSpecNative(ImPlotDateFmt dateFmt, ImPlotTimeFmt timeFmt, byte use24HrClk, byte useIso8601) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[481])(dateFmt, timeFmt, use24HrClk, useIso8601); #else return (ImPlotDateTimeSpec*)((delegate* unmanaged[Cdecl])funcTable[481])(dateFmt, timeFmt, use24HrClk, useIso8601); #endif } /// /// To be documented. /// public static ImPlotDateTimeSpecPtr ImPlotDateTimeSpec(ImPlotDateFmt dateFmt, ImPlotTimeFmt timeFmt, bool use24HrClk, bool useIso8601) { ImPlotDateTimeSpecPtr ret = ImPlotDateTimeSpecNative(dateFmt, timeFmt, use24HrClk ? (byte)1 : (byte)0, useIso8601 ? (byte)1 : (byte)0); return ret; } /// /// To be documented. /// public static ImPlotDateTimeSpecPtr ImPlotDateTimeSpec(ImPlotDateFmt dateFmt, ImPlotTimeFmt timeFmt, bool use24HrClk) { ImPlotDateTimeSpecPtr ret = ImPlotDateTimeSpecNative(dateFmt, timeFmt, use24HrClk ? (byte)1 : (byte)0, (byte)(0)); return ret; } /// /// To be documented. /// public static ImPlotDateTimeSpecPtr ImPlotDateTimeSpec(ImPlotDateFmt dateFmt, ImPlotTimeFmt timeFmt) { ImPlotDateTimeSpecPtr ret = ImPlotDateTimeSpecNative(dateFmt, timeFmt, (byte)(0), (byte)(0)); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotTime* ImPlotTimeNative() { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[482])(); #else return (ImPlotTime*)((delegate* unmanaged[Cdecl])funcTable[482])(); #endif } /// /// To be documented. /// public static ImPlotTimePtr ImPlotTime() { ImPlotTimePtr ret = ImPlotTimeNative(); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotTime* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[483])(self); #else ((delegate* unmanaged[Cdecl])funcTable[483])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotTimePtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotTime self) { fixed (ImPlotTime* pself = &self) { DestroyNative((ImPlotTime*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotTime* ImPlotTimeNative(long s, int us) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[484])(s, us); #else return (ImPlotTime*)((delegate* unmanaged[Cdecl])funcTable[484])(s, us); #endif } /// /// To be documented. /// public static ImPlotTimePtr ImPlotTime(long s, int us) { ImPlotTimePtr ret = ImPlotTimeNative(s, us); return ret; } /// /// To be documented. /// public static ImPlotTimePtr ImPlotTime(long s) { ImPlotTimePtr ret = ImPlotTimeNative(s, (int)(0)); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void RollOverNative(ImPlotTime* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[485])(self); #else ((delegate* unmanaged[Cdecl])funcTable[485])((nint)self); #endif } /// /// To be documented. /// public static void RollOver(ImPlotTimePtr self) { RollOverNative(self); } /// /// To be documented. /// public static void RollOver(ref ImPlotTime self) { fixed (ImPlotTime* pself = &self) { RollOverNative((ImPlotTime*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static double ToDoubleNative(ImPlotTime* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[486])(self); #else return (double)((delegate* unmanaged[Cdecl])funcTable[486])((nint)self); #endif } /// /// To be documented. /// public static double ToDouble(ImPlotTimePtr self) { double ret = ToDoubleNative(self); return ret; } /// /// To be documented. /// public static double ToDouble(ref ImPlotTime self) { fixed (ImPlotTime* pself = &self) { double ret = ToDoubleNative((ImPlotTime*)pself); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void FromDoubleNative(ImPlotTime* pOut, double t) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[487])(pOut, t); #else ((delegate* unmanaged[Cdecl])funcTable[487])((nint)pOut, t); #endif } /// /// To be documented. /// public static ImPlotTime FromDouble(double t) { ImPlotTime ret; FromDoubleNative(&ret, t); return ret; } /// /// To be documented. /// public static void FromDouble(ImPlotTimePtr pOut, double t) { FromDoubleNative(pOut, t); } /// /// To be documented. /// public static void FromDouble(ref ImPlotTime pOut, double t) { fixed (ImPlotTime* ppOut = &pOut) { FromDoubleNative((ImPlotTime*)ppOut, t); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotColormapData* ImPlotColormapDataNative() { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[488])(); #else return (ImPlotColormapData*)((delegate* unmanaged[Cdecl])funcTable[488])(); #endif } /// /// To be documented. /// public static ImPlotColormapDataPtr ImPlotColormapData() { ImPlotColormapDataPtr ret = ImPlotColormapDataNative(); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotColormapData* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[489])(self); #else ((delegate* unmanaged[Cdecl])funcTable[489])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotColormapDataPtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotColormapData self) { fixed (ImPlotColormapData* pself = &self) { DestroyNative((ImPlotColormapData*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int AppendNative(ImPlotColormapData* self, byte* name, uint* keys, int count, byte qual) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[490])(self, name, keys, count, qual); #else return (int)((delegate* unmanaged[Cdecl])funcTable[490])((nint)self, (nint)name, (nint)keys, count, qual); #endif } /// /// To be documented. /// public static int Append(ImPlotColormapDataPtr self, byte* name, uint* keys, int count, bool qual) { int ret = AppendNative(self, name, keys, count, qual ? (byte)1 : (byte)0); return ret; } /// /// To be documented. /// public static int Append(ref ImPlotColormapData self, byte* name, uint* keys, int count, bool qual) { fixed (ImPlotColormapData* pself = &self) { int ret = AppendNative((ImPlotColormapData*)pself, name, keys, count, qual ? (byte)1 : (byte)0); return ret; } } /// /// To be documented. /// public static int Append(ImPlotColormapDataPtr self, ref byte name, uint* keys, int count, bool qual) { fixed (byte* pname = &name) { int ret = AppendNative(self, (byte*)pname, keys, count, qual ? (byte)1 : (byte)0); return ret; } } /// /// To be documented. /// public static int Append(ImPlotColormapDataPtr self, ReadOnlySpan name, uint* keys, int count, bool qual) { fixed (byte* pname = name) { int ret = AppendNative(self, (byte*)pname, keys, count, qual ? (byte)1 : (byte)0); return ret; } } /// /// To be documented. /// public static int Append(ImPlotColormapDataPtr self, string name, uint* keys, int count, bool qual) { byte* pStr0 = null; int pStrSize0 = 0; if (name != null) { pStrSize0 = Utils.GetByteCountUTF8(name); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } int ret = AppendNative(self, pStr0, keys, count, qual ? (byte)1 : (byte)0); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } return ret; } /// /// To be documented. /// public static int Append(ref ImPlotColormapData self, ref byte name, uint* keys, int count, bool qual) { fixed (ImPlotColormapData* pself = &self) { fixed (byte* pname = &name) { int ret = AppendNative((ImPlotColormapData*)pself, (byte*)pname, keys, count, qual ? (byte)1 : (byte)0); return ret; } } } /// /// To be documented. /// public static int Append(ref ImPlotColormapData self, ReadOnlySpan name, uint* keys, int count, bool qual) { fixed (ImPlotColormapData* pself = &self) { fixed (byte* pname = name) { int ret = AppendNative((ImPlotColormapData*)pself, (byte*)pname, keys, count, qual ? (byte)1 : (byte)0); return ret; } } } /// /// To be documented. /// public static int Append(ref ImPlotColormapData self, string name, uint* keys, int count, bool qual) { fixed (ImPlotColormapData* pself = &self) { byte* pStr0 = null; int pStrSize0 = 0; if (name != null) { pStrSize0 = Utils.GetByteCountUTF8(name); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } int ret = AppendNative((ImPlotColormapData*)pself, pStr0, keys, count, qual ? (byte)1 : (byte)0); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void _AppendTableNative(ImPlotColormapData* self, ImPlotColormap cmap) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[491])(self, cmap); #else ((delegate* unmanaged[Cdecl])funcTable[491])((nint)self, cmap); #endif } /// /// To be documented. /// public static void _AppendTable(ImPlotColormapDataPtr self, ImPlotColormap cmap) { _AppendTableNative(self, cmap); } /// /// To be documented. /// public static void _AppendTable(ref ImPlotColormapData self, ImPlotColormap cmap) { fixed (ImPlotColormapData* pself = &self) { _AppendTableNative((ImPlotColormapData*)pself, cmap); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void RebuildTablesNative(ImPlotColormapData* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[492])(self); #else ((delegate* unmanaged[Cdecl])funcTable[492])((nint)self); #endif } /// /// To be documented. /// public static void RebuildTables(ImPlotColormapDataPtr self) { RebuildTablesNative(self); } /// /// To be documented. /// public static void RebuildTables(ref ImPlotColormapData self) { fixed (ImPlotColormapData* pself = &self) { RebuildTablesNative((ImPlotColormapData*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsQualNative(ImPlotColormapData* self, ImPlotColormap cmap) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[493])(self, cmap); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[493])((nint)self, cmap); #endif } /// /// To be documented. /// public static bool IsQual(ImPlotColormapDataPtr self, ImPlotColormap cmap) { byte ret = IsQualNative(self, cmap); return ret != 0; } /// /// To be documented. /// public static bool IsQual(ref ImPlotColormapData self, ImPlotColormap cmap) { fixed (ImPlotColormapData* pself = &self) { byte ret = IsQualNative((ImPlotColormapData*)pself, cmap); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte* GetNameNative(ImPlotColormapData* self, ImPlotColormap cmap) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[494])(self, cmap); #else return (byte*)((delegate* unmanaged[Cdecl])funcTable[494])((nint)self, cmap); #endif } /// /// To be documented. /// public static byte* GetName(ImPlotColormapDataPtr self, ImPlotColormap cmap) { byte* ret = GetNameNative(self, cmap); return ret; } /// /// To be documented. /// public static string GetNameS(ImPlotColormapDataPtr self, ImPlotColormap cmap) { string ret = Utils.DecodeStringUTF8(GetNameNative(self, cmap)); return ret; } /// /// To be documented. /// public static byte* GetName(ref ImPlotColormapData self, ImPlotColormap cmap) { fixed (ImPlotColormapData* pself = &self) { byte* ret = GetNameNative((ImPlotColormapData*)pself, cmap); return ret; } } /// /// To be documented. /// public static string GetNameS(ref ImPlotColormapData self, ImPlotColormap cmap) { fixed (ImPlotColormapData* pself = &self) { string ret = Utils.DecodeStringUTF8(GetNameNative((ImPlotColormapData*)pself, cmap)); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotColormap GetIndexNative(ImPlotColormapData* self, byte* name) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[495])(self, name); #else return (ImPlotColormap)((delegate* unmanaged[Cdecl])funcTable[495])((nint)self, (nint)name); #endif } /// /// To be documented. /// public static ImPlotColormap GetIndex(ImPlotColormapDataPtr self, byte* name) { ImPlotColormap ret = GetIndexNative(self, name); return ret; } /// /// To be documented. /// public static ImPlotColormap GetIndex(ref ImPlotColormapData self, byte* name) { fixed (ImPlotColormapData* pself = &self) { ImPlotColormap ret = GetIndexNative((ImPlotColormapData*)pself, name); return ret; } } /// /// To be documented. /// public static ImPlotColormap GetIndex(ImPlotColormapDataPtr self, ref byte name) { fixed (byte* pname = &name) { ImPlotColormap ret = GetIndexNative(self, (byte*)pname); return ret; } } /// /// To be documented. /// public static ImPlotColormap GetIndex(ImPlotColormapDataPtr self, ReadOnlySpan name) { fixed (byte* pname = name) { ImPlotColormap ret = GetIndexNative(self, (byte*)pname); return ret; } } /// /// To be documented. /// public static ImPlotColormap GetIndex(ImPlotColormapDataPtr self, string name) { byte* pStr0 = null; int pStrSize0 = 0; if (name != null) { pStrSize0 = Utils.GetByteCountUTF8(name); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } ImPlotColormap ret = GetIndexNative(self, pStr0); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } return ret; } /// /// To be documented. /// public static ImPlotColormap GetIndex(ref ImPlotColormapData self, ref byte name) { fixed (ImPlotColormapData* pself = &self) { fixed (byte* pname = &name) { ImPlotColormap ret = GetIndexNative((ImPlotColormapData*)pself, (byte*)pname); return ret; } } } /// /// To be documented. /// public static ImPlotColormap GetIndex(ref ImPlotColormapData self, ReadOnlySpan name) { fixed (ImPlotColormapData* pself = &self) { fixed (byte* pname = name) { ImPlotColormap ret = GetIndexNative((ImPlotColormapData*)pself, (byte*)pname); return ret; } } } /// /// To be documented. /// public static ImPlotColormap GetIndex(ref ImPlotColormapData self, string name) { fixed (ImPlotColormapData* pself = &self) { byte* pStr0 = null; int pStrSize0 = 0; if (name != null) { pStrSize0 = Utils.GetByteCountUTF8(name); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } ImPlotColormap ret = GetIndexNative((ImPlotColormapData*)pself, pStr0); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static uint* GetKeysNative(ImPlotColormapData* self, ImPlotColormap cmap) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[496])(self, cmap); #else return (uint*)((delegate* unmanaged[Cdecl])funcTable[496])((nint)self, cmap); #endif } /// /// To be documented. /// public static uint* GetKeys(ImPlotColormapDataPtr self, ImPlotColormap cmap) { uint* ret = GetKeysNative(self, cmap); return ret; } /// /// To be documented. /// public static uint* GetKeys(ref ImPlotColormapData self, ImPlotColormap cmap) { fixed (ImPlotColormapData* pself = &self) { uint* ret = GetKeysNative((ImPlotColormapData*)pself, cmap); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int GetKeyCountNative(ImPlotColormapData* self, ImPlotColormap cmap) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[497])(self, cmap); #else return (int)((delegate* unmanaged[Cdecl])funcTable[497])((nint)self, cmap); #endif } /// /// To be documented. /// public static int GetKeyCount(ImPlotColormapDataPtr self, ImPlotColormap cmap) { int ret = GetKeyCountNative(self, cmap); return ret; } /// /// To be documented. /// public static int GetKeyCount(ref ImPlotColormapData self, ImPlotColormap cmap) { fixed (ImPlotColormapData* pself = &self) { int ret = GetKeyCountNative((ImPlotColormapData*)pself, cmap); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static uint GetKeyColorNative(ImPlotColormapData* self, ImPlotColormap cmap, int idx) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[498])(self, cmap, idx); #else return (uint)((delegate* unmanaged[Cdecl])funcTable[498])((nint)self, cmap, idx); #endif } /// /// To be documented. /// public static uint GetKeyColor(ImPlotColormapDataPtr self, ImPlotColormap cmap, int idx) { uint ret = GetKeyColorNative(self, cmap, idx); return ret; } /// /// To be documented. /// public static uint GetKeyColor(ref ImPlotColormapData self, ImPlotColormap cmap, int idx) { fixed (ImPlotColormapData* pself = &self) { uint ret = GetKeyColorNative((ImPlotColormapData*)pself, cmap, idx); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void SetKeyColorNative(ImPlotColormapData* self, ImPlotColormap cmap, int idx, uint value) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[499])(self, cmap, idx, value); #else ((delegate* unmanaged[Cdecl])funcTable[499])((nint)self, cmap, idx, value); #endif } /// /// To be documented. /// public static void SetKeyColor(ImPlotColormapDataPtr self, ImPlotColormap cmap, int idx, uint value) { SetKeyColorNative(self, cmap, idx, value); } /// /// To be documented. /// public static void SetKeyColor(ref ImPlotColormapData self, ImPlotColormap cmap, int idx, uint value) { fixed (ImPlotColormapData* pself = &self) { SetKeyColorNative((ImPlotColormapData*)pself, cmap, idx, value); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static uint* GetTableNative(ImPlotColormapData* self, ImPlotColormap cmap) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[500])(self, cmap); #else return (uint*)((delegate* unmanaged[Cdecl])funcTable[500])((nint)self, cmap); #endif } /// /// To be documented. /// public static uint* GetTable(ImPlotColormapDataPtr self, ImPlotColormap cmap) { uint* ret = GetTableNative(self, cmap); return ret; } /// /// To be documented. /// public static uint* GetTable(ref ImPlotColormapData self, ImPlotColormap cmap) { fixed (ImPlotColormapData* pself = &self) { uint* ret = GetTableNative((ImPlotColormapData*)pself, cmap); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int GetTableSizeNative(ImPlotColormapData* self, ImPlotColormap cmap) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[501])(self, cmap); #else return (int)((delegate* unmanaged[Cdecl])funcTable[501])((nint)self, cmap); #endif } /// /// To be documented. /// public static int GetTableSize(ImPlotColormapDataPtr self, ImPlotColormap cmap) { int ret = GetTableSizeNative(self, cmap); return ret; } /// /// To be documented. /// public static int GetTableSize(ref ImPlotColormapData self, ImPlotColormap cmap) { fixed (ImPlotColormapData* pself = &self) { int ret = GetTableSizeNative((ImPlotColormapData*)pself, cmap); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static uint GetTableColorNative(ImPlotColormapData* self, ImPlotColormap cmap, int idx) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[502])(self, cmap, idx); #else return (uint)((delegate* unmanaged[Cdecl])funcTable[502])((nint)self, cmap, idx); #endif } /// /// To be documented. /// public static uint GetTableColor(ImPlotColormapDataPtr self, ImPlotColormap cmap, int idx) { uint ret = GetTableColorNative(self, cmap, idx); return ret; } /// /// To be documented. /// public static uint GetTableColor(ref ImPlotColormapData self, ImPlotColormap cmap, int idx) { fixed (ImPlotColormapData* pself = &self) { uint ret = GetTableColorNative((ImPlotColormapData*)pself, cmap, idx); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static uint LerpTableNative(ImPlotColormapData* self, ImPlotColormap cmap, float t) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[503])(self, cmap, t); #else return (uint)((delegate* unmanaged[Cdecl])funcTable[503])((nint)self, cmap, t); #endif } /// /// To be documented. /// public static uint LerpTable(ImPlotColormapDataPtr self, ImPlotColormap cmap, float t) { uint ret = LerpTableNative(self, cmap, t); return ret; } /// /// To be documented. /// public static uint LerpTable(ref ImPlotColormapData self, ImPlotColormap cmap, float t) { fixed (ImPlotColormapData* pself = &self) { uint ret = LerpTableNative((ImPlotColormapData*)pself, cmap, t); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotPointError* ImPlotPointErrorNative(double x, double y, double neg, double pos) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[504])(x, y, neg, pos); #else return (ImPlotPointError*)((delegate* unmanaged[Cdecl])funcTable[504])(x, y, neg, pos); #endif } /// /// To be documented. /// public static ImPlotPointErrorPtr ImPlotPointError(double x, double y, double neg, double pos) { ImPlotPointErrorPtr ret = ImPlotPointErrorNative(x, y, neg, pos); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotPointError* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[505])(self); #else ((delegate* unmanaged[Cdecl])funcTable[505])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotPointErrorPtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotPointError self) { fixed (ImPlotPointError* pself = &self) { DestroyNative((ImPlotPointError*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotAnnotationCollection* ImPlotAnnotationCollectionNative() { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[506])(); #else return (ImPlotAnnotationCollection*)((delegate* unmanaged[Cdecl])funcTable[506])(); #endif } /// /// To be documented. /// public static ImPlotAnnotationCollectionPtr ImPlotAnnotationCollection() { ImPlotAnnotationCollectionPtr ret = ImPlotAnnotationCollectionNative(); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotAnnotationCollection* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[507])(self); #else ((delegate* unmanaged[Cdecl])funcTable[507])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotAnnotationCollectionPtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotAnnotationCollection self) { fixed (ImPlotAnnotationCollection* pself = &self) { DestroyNative((ImPlotAnnotationCollection*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void AppendVNative(ImPlotAnnotationCollection* self, Vector2 pos, Vector2 off, uint bg, uint fg, byte clamp, byte* fmt, nuint args) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[508])(self, pos, off, bg, fg, clamp, fmt, args); #else ((delegate* unmanaged[Cdecl])funcTable[508])((nint)self, pos, off, bg, fg, clamp, (nint)fmt, args); #endif } /// /// To be documented. /// public static void AppendV(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, byte* fmt, nuint args) { AppendVNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, fmt, args); } /// /// To be documented. /// public static void AppendV(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, byte* fmt, nuint args) { fixed (ImPlotAnnotationCollection* pself = &self) { AppendVNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, fmt, args); } } /// /// To be documented. /// public static void AppendV(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ref byte fmt, nuint args) { fixed (byte* pfmt = &fmt) { AppendVNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt, args); } } /// /// To be documented. /// public static void AppendV(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ReadOnlySpan fmt, nuint args) { fixed (byte* pfmt = fmt) { AppendVNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt, args); } } /// /// To be documented. /// public static void AppendV(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, string fmt, nuint args) { byte* pStr0 = null; int pStrSize0 = 0; if (fmt != null) { pStrSize0 = Utils.GetByteCountUTF8(fmt); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } AppendVNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pStr0, args); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } } /// /// To be documented. /// public static void AppendV(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ref byte fmt, nuint args) { fixed (ImPlotAnnotationCollection* pself = &self) { fixed (byte* pfmt = &fmt) { AppendVNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt, args); } } } /// /// To be documented. /// public static void AppendV(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ReadOnlySpan fmt, nuint args) { fixed (ImPlotAnnotationCollection* pself = &self) { fixed (byte* pfmt = fmt) { AppendVNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt, args); } } } /// /// To be documented. /// public static void AppendV(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, string fmt, nuint args) { fixed (ImPlotAnnotationCollection* pself = &self) { byte* pStr0 = null; int pStrSize0 = 0; if (fmt != null) { pStrSize0 = Utils.GetByteCountUTF8(fmt); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } AppendVNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pStr0, args); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void AppendNative(ImPlotAnnotationCollection* self, Vector2 pos, Vector2 off, uint bg, uint fg, byte clamp, byte* fmt) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[509])(self, pos, off, bg, fg, clamp, fmt); #else ((delegate* unmanaged[Cdecl])funcTable[509])((nint)self, pos, off, bg, fg, clamp, (nint)fmt); #endif } /// /// To be documented. /// public static void Append(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, byte* fmt) { AppendNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, fmt); } /// /// To be documented. /// public static void Append(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, byte* fmt) { fixed (ImPlotAnnotationCollection* pself = &self) { AppendNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, fmt); } } /// /// To be documented. /// public static void Append(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ref byte fmt) { fixed (byte* pfmt = &fmt) { AppendNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt); } } /// /// To be documented. /// public static void Append(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ReadOnlySpan fmt) { fixed (byte* pfmt = fmt) { AppendNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt); } } /// /// To be documented. /// public static void Append(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, string fmt) { byte* pStr0 = null; int pStrSize0 = 0; if (fmt != null) { pStrSize0 = Utils.GetByteCountUTF8(fmt); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } AppendNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pStr0); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } } /// /// To be documented. /// public static void Append(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ref byte fmt) { fixed (ImPlotAnnotationCollection* pself = &self) { fixed (byte* pfmt = &fmt) { AppendNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt); } } } /// /// To be documented. /// public static void Append(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ReadOnlySpan fmt) { fixed (ImPlotAnnotationCollection* pself = &self) { fixed (byte* pfmt = fmt) { AppendNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt); } } } /// /// To be documented. /// public static void Append(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, string fmt) { fixed (ImPlotAnnotationCollection* pself = &self) { byte* pStr0 = null; int pStrSize0 = 0; if (fmt != null) { pStrSize0 = Utils.GetByteCountUTF8(fmt); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } AppendNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pStr0); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte* GetTextNative(ImPlotAnnotationCollection* self, int idx) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[510])(self, idx); #else return (byte*)((delegate* unmanaged[Cdecl])funcTable[510])((nint)self, idx); #endif } /// /// To be documented. /// public static byte* GetText(ImPlotAnnotationCollectionPtr self, int idx) { byte* ret = GetTextNative(self, idx); return ret; } /// /// To be documented. /// public static string GetTextS(ImPlotAnnotationCollectionPtr self, int idx) { string ret = Utils.DecodeStringUTF8(GetTextNative(self, idx)); return ret; } /// /// To be documented. /// public static byte* GetText(ref ImPlotAnnotationCollection self, int idx) { fixed (ImPlotAnnotationCollection* pself = &self) { byte* ret = GetTextNative((ImPlotAnnotationCollection*)pself, idx); return ret; } } /// /// To be documented. /// public static string GetTextS(ref ImPlotAnnotationCollection self, int idx) { fixed (ImPlotAnnotationCollection* pself = &self) { string ret = Utils.DecodeStringUTF8(GetTextNative((ImPlotAnnotationCollection*)pself, idx)); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void ResetNative(ImPlotAnnotationCollection* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[511])(self); #else ((delegate* unmanaged[Cdecl])funcTable[511])((nint)self); #endif } /// /// To be documented. /// public static void Reset(ImPlotAnnotationCollectionPtr self) { ResetNative(self); } /// /// To be documented. /// public static void Reset(ref ImPlotAnnotationCollection self) { fixed (ImPlotAnnotationCollection* pself = &self) { ResetNative((ImPlotAnnotationCollection*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotTagCollection* ImPlotTagCollectionNative() { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[512])(); #else return (ImPlotTagCollection*)((delegate* unmanaged[Cdecl])funcTable[512])(); #endif } /// /// To be documented. /// public static ImPlotTagCollectionPtr ImPlotTagCollection() { ImPlotTagCollectionPtr ret = ImPlotTagCollectionNative(); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotTagCollection* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[513])(self); #else ((delegate* unmanaged[Cdecl])funcTable[513])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotTagCollectionPtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotTagCollection self) { fixed (ImPlotTagCollection* pself = &self) { DestroyNative((ImPlotTagCollection*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void AppendVNative(ImPlotTagCollection* self, ImAxis axis, double value, uint bg, uint fg, byte* fmt, nuint args) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[514])(self, axis, value, bg, fg, fmt, args); #else ((delegate* unmanaged[Cdecl])funcTable[514])((nint)self, axis, value, bg, fg, (nint)fmt, args); #endif } /// /// To be documented. /// public static void AppendV(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, byte* fmt, nuint args) { AppendVNative(self, axis, value, bg, fg, fmt, args); } /// /// To be documented. /// public static void AppendV(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, byte* fmt, nuint args) { fixed (ImPlotTagCollection* pself = &self) { AppendVNative((ImPlotTagCollection*)pself, axis, value, bg, fg, fmt, args); } } /// /// To be documented. /// public static void AppendV(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, ref byte fmt, nuint args) { fixed (byte* pfmt = &fmt) { AppendVNative(self, axis, value, bg, fg, (byte*)pfmt, args); } } /// /// To be documented. /// public static void AppendV(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, ReadOnlySpan fmt, nuint args) { fixed (byte* pfmt = fmt) { AppendVNative(self, axis, value, bg, fg, (byte*)pfmt, args); } } /// /// To be documented. /// public static void AppendV(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, string fmt, nuint args) { byte* pStr0 = null; int pStrSize0 = 0; if (fmt != null) { pStrSize0 = Utils.GetByteCountUTF8(fmt); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } AppendVNative(self, axis, value, bg, fg, pStr0, args); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } } /// /// To be documented. /// public static void AppendV(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, ref byte fmt, nuint args) { fixed (ImPlotTagCollection* pself = &self) { fixed (byte* pfmt = &fmt) { AppendVNative((ImPlotTagCollection*)pself, axis, value, bg, fg, (byte*)pfmt, args); } } } /// /// To be documented. /// public static void AppendV(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, ReadOnlySpan fmt, nuint args) { fixed (ImPlotTagCollection* pself = &self) { fixed (byte* pfmt = fmt) { AppendVNative((ImPlotTagCollection*)pself, axis, value, bg, fg, (byte*)pfmt, args); } } } /// /// To be documented. /// public static void AppendV(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, string fmt, nuint args) { fixed (ImPlotTagCollection* pself = &self) { byte* pStr0 = null; int pStrSize0 = 0; if (fmt != null) { pStrSize0 = Utils.GetByteCountUTF8(fmt); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } AppendVNative((ImPlotTagCollection*)pself, axis, value, bg, fg, pStr0, args); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void AppendNative(ImPlotTagCollection* self, ImAxis axis, double value, uint bg, uint fg, byte* fmt) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[515])(self, axis, value, bg, fg, fmt); #else ((delegate* unmanaged[Cdecl])funcTable[515])((nint)self, axis, value, bg, fg, (nint)fmt); #endif } /// /// To be documented. /// public static void Append(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, byte* fmt) { AppendNative(self, axis, value, bg, fg, fmt); } /// /// To be documented. /// public static void Append(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, byte* fmt) { fixed (ImPlotTagCollection* pself = &self) { AppendNative((ImPlotTagCollection*)pself, axis, value, bg, fg, fmt); } } /// /// To be documented. /// public static void Append(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, ref byte fmt) { fixed (byte* pfmt = &fmt) { AppendNative(self, axis, value, bg, fg, (byte*)pfmt); } } /// /// To be documented. /// public static void Append(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, ReadOnlySpan fmt) { fixed (byte* pfmt = fmt) { AppendNative(self, axis, value, bg, fg, (byte*)pfmt); } } /// /// To be documented. /// public static void Append(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, string fmt) { byte* pStr0 = null; int pStrSize0 = 0; if (fmt != null) { pStrSize0 = Utils.GetByteCountUTF8(fmt); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } AppendNative(self, axis, value, bg, fg, pStr0); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } } /// /// To be documented. /// public static void Append(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, ref byte fmt) { fixed (ImPlotTagCollection* pself = &self) { fixed (byte* pfmt = &fmt) { AppendNative((ImPlotTagCollection*)pself, axis, value, bg, fg, (byte*)pfmt); } } } /// /// To be documented. /// public static void Append(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, ReadOnlySpan fmt) { fixed (ImPlotTagCollection* pself = &self) { fixed (byte* pfmt = fmt) { AppendNative((ImPlotTagCollection*)pself, axis, value, bg, fg, (byte*)pfmt); } } } /// /// To be documented. /// public static void Append(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, string fmt) { fixed (ImPlotTagCollection* pself = &self) { byte* pStr0 = null; int pStrSize0 = 0; if (fmt != null) { pStrSize0 = Utils.GetByteCountUTF8(fmt); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } AppendNative((ImPlotTagCollection*)pself, axis, value, bg, fg, pStr0); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte* GetTextNative(ImPlotTagCollection* self, int idx) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[516])(self, idx); #else return (byte*)((delegate* unmanaged[Cdecl])funcTable[516])((nint)self, idx); #endif } /// /// To be documented. /// public static byte* GetText(ImPlotTagCollectionPtr self, int idx) { byte* ret = GetTextNative(self, idx); return ret; } /// /// To be documented. /// public static string GetTextS(ImPlotTagCollectionPtr self, int idx) { string ret = Utils.DecodeStringUTF8(GetTextNative(self, idx)); return ret; } /// /// To be documented. /// public static byte* GetText(ref ImPlotTagCollection self, int idx) { fixed (ImPlotTagCollection* pself = &self) { byte* ret = GetTextNative((ImPlotTagCollection*)pself, idx); return ret; } } /// /// To be documented. /// public static string GetTextS(ref ImPlotTagCollection self, int idx) { fixed (ImPlotTagCollection* pself = &self) { string ret = Utils.DecodeStringUTF8(GetTextNative((ImPlotTagCollection*)pself, idx)); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void ResetNative(ImPlotTagCollection* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[517])(self); #else ((delegate* unmanaged[Cdecl])funcTable[517])((nint)self); #endif } /// /// To be documented. /// public static void Reset(ImPlotTagCollectionPtr self) { ResetNative(self); } /// /// To be documented. /// public static void Reset(ref ImPlotTagCollection self) { fixed (ImPlotTagCollection* pself = &self) { ResetNative((ImPlotTagCollection*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotTick* ImPlotTickNative(double value, byte major, int level, byte showLabel) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[518])(value, major, level, showLabel); #else return (ImPlotTick*)((delegate* unmanaged[Cdecl])funcTable[518])(value, major, level, showLabel); #endif } /// /// To be documented. /// public static ImPlotTickPtr ImPlotTick(double value, bool major, int level, bool showLabel) { ImPlotTickPtr ret = ImPlotTickNative(value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotTick* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[519])(self); #else ((delegate* unmanaged[Cdecl])funcTable[519])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotTickPtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotTick self) { fixed (ImPlotTick* pself = &self) { DestroyNative((ImPlotTick*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotTicker* ImPlotTickerNative() { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[520])(); #else return (ImPlotTicker*)((delegate* unmanaged[Cdecl])funcTable[520])(); #endif } /// /// To be documented. /// public static ImPlotTickerPtr ImPlotTicker() { ImPlotTickerPtr ret = ImPlotTickerNative(); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotTicker* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[521])(self); #else ((delegate* unmanaged[Cdecl])funcTable[521])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotTickerPtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotTicker self) { fixed (ImPlotTicker* pself = &self) { DestroyNative((ImPlotTicker*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotTick* AddTickNative(ImPlotTicker* self, double value, byte major, int level, byte showLabel, byte* label) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[522])(self, value, major, level, showLabel, label); #else return (ImPlotTick*)((delegate* unmanaged[Cdecl])funcTable[522])((nint)self, value, major, level, showLabel, (nint)label); #endif } /// /// To be documented. /// public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, byte* label) { ImPlotTickPtr ret = AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, label); return ret; } /// /// To be documented. /// public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, byte* label) { fixed (ImPlotTicker* pself = &self) { ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, label); return ret; } } /// /// To be documented. /// public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, ref byte label) { fixed (byte* plabel = &label) { ImPlotTickPtr ret = AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, (byte*)plabel); return ret; } } /// /// To be documented. /// public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, ReadOnlySpan label) { fixed (byte* plabel = label) { ImPlotTickPtr ret = AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, (byte*)plabel); return ret; } } /// /// To be documented. /// public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, string label) { byte* pStr0 = null; int pStrSize0 = 0; if (label != null) { pStrSize0 = Utils.GetByteCountUTF8(label); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(label, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } ImPlotTickPtr ret = AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, pStr0); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } return ret; } /// /// To be documented. /// public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, ref byte label) { fixed (ImPlotTicker* pself = &self) { fixed (byte* plabel = &label) { ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, (byte*)plabel); return ret; } } } /// /// To be documented. /// public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, ReadOnlySpan label) { fixed (ImPlotTicker* pself = &self) { fixed (byte* plabel = label) { ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, (byte*)plabel); return ret; } } } /// /// To be documented. /// public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, string label) { fixed (ImPlotTicker* pself = &self) { byte* pStr0 = null; int pStrSize0 = 0; if (label != null) { pStrSize0 = Utils.GetByteCountUTF8(label); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(label, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, pStr0); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotTick* AddTickNative(ImPlotTicker* self, double value, byte major, int level, byte showLabel, ImPlotFormatter formatter, void* data) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl], void*, ImPlotTick*>)funcTable[523])(self, value, major, level, showLabel, (delegate*)Utils.GetFunctionPointerForDelegate(formatter), data); #else return (ImPlotTick*)((delegate* unmanaged[Cdecl])funcTable[523])((nint)self, value, major, level, showLabel, (nint)Utils.GetFunctionPointerForDelegate(formatter), (nint)data); #endif } /// /// To be documented. /// public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, ImPlotFormatter formatter, void* data) { ImPlotTickPtr ret = AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, formatter, data); return ret; } /// /// To be documented. /// public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, ImPlotFormatter formatter, void* data) { fixed (ImPlotTicker* pself = &self) { ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, formatter, data); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotTick* AddTickNative(ImPlotTicker* self, ImPlotTick tick) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[524])(self, tick); #else return (ImPlotTick*)((delegate* unmanaged[Cdecl])funcTable[524])((nint)self, tick); #endif } /// /// To be documented. /// public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, ImPlotTick tick) { ImPlotTickPtr ret = AddTickNative(self, tick); return ret; } /// /// To be documented. /// public static ImPlotTickPtr AddTick(ref ImPlotTicker self, ImPlotTick tick) { fixed (ImPlotTicker* pself = &self) { ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, tick); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte* GetTextNative(ImPlotTicker* self, int idx) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[525])(self, idx); #else return (byte*)((delegate* unmanaged[Cdecl])funcTable[525])((nint)self, idx); #endif } /// /// To be documented. /// public static byte* GetText(ImPlotTickerPtr self, int idx) { byte* ret = GetTextNative(self, idx); return ret; } /// /// To be documented. /// public static string GetTextS(ImPlotTickerPtr self, int idx) { string ret = Utils.DecodeStringUTF8(GetTextNative(self, idx)); return ret; } /// /// To be documented. /// public static byte* GetText(ref ImPlotTicker self, int idx) { fixed (ImPlotTicker* pself = &self) { byte* ret = GetTextNative((ImPlotTicker*)pself, idx); return ret; } } /// /// To be documented. /// public static string GetTextS(ref ImPlotTicker self, int idx) { fixed (ImPlotTicker* pself = &self) { string ret = Utils.DecodeStringUTF8(GetTextNative((ImPlotTicker*)pself, idx)); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte* GetTextNative(ImPlotTicker* self, ImPlotTick tick) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[526])(self, tick); #else return (byte*)((delegate* unmanaged[Cdecl])funcTable[526])((nint)self, tick); #endif } /// /// To be documented. /// public static byte* GetText(ImPlotTickerPtr self, ImPlotTick tick) { byte* ret = GetTextNative(self, tick); return ret; } /// /// To be documented. /// public static string GetTextS(ImPlotTickerPtr self, ImPlotTick tick) { string ret = Utils.DecodeStringUTF8(GetTextNative(self, tick)); return ret; } /// /// To be documented. /// public static byte* GetText(ref ImPlotTicker self, ImPlotTick tick) { fixed (ImPlotTicker* pself = &self) { byte* ret = GetTextNative((ImPlotTicker*)pself, tick); return ret; } } /// /// To be documented. /// public static string GetTextS(ref ImPlotTicker self, ImPlotTick tick) { fixed (ImPlotTicker* pself = &self) { string ret = Utils.DecodeStringUTF8(GetTextNative((ImPlotTicker*)pself, tick)); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void OverrideSizeLateNative(ImPlotTicker* self, Vector2 size) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[527])(self, size); #else ((delegate* unmanaged[Cdecl])funcTable[527])((nint)self, size); #endif } /// /// To be documented. /// public static void OverrideSizeLate(ImPlotTickerPtr self, Vector2 size) { OverrideSizeLateNative(self, size); } /// /// To be documented. /// public static void OverrideSizeLate(ref ImPlotTicker self, Vector2 size) { fixed (ImPlotTicker* pself = &self) { OverrideSizeLateNative((ImPlotTicker*)pself, size); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void ResetNative(ImPlotTicker* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[528])(self); #else ((delegate* unmanaged[Cdecl])funcTable[528])((nint)self); #endif } /// /// To be documented. /// public static void Reset(ImPlotTickerPtr self) { ResetNative(self); } /// /// To be documented. /// public static void Reset(ref ImPlotTicker self) { fixed (ImPlotTicker* pself = &self) { ResetNative((ImPlotTicker*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int TickCountNative(ImPlotTicker* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[529])(self); #else return (int)((delegate* unmanaged[Cdecl])funcTable[529])((nint)self); #endif } /// /// To be documented. /// public static int TickCount(ImPlotTickerPtr self) { int ret = TickCountNative(self); return ret; } /// /// To be documented. /// public static int TickCount(ref ImPlotTicker self) { fixed (ImPlotTicker* pself = &self) { int ret = TickCountNative((ImPlotTicker*)pself); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotAxis* ImPlotAxisNative() { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[530])(); #else return (ImPlotAxis*)((delegate* unmanaged[Cdecl])funcTable[530])(); #endif } /// /// To be documented. /// public static ImPlotAxisPtr ImPlotAxis() { ImPlotAxisPtr ret = ImPlotAxisNative(); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[531])(self); #else ((delegate* unmanaged[Cdecl])funcTable[531])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotAxisPtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { DestroyNative((ImPlotAxis*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void ResetNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[532])(self); #else ((delegate* unmanaged[Cdecl])funcTable[532])((nint)self); #endif } /// /// To be documented. /// public static void Reset(ImPlotAxisPtr self) { ResetNative(self); } /// /// To be documented. /// public static void Reset(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { ResetNative((ImPlotAxis*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte SetMinNative(ImPlotAxis* self, double min, byte force) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[533])(self, min, force); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[533])((nint)self, min, force); #endif } /// /// To be documented. /// public static bool SetMin(ImPlotAxisPtr self, double min, bool force) { byte ret = SetMinNative(self, min, force ? (byte)1 : (byte)0); return ret != 0; } /// /// To be documented. /// public static bool SetMin(ImPlotAxisPtr self, double min) { byte ret = SetMinNative(self, min, (byte)(0)); return ret != 0; } /// /// To be documented. /// public static bool SetMin(ref ImPlotAxis self, double min, bool force) { fixed (ImPlotAxis* pself = &self) { byte ret = SetMinNative((ImPlotAxis*)pself, min, force ? (byte)1 : (byte)0); return ret != 0; } } /// /// To be documented. /// public static bool SetMin(ref ImPlotAxis self, double min) { fixed (ImPlotAxis* pself = &self) { byte ret = SetMinNative((ImPlotAxis*)pself, min, (byte)(0)); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte SetMaxNative(ImPlotAxis* self, double max, byte force) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[534])(self, max, force); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[534])((nint)self, max, force); #endif } /// /// To be documented. /// public static bool SetMax(ImPlotAxisPtr self, double max, bool force) { byte ret = SetMaxNative(self, max, force ? (byte)1 : (byte)0); return ret != 0; } /// /// To be documented. /// public static bool SetMax(ImPlotAxisPtr self, double max) { byte ret = SetMaxNative(self, max, (byte)(0)); return ret != 0; } /// /// To be documented. /// public static bool SetMax(ref ImPlotAxis self, double max, bool force) { fixed (ImPlotAxis* pself = &self) { byte ret = SetMaxNative((ImPlotAxis*)pself, max, force ? (byte)1 : (byte)0); return ret != 0; } } /// /// To be documented. /// public static bool SetMax(ref ImPlotAxis self, double max) { fixed (ImPlotAxis* pself = &self) { byte ret = SetMaxNative((ImPlotAxis*)pself, max, (byte)(0)); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void SetRangeNative(ImPlotAxis* self, double v1, double v2) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[535])(self, v1, v2); #else ((delegate* unmanaged[Cdecl])funcTable[535])((nint)self, v1, v2); #endif } /// /// To be documented. /// public static void SetRange(ImPlotAxisPtr self, double v1, double v2) { SetRangeNative(self, v1, v2); } /// /// To be documented. /// public static void SetRange(ref ImPlotAxis self, double v1, double v2) { fixed (ImPlotAxis* pself = &self) { SetRangeNative((ImPlotAxis*)pself, v1, v2); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void SetRangeNative(ImPlotAxis* self, ImPlotRange range) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[536])(self, range); #else ((delegate* unmanaged[Cdecl])funcTable[536])((nint)self, range); #endif } /// /// To be documented. /// public static void SetRange(ImPlotAxisPtr self, ImPlotRange range) { SetRangeNative(self, range); } /// /// To be documented. /// public static void SetRange(ref ImPlotAxis self, ImPlotRange range) { fixed (ImPlotAxis* pself = &self) { SetRangeNative((ImPlotAxis*)pself, range); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void SetAspectNative(ImPlotAxis* self, double unitPerPix) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[537])(self, unitPerPix); #else ((delegate* unmanaged[Cdecl])funcTable[537])((nint)self, unitPerPix); #endif } /// /// To be documented. /// public static void SetAspect(ImPlotAxisPtr self, double unitPerPix) { SetAspectNative(self, unitPerPix); } /// /// To be documented. /// public static void SetAspect(ref ImPlotAxis self, double unitPerPix) { fixed (ImPlotAxis* pself = &self) { SetAspectNative((ImPlotAxis*)pself, unitPerPix); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static float PixelSizeNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[538])(self); #else return (float)((delegate* unmanaged[Cdecl])funcTable[538])((nint)self); #endif } /// /// To be documented. /// public static float PixelSize(ImPlotAxisPtr self) { float ret = PixelSizeNative(self); return ret; } /// /// To be documented. /// public static float PixelSize(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { float ret = PixelSizeNative((ImPlotAxis*)pself); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static double GetAspectNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[539])(self); #else return (double)((delegate* unmanaged[Cdecl])funcTable[539])((nint)self); #endif } /// /// To be documented. /// public static double GetAspect(ImPlotAxisPtr self) { double ret = GetAspectNative(self); return ret; } /// /// To be documented. /// public static double GetAspect(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { double ret = GetAspectNative((ImPlotAxis*)pself); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void ConstrainNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[540])(self); #else ((delegate* unmanaged[Cdecl])funcTable[540])((nint)self); #endif } /// /// To be documented. /// public static void Constrain(ImPlotAxisPtr self) { ConstrainNative(self); } /// /// To be documented. /// public static void Constrain(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { ConstrainNative((ImPlotAxis*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void UpdateTransformCacheNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[541])(self); #else ((delegate* unmanaged[Cdecl])funcTable[541])((nint)self); #endif } /// /// To be documented. /// public static void UpdateTransformCache(ImPlotAxisPtr self) { UpdateTransformCacheNative(self); } /// /// To be documented. /// public static void UpdateTransformCache(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { UpdateTransformCacheNative((ImPlotAxis*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static float PlotToPixelsNative(ImPlotAxis* self, double plt) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[542])(self, plt); #else return (float)((delegate* unmanaged[Cdecl])funcTable[542])((nint)self, plt); #endif } /// /// To be documented. /// public static float PlotToPixels(ImPlotAxisPtr self, double plt) { float ret = PlotToPixelsNative(self, plt); return ret; } /// /// To be documented. /// public static float PlotToPixels(ref ImPlotAxis self, double plt) { fixed (ImPlotAxis* pself = &self) { float ret = PlotToPixelsNative((ImPlotAxis*)pself, plt); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static double PixelsToPlotNative(ImPlotAxis* self, float pix) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[543])(self, pix); #else return (double)((delegate* unmanaged[Cdecl])funcTable[543])((nint)self, pix); #endif } /// /// To be documented. /// public static double PixelsToPlot(ImPlotAxisPtr self, float pix) { double ret = PixelsToPlotNative(self, pix); return ret; } /// /// To be documented. /// public static double PixelsToPlot(ref ImPlotAxis self, float pix) { fixed (ImPlotAxis* pself = &self) { double ret = PixelsToPlotNative((ImPlotAxis*)pself, pix); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void ExtendFitNative(ImPlotAxis* self, double v) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[544])(self, v); #else ((delegate* unmanaged[Cdecl])funcTable[544])((nint)self, v); #endif } /// /// To be documented. /// public static void ExtendFit(ImPlotAxisPtr self, double v) { ExtendFitNative(self, v); } /// /// To be documented. /// public static void ExtendFit(ref ImPlotAxis self, double v) { fixed (ImPlotAxis* pself = &self) { ExtendFitNative((ImPlotAxis*)pself, v); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void ExtendFitWithNative(ImPlotAxis* self, ImPlotAxis* alt, double v, double vAlt) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[545])(self, alt, v, vAlt); #else ((delegate* unmanaged[Cdecl])funcTable[545])((nint)self, (nint)alt, v, vAlt); #endif } /// /// To be documented. /// public static void ExtendFitWith(ImPlotAxisPtr self, ImPlotAxisPtr alt, double v, double vAlt) { ExtendFitWithNative(self, alt, v, vAlt); } /// /// To be documented. /// public static void ExtendFitWith(ref ImPlotAxis self, ImPlotAxisPtr alt, double v, double vAlt) { fixed (ImPlotAxis* pself = &self) { ExtendFitWithNative((ImPlotAxis*)pself, alt, v, vAlt); } } /// /// To be documented. /// public static void ExtendFitWith(ImPlotAxisPtr self, ref ImPlotAxis alt, double v, double vAlt) { fixed (ImPlotAxis* palt = &alt) { ExtendFitWithNative(self, (ImPlotAxis*)palt, v, vAlt); } } /// /// To be documented. /// public static void ExtendFitWith(ref ImPlotAxis self, ref ImPlotAxis alt, double v, double vAlt) { fixed (ImPlotAxis* pself = &self) { fixed (ImPlotAxis* palt = &alt) { ExtendFitWithNative((ImPlotAxis*)pself, (ImPlotAxis*)palt, v, vAlt); } } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void ApplyFitNative(ImPlotAxis* self, float padding) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[546])(self, padding); #else ((delegate* unmanaged[Cdecl])funcTable[546])((nint)self, padding); #endif } /// /// To be documented. /// public static void ApplyFit(ImPlotAxisPtr self, float padding) { ApplyFitNative(self, padding); } /// /// To be documented. /// public static void ApplyFit(ref ImPlotAxis self, float padding) { fixed (ImPlotAxis* pself = &self) { ApplyFitNative((ImPlotAxis*)pself, padding); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte HasLabelNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[547])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[547])((nint)self); #endif } /// /// To be documented. /// public static bool HasLabel(ImPlotAxisPtr self) { byte ret = HasLabelNative(self); return ret != 0; } /// /// To be documented. /// public static bool HasLabel(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = HasLabelNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte HasGridLinesNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[548])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[548])((nint)self); #endif } /// /// To be documented. /// public static bool HasGridLines(ImPlotAxisPtr self) { byte ret = HasGridLinesNative(self); return ret != 0; } /// /// To be documented. /// public static bool HasGridLines(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = HasGridLinesNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte HasTickLabelsNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[549])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[549])((nint)self); #endif } /// /// To be documented. /// public static bool HasTickLabels(ImPlotAxisPtr self) { byte ret = HasTickLabelsNative(self); return ret != 0; } /// /// To be documented. /// public static bool HasTickLabels(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = HasTickLabelsNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte HasTickMarksNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[550])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[550])((nint)self); #endif } /// /// To be documented. /// public static bool HasTickMarks(ImPlotAxisPtr self) { byte ret = HasTickMarksNative(self); return ret != 0; } /// /// To be documented. /// public static bool HasTickMarks(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = HasTickMarksNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte WillRenderNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[551])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[551])((nint)self); #endif } /// /// To be documented. /// public static bool WillRender(ImPlotAxisPtr self) { byte ret = WillRenderNative(self); return ret != 0; } /// /// To be documented. /// public static bool WillRender(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = WillRenderNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsOppositeNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[552])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[552])((nint)self); #endif } /// /// To be documented. /// public static bool IsOpposite(ImPlotAxisPtr self) { byte ret = IsOppositeNative(self); return ret != 0; } /// /// To be documented. /// public static bool IsOpposite(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = IsOppositeNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsInvertedNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[553])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[553])((nint)self); #endif } /// /// To be documented. /// public static bool IsInverted(ImPlotAxisPtr self) { byte ret = IsInvertedNative(self); return ret != 0; } /// /// To be documented. /// public static bool IsInverted(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = IsInvertedNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsForegroundNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[554])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[554])((nint)self); #endif } /// /// To be documented. /// public static bool IsForeground(ImPlotAxisPtr self) { byte ret = IsForegroundNative(self); return ret != 0; } /// /// To be documented. /// public static bool IsForeground(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = IsForegroundNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsAutoFittingNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[555])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[555])((nint)self); #endif } /// /// To be documented. /// public static bool IsAutoFitting(ImPlotAxisPtr self) { byte ret = IsAutoFittingNative(self); return ret != 0; } /// /// To be documented. /// public static bool IsAutoFitting(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = IsAutoFittingNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte CanInitFitNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[556])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[556])((nint)self); #endif } /// /// To be documented. /// public static bool CanInitFit(ImPlotAxisPtr self) { byte ret = CanInitFitNative(self); return ret != 0; } /// /// To be documented. /// public static bool CanInitFit(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = CanInitFitNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsRangeLockedNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[557])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[557])((nint)self); #endif } /// /// To be documented. /// public static bool IsRangeLocked(ImPlotAxisPtr self) { byte ret = IsRangeLockedNative(self); return ret != 0; } /// /// To be documented. /// public static bool IsRangeLocked(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = IsRangeLockedNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsLockedMinNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[558])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[558])((nint)self); #endif } /// /// To be documented. /// public static bool IsLockedMin(ImPlotAxisPtr self) { byte ret = IsLockedMinNative(self); return ret != 0; } /// /// To be documented. /// public static bool IsLockedMin(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = IsLockedMinNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsLockedMaxNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[559])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[559])((nint)self); #endif } /// /// To be documented. /// public static bool IsLockedMax(ImPlotAxisPtr self) { byte ret = IsLockedMaxNative(self); return ret != 0; } /// /// To be documented. /// public static bool IsLockedMax(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = IsLockedMaxNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsLockedNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[560])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[560])((nint)self); #endif } /// /// To be documented. /// public static bool IsLocked(ImPlotAxisPtr self) { byte ret = IsLockedNative(self); return ret != 0; } /// /// To be documented. /// public static bool IsLocked(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = IsLockedNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsInputLockedMinNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[561])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[561])((nint)self); #endif } /// /// To be documented. /// public static bool IsInputLockedMin(ImPlotAxisPtr self) { byte ret = IsInputLockedMinNative(self); return ret != 0; } /// /// To be documented. /// public static bool IsInputLockedMin(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = IsInputLockedMinNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsInputLockedMaxNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[562])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[562])((nint)self); #endif } /// /// To be documented. /// public static bool IsInputLockedMax(ImPlotAxisPtr self) { byte ret = IsInputLockedMaxNative(self); return ret != 0; } /// /// To be documented. /// public static bool IsInputLockedMax(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = IsInputLockedMaxNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsInputLockedNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[563])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[563])((nint)self); #endif } /// /// To be documented. /// public static bool IsInputLocked(ImPlotAxisPtr self) { byte ret = IsInputLockedNative(self); return ret != 0; } /// /// To be documented. /// public static bool IsInputLocked(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = IsInputLockedNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte HasMenusNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[564])(self); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[564])((nint)self); #endif } /// /// To be documented. /// public static bool HasMenus(ImPlotAxisPtr self) { byte ret = HasMenusNative(self); return ret != 0; } /// /// To be documented. /// public static bool HasMenus(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { byte ret = HasMenusNative((ImPlotAxis*)pself); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static byte IsPanLockedNative(ImPlotAxis* self, byte increasing) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[565])(self, increasing); #else return (byte)((delegate* unmanaged[Cdecl])funcTable[565])((nint)self, increasing); #endif } /// /// To be documented. /// public static bool IsPanLocked(ImPlotAxisPtr self, bool increasing) { byte ret = IsPanLockedNative(self, increasing ? (byte)1 : (byte)0); return ret != 0; } /// /// To be documented. /// public static bool IsPanLocked(ref ImPlotAxis self, bool increasing) { fixed (ImPlotAxis* pself = &self) { byte ret = IsPanLockedNative((ImPlotAxis*)pself, increasing ? (byte)1 : (byte)0); return ret != 0; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void PushLinksNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[566])(self); #else ((delegate* unmanaged[Cdecl])funcTable[566])((nint)self); #endif } /// /// To be documented. /// public static void PushLinks(ImPlotAxisPtr self) { PushLinksNative(self); } /// /// To be documented. /// public static void PushLinks(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { PushLinksNative((ImPlotAxis*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void PullLinksNative(ImPlotAxis* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[567])(self); #else ((delegate* unmanaged[Cdecl])funcTable[567])((nint)self); #endif } /// /// To be documented. /// public static void PullLinks(ImPlotAxisPtr self) { PullLinksNative(self); } /// /// To be documented. /// public static void PullLinks(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { PullLinksNative((ImPlotAxis*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotAlignmentData* ImPlotAlignmentDataNative() { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[568])(); #else return (ImPlotAlignmentData*)((delegate* unmanaged[Cdecl])funcTable[568])(); #endif } /// /// To be documented. /// public static ImPlotAlignmentDataPtr ImPlotAlignmentData() { ImPlotAlignmentDataPtr ret = ImPlotAlignmentDataNative(); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotAlignmentData* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[569])(self); #else ((delegate* unmanaged[Cdecl])funcTable[569])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotAlignmentDataPtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotAlignmentData self) { fixed (ImPlotAlignmentData* pself = &self) { DestroyNative((ImPlotAlignmentData*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void BeginNative(ImPlotAlignmentData* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[570])(self); #else ((delegate* unmanaged[Cdecl])funcTable[570])((nint)self); #endif } /// /// To be documented. /// public static void Begin(ImPlotAlignmentDataPtr self) { BeginNative(self); } /// /// To be documented. /// public static void Begin(ref ImPlotAlignmentData self) { fixed (ImPlotAlignmentData* pself = &self) { BeginNative((ImPlotAlignmentData*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void UpdateNative(ImPlotAlignmentData* self, float* padA, float* padB, float* deltaA, float* deltaB) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[571])(self, padA, padB, deltaA, deltaB); #else ((delegate* unmanaged[Cdecl])funcTable[571])((nint)self, (nint)padA, (nint)padB, (nint)deltaA, (nint)deltaB); #endif } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, float* padA, float* padB, float* deltaA, float* deltaB) { UpdateNative(self, padA, padB, deltaA, deltaB); } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, float* padA, float* padB, float* deltaA, float* deltaB) { fixed (ImPlotAlignmentData* pself = &self) { UpdateNative((ImPlotAlignmentData*)pself, padA, padB, deltaA, deltaB); } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, ref float padA, float* padB, float* deltaA, float* deltaB) { fixed (float* ppadA = &padA) { UpdateNative(self, (float*)ppadA, padB, deltaA, deltaB); } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, ref float padA, float* padB, float* deltaA, float* deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* ppadA = &padA) { UpdateNative((ImPlotAlignmentData*)pself, (float*)ppadA, padB, deltaA, deltaB); } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, float* padA, ref float padB, float* deltaA, float* deltaB) { fixed (float* ppadB = &padB) { UpdateNative(self, padA, (float*)ppadB, deltaA, deltaB); } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, float* padA, ref float padB, float* deltaA, float* deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* ppadB = &padB) { UpdateNative((ImPlotAlignmentData*)pself, padA, (float*)ppadB, deltaA, deltaB); } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, ref float padA, ref float padB, float* deltaA, float* deltaB) { fixed (float* ppadA = &padA) { fixed (float* ppadB = &padB) { UpdateNative(self, (float*)ppadA, (float*)ppadB, deltaA, deltaB); } } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, ref float padA, ref float padB, float* deltaA, float* deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* ppadA = &padA) { fixed (float* ppadB = &padB) { UpdateNative((ImPlotAlignmentData*)pself, (float*)ppadA, (float*)ppadB, deltaA, deltaB); } } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, float* padA, float* padB, ref float deltaA, float* deltaB) { fixed (float* pdeltaA = &deltaA) { UpdateNative(self, padA, padB, (float*)pdeltaA, deltaB); } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, float* padA, float* padB, ref float deltaA, float* deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* pdeltaA = &deltaA) { UpdateNative((ImPlotAlignmentData*)pself, padA, padB, (float*)pdeltaA, deltaB); } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, ref float padA, float* padB, ref float deltaA, float* deltaB) { fixed (float* ppadA = &padA) { fixed (float* pdeltaA = &deltaA) { UpdateNative(self, (float*)ppadA, padB, (float*)pdeltaA, deltaB); } } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, ref float padA, float* padB, ref float deltaA, float* deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* ppadA = &padA) { fixed (float* pdeltaA = &deltaA) { UpdateNative((ImPlotAlignmentData*)pself, (float*)ppadA, padB, (float*)pdeltaA, deltaB); } } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, float* padA, ref float padB, ref float deltaA, float* deltaB) { fixed (float* ppadB = &padB) { fixed (float* pdeltaA = &deltaA) { UpdateNative(self, padA, (float*)ppadB, (float*)pdeltaA, deltaB); } } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, float* padA, ref float padB, ref float deltaA, float* deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* ppadB = &padB) { fixed (float* pdeltaA = &deltaA) { UpdateNative((ImPlotAlignmentData*)pself, padA, (float*)ppadB, (float*)pdeltaA, deltaB); } } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, ref float padA, ref float padB, ref float deltaA, float* deltaB) { fixed (float* ppadA = &padA) { fixed (float* ppadB = &padB) { fixed (float* pdeltaA = &deltaA) { UpdateNative(self, (float*)ppadA, (float*)ppadB, (float*)pdeltaA, deltaB); } } } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, ref float padA, ref float padB, ref float deltaA, float* deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* ppadA = &padA) { fixed (float* ppadB = &padB) { fixed (float* pdeltaA = &deltaA) { UpdateNative((ImPlotAlignmentData*)pself, (float*)ppadA, (float*)ppadB, (float*)pdeltaA, deltaB); } } } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, float* padA, float* padB, float* deltaA, ref float deltaB) { fixed (float* pdeltaB = &deltaB) { UpdateNative(self, padA, padB, deltaA, (float*)pdeltaB); } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, float* padA, float* padB, float* deltaA, ref float deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* pdeltaB = &deltaB) { UpdateNative((ImPlotAlignmentData*)pself, padA, padB, deltaA, (float*)pdeltaB); } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, ref float padA, float* padB, float* deltaA, ref float deltaB) { fixed (float* ppadA = &padA) { fixed (float* pdeltaB = &deltaB) { UpdateNative(self, (float*)ppadA, padB, deltaA, (float*)pdeltaB); } } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, ref float padA, float* padB, float* deltaA, ref float deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* ppadA = &padA) { fixed (float* pdeltaB = &deltaB) { UpdateNative((ImPlotAlignmentData*)pself, (float*)ppadA, padB, deltaA, (float*)pdeltaB); } } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, float* padA, ref float padB, float* deltaA, ref float deltaB) { fixed (float* ppadB = &padB) { fixed (float* pdeltaB = &deltaB) { UpdateNative(self, padA, (float*)ppadB, deltaA, (float*)pdeltaB); } } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, float* padA, ref float padB, float* deltaA, ref float deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* ppadB = &padB) { fixed (float* pdeltaB = &deltaB) { UpdateNative((ImPlotAlignmentData*)pself, padA, (float*)ppadB, deltaA, (float*)pdeltaB); } } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, ref float padA, ref float padB, float* deltaA, ref float deltaB) { fixed (float* ppadA = &padA) { fixed (float* ppadB = &padB) { fixed (float* pdeltaB = &deltaB) { UpdateNative(self, (float*)ppadA, (float*)ppadB, deltaA, (float*)pdeltaB); } } } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, ref float padA, ref float padB, float* deltaA, ref float deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* ppadA = &padA) { fixed (float* ppadB = &padB) { fixed (float* pdeltaB = &deltaB) { UpdateNative((ImPlotAlignmentData*)pself, (float*)ppadA, (float*)ppadB, deltaA, (float*)pdeltaB); } } } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, float* padA, float* padB, ref float deltaA, ref float deltaB) { fixed (float* pdeltaA = &deltaA) { fixed (float* pdeltaB = &deltaB) { UpdateNative(self, padA, padB, (float*)pdeltaA, (float*)pdeltaB); } } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, float* padA, float* padB, ref float deltaA, ref float deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* pdeltaA = &deltaA) { fixed (float* pdeltaB = &deltaB) { UpdateNative((ImPlotAlignmentData*)pself, padA, padB, (float*)pdeltaA, (float*)pdeltaB); } } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, ref float padA, float* padB, ref float deltaA, ref float deltaB) { fixed (float* ppadA = &padA) { fixed (float* pdeltaA = &deltaA) { fixed (float* pdeltaB = &deltaB) { UpdateNative(self, (float*)ppadA, padB, (float*)pdeltaA, (float*)pdeltaB); } } } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, ref float padA, float* padB, ref float deltaA, ref float deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* ppadA = &padA) { fixed (float* pdeltaA = &deltaA) { fixed (float* pdeltaB = &deltaB) { UpdateNative((ImPlotAlignmentData*)pself, (float*)ppadA, padB, (float*)pdeltaA, (float*)pdeltaB); } } } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, float* padA, ref float padB, ref float deltaA, ref float deltaB) { fixed (float* ppadB = &padB) { fixed (float* pdeltaA = &deltaA) { fixed (float* pdeltaB = &deltaB) { UpdateNative(self, padA, (float*)ppadB, (float*)pdeltaA, (float*)pdeltaB); } } } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, float* padA, ref float padB, ref float deltaA, ref float deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* ppadB = &padB) { fixed (float* pdeltaA = &deltaA) { fixed (float* pdeltaB = &deltaB) { UpdateNative((ImPlotAlignmentData*)pself, padA, (float*)ppadB, (float*)pdeltaA, (float*)pdeltaB); } } } } } /// /// To be documented. /// public static void Update(ImPlotAlignmentDataPtr self, ref float padA, ref float padB, ref float deltaA, ref float deltaB) { fixed (float* ppadA = &padA) { fixed (float* ppadB = &padB) { fixed (float* pdeltaA = &deltaA) { fixed (float* pdeltaB = &deltaB) { UpdateNative(self, (float*)ppadA, (float*)ppadB, (float*)pdeltaA, (float*)pdeltaB); } } } } } /// /// To be documented. /// public static void Update(ref ImPlotAlignmentData self, ref float padA, ref float padB, ref float deltaA, ref float deltaB) { fixed (ImPlotAlignmentData* pself = &self) { fixed (float* ppadA = &padA) { fixed (float* ppadB = &padB) { fixed (float* pdeltaA = &deltaA) { fixed (float* pdeltaB = &deltaB) { UpdateNative((ImPlotAlignmentData*)pself, (float*)ppadA, (float*)ppadB, (float*)pdeltaA, (float*)pdeltaB); } } } } } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void EndNative(ImPlotAlignmentData* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[572])(self); #else ((delegate* unmanaged[Cdecl])funcTable[572])((nint)self); #endif } /// /// To be documented. /// public static void End(ImPlotAlignmentDataPtr self) { EndNative(self); } /// /// To be documented. /// public static void End(ref ImPlotAlignmentData self) { fixed (ImPlotAlignmentData* pself = &self) { EndNative((ImPlotAlignmentData*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void ResetNative(ImPlotAlignmentData* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[573])(self); #else ((delegate* unmanaged[Cdecl])funcTable[573])((nint)self); #endif } /// /// To be documented. /// public static void Reset(ImPlotAlignmentDataPtr self) { ResetNative(self); } /// /// To be documented. /// public static void Reset(ref ImPlotAlignmentData self) { fixed (ImPlotAlignmentData* pself = &self) { ResetNative((ImPlotAlignmentData*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotItem* ImPlotItemNative() { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[574])(); #else return (ImPlotItem*)((delegate* unmanaged[Cdecl])funcTable[574])(); #endif } /// /// To be documented. /// public static ImPlotItemPtr ImPlotItem() { ImPlotItemPtr ret = ImPlotItemNative(); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotItem* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[575])(self); #else ((delegate* unmanaged[Cdecl])funcTable[575])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotItemPtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotItem self) { fixed (ImPlotItem* pself = &self) { DestroyNative((ImPlotItem*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotLegend* ImPlotLegendNative() { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[576])(); #else return (ImPlotLegend*)((delegate* unmanaged[Cdecl])funcTable[576])(); #endif } /// /// To be documented. /// public static ImPlotLegendPtr ImPlotLegend() { ImPlotLegendPtr ret = ImPlotLegendNative(); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotLegend* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[577])(self); #else ((delegate* unmanaged[Cdecl])funcTable[577])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotLegendPtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotLegend self) { fixed (ImPlotLegend* pself = &self) { DestroyNative((ImPlotLegend*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void ResetNative(ImPlotLegend* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[578])(self); #else ((delegate* unmanaged[Cdecl])funcTable[578])((nint)self); #endif } /// /// To be documented. /// public static void Reset(ImPlotLegendPtr self) { ResetNative(self); } /// /// To be documented. /// public static void Reset(ref ImPlotLegend self) { fixed (ImPlotLegend* pself = &self) { ResetNative((ImPlotLegend*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotItemGroup* ImPlotItemGroupNative() { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[579])(); #else return (ImPlotItemGroup*)((delegate* unmanaged[Cdecl])funcTable[579])(); #endif } /// /// To be documented. /// public static ImPlotItemGroupPtr ImPlotItemGroup() { ImPlotItemGroupPtr ret = ImPlotItemGroupNative(); return ret; } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void DestroyNative(ImPlotItemGroup* self) { #if NET5_0_OR_GREATER ((delegate* unmanaged[Cdecl])funcTable[580])(self); #else ((delegate* unmanaged[Cdecl])funcTable[580])((nint)self); #endif } /// /// To be documented. /// public static void Destroy(ImPlotItemGroupPtr self) { DestroyNative(self); } /// /// To be documented. /// public static void Destroy(ref ImPlotItemGroup self) { fixed (ImPlotItemGroup* pself = &self) { DestroyNative((ImPlotItemGroup*)pself); } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static int GetItemCountNative(ImPlotItemGroup* self) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[581])(self); #else return (int)((delegate* unmanaged[Cdecl])funcTable[581])((nint)self); #endif } /// /// To be documented. /// public static int GetItemCount(ImPlotItemGroupPtr self) { int ret = GetItemCountNative(self); return ret; } /// /// To be documented. /// public static int GetItemCount(ref ImPlotItemGroup self) { fixed (ImPlotItemGroup* pself = &self) { int ret = GetItemCountNative((ImPlotItemGroup*)pself); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static uint GetItemIDNative(ImPlotItemGroup* self, byte* labelId) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[582])(self, labelId); #else return (uint)((delegate* unmanaged[Cdecl])funcTable[582])((nint)self, (nint)labelId); #endif } /// /// To be documented. /// public static uint GetItemID(ImPlotItemGroupPtr self, byte* labelId) { uint ret = GetItemIDNative(self, labelId); return ret; } /// /// To be documented. /// public static uint GetItemID(ref ImPlotItemGroup self, byte* labelId) { fixed (ImPlotItemGroup* pself = &self) { uint ret = GetItemIDNative((ImPlotItemGroup*)pself, labelId); return ret; } } /// /// To be documented. /// public static uint GetItemID(ImPlotItemGroupPtr self, ref byte labelId) { fixed (byte* plabelId = &labelId) { uint ret = GetItemIDNative(self, (byte*)plabelId); return ret; } } /// /// To be documented. /// public static uint GetItemID(ImPlotItemGroupPtr self, ReadOnlySpan labelId) { fixed (byte* plabelId = labelId) { uint ret = GetItemIDNative(self, (byte*)plabelId); return ret; } } /// /// To be documented. /// public static uint GetItemID(ImPlotItemGroupPtr self, string labelId) { byte* pStr0 = null; int pStrSize0 = 0; if (labelId != null) { pStrSize0 = Utils.GetByteCountUTF8(labelId); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(labelId, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } uint ret = GetItemIDNative(self, pStr0); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } return ret; } /// /// To be documented. /// public static uint GetItemID(ref ImPlotItemGroup self, ref byte labelId) { fixed (ImPlotItemGroup* pself = &self) { fixed (byte* plabelId = &labelId) { uint ret = GetItemIDNative((ImPlotItemGroup*)pself, (byte*)plabelId); return ret; } } } /// /// To be documented. /// public static uint GetItemID(ref ImPlotItemGroup self, ReadOnlySpan labelId) { fixed (ImPlotItemGroup* pself = &self) { fixed (byte* plabelId = labelId) { uint ret = GetItemIDNative((ImPlotItemGroup*)pself, (byte*)plabelId); return ret; } } } /// /// To be documented. /// public static uint GetItemID(ref ImPlotItemGroup self, string labelId) { fixed (ImPlotItemGroup* pself = &self) { byte* pStr0 = null; int pStrSize0 = 0; if (labelId != null) { pStrSize0 = Utils.GetByteCountUTF8(labelId); if (pStrSize0 >= Utils.MaxStackallocSize) { pStr0 = Utils.Alloc(pStrSize0 + 1); } else { byte* pStrStack0 = stackalloc byte[pStrSize0 + 1]; pStr0 = pStrStack0; } int pStrOffset0 = Utils.EncodeStringUTF8(labelId, pStr0, pStrSize0); pStr0[pStrOffset0] = 0; } uint ret = GetItemIDNative((ImPlotItemGroup*)pself, pStr0); if (pStrSize0 >= Utils.MaxStackallocSize) { Utils.Free(pStr0); } return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotItem* GetItemNative(ImPlotItemGroup* self, uint id) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[583])(self, id); #else return (ImPlotItem*)((delegate* unmanaged[Cdecl])funcTable[583])((nint)self, id); #endif } /// /// To be documented. /// public static ImPlotItemPtr GetItem(ImPlotItemGroupPtr self, uint id) { ImPlotItemPtr ret = GetItemNative(self, id); return ret; } /// /// To be documented. /// public static ImPlotItemPtr GetItem(ref ImPlotItemGroup self, uint id) { fixed (ImPlotItemGroup* pself = &self) { ImPlotItemPtr ret = GetItemNative((ImPlotItemGroup*)pself, id); return ret; } } /// /// To be documented. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static ImPlotItem* GetItemNative(ImPlotItemGroup* self, byte* labelId) { #if NET5_0_OR_GREATER return ((delegate* unmanaged[Cdecl])funcTable[584])(self, labelId); #else return (ImPlotItem*)((delegate* unmanaged[Cdecl])funcTable[584])((nint)self, (nint)labelId); #endif } /// /// To be documented. /// public static ImPlotItemPtr GetItem(ImPlotItemGroupPtr self, byte* labelId) { ImPlotItemPtr ret = GetItemNative(self, labelId); return ret; } } }