// ------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
// ------------------------------------------------------------------------------

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using HexaGen.Runtime;
using System.Numerics;
using Dalamud.Bindings.ImGui;

namespace Dalamud.Bindings.ImPlot
{
    public unsafe partial class ImPlot
    {
        /// <summary>To be documented.</summary>
        public static double ImMean(long* values, int count) => ImMeanNative(values, count);

        /// <summary>To be documented.</summary>
        public static double ImMean(ref long values, int count)
        {
            // Pin the managed reference so its address can cross the native boundary.
            fixed (long* pvalues = &values)
            {
                return ImMeanNative(pvalues, count);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double ImMeanNative(ulong* values, int count)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ulong*, int, double>)funcTable[426])(values, count);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[426])((nint)values, count);
#endif
        }

        /// <summary>To be documented.</summary>
        public static double ImMean(ulong* values, int count) => ImMeanNative(values, count);

        /// <summary>To be documented.</summary>
        public static double ImMean(ref ulong values, int count)
        {
            fixed (ulong* pvalues = &values)
            {
                return ImMeanNative(pvalues, count);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double ImStdDevNative(float* values, int count)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<float*, int, double>)funcTable[427])(values, count);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[427])((nint)values, count);
#endif
        }
/// <summary>To be documented.</summary>
        public static double ImStdDev(float* values, int count) => ImStdDevNative(values, count);

        /// <summary>To be documented.</summary>
        public static double ImStdDev(ref float values, int count)
        {
            fixed (float* pvalues = &values)
            {
                return ImStdDevNative(pvalues, count);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double ImStdDevNative(double* values, int count)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<double*, int, double>)funcTable[428])(values, count);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[428])((nint)values, count);
#endif
        }

        /// <summary>To be documented.</summary>
        public static double ImStdDev(double* values, int count) => ImStdDevNative(values, count);

        /// <summary>To be documented.</summary>
        public static double ImStdDev(ref double values, int count)
        {
            fixed (double* pvalues = &values)
            {
                return ImStdDevNative(pvalues, count);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double ImStdDevNative(byte* values, int count)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<byte*, int, double>)funcTable[429])(values, count);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[429])((nint)values, count);
#endif
        }

        /// <summary>To be documented.</summary>
        public static double ImStdDev(byte* values, int count) => ImStdDevNative(values, count);

        /// <summary>To be documented.</summary>
        public static double ImStdDev(ref byte values, int count)
        {
            fixed (byte* pvalues = &values)
            {
                return ImStdDevNative(pvalues, count);
            }
        }
/// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double ImStdDevNative(short* values, int count)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<short*, int, double>)funcTable[430])(values, count);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[430])((nint)values, count);
#endif
        }

        /// <summary>To be documented.</summary>
        public static double ImStdDev(short* values, int count) => ImStdDevNative(values, count);

        /// <summary>To be documented.</summary>
        public static double ImStdDev(ref short values, int count)
        {
            fixed (short* pvalues = &values)
            {
                return ImStdDevNative(pvalues, count);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double ImStdDevNative(ushort* values, int count)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ushort*, int, double>)funcTable[431])(values, count);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[431])((nint)values, count);
#endif
        }

        /// <summary>To be documented.</summary>
        public static double ImStdDev(ushort* values, int count) => ImStdDevNative(values, count);

        /// <summary>To be documented.</summary>
        public static double ImStdDev(ref ushort values, int count)
        {
            fixed (ushort* pvalues = &values)
            {
                return ImStdDevNative(pvalues, count);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double ImStdDevNative(int* values, int count)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<int*, int, double>)funcTable[432])(values, count);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[432])((nint)values, count);
#endif
        }

        /// <summary>To be documented.</summary>
        public static double ImStdDev(int* values, int count) => ImStdDevNative(values, count);
/// <summary>To be documented.</summary>
        public static double ImStdDev(ref int values, int count)
        {
            fixed (int* pvalues = &values)
            {
                return ImStdDevNative(pvalues, count);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double ImStdDevNative(uint* values, int count)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<uint*, int, double>)funcTable[433])(values, count);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[433])((nint)values, count);
#endif
        }

        /// <summary>To be documented.</summary>
        public static double ImStdDev(uint* values, int count) => ImStdDevNative(values, count);

        /// <summary>To be documented.</summary>
        public static double ImStdDev(ref uint values, int count)
        {
            fixed (uint* pvalues = &values)
            {
                return ImStdDevNative(pvalues, count);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double ImStdDevNative(long* values, int count)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<long*, int, double>)funcTable[434])(values, count);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[434])((nint)values, count);
#endif
        }

        /// <summary>To be documented.</summary>
        public static double ImStdDev(long* values, int count) => ImStdDevNative(values, count);

        /// <summary>To be documented.</summary>
        public static double ImStdDev(ref long values, int count)
        {
            fixed (long* pvalues = &values)
            {
                return ImStdDevNative(pvalues, count);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double ImStdDevNative(ulong* values, int count)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ulong*, int, double>)funcTable[435])(values, count);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[435])((nint)values, count);
#endif
        }
/// <summary>To be documented.</summary>
        public static double ImStdDev(ulong* values, int count) => ImStdDevNative(values, count);

        /// <summary>To be documented.</summary>
        public static double ImStdDev(ref ulong values, int count)
        {
            fixed (ulong* pvalues = &values)
            {
                return ImStdDevNative(pvalues, count);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static uint ImMixU32Native(uint a, uint b, uint s)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<uint, uint, uint, uint>)funcTable[436])(a, b, s);
#else
            return (uint)((delegate* unmanaged[Cdecl]<uint, uint, uint, uint>)funcTable[436])(a, b, s);
#endif
        }

        /// <summary>To be documented.</summary>
        public static uint ImMixU32(uint a, uint b, uint s) => ImMixU32Native(a, b, s);

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static uint ImLerpU32Native(uint* colors, int size, float t)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<uint*, int, float, uint>)funcTable[437])(colors, size, t);
#else
            return (uint)((delegate* unmanaged[Cdecl]<nint, int, float, uint>)funcTable[437])((nint)colors, size, t);
#endif
        }

        /// <summary>To be documented.</summary>
        public static uint ImLerpU32(uint* colors, int size, float t) => ImLerpU32Native(colors, size, t);

        /// <summary>To be documented.</summary>
        public static uint ImLerpU32(ref uint colors, int size, float t)
        {
            fixed (uint* pcolors = &colors)
            {
                return ImLerpU32Native(pcolors, size, t);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static uint ImAlphaU32Native(uint col, float alpha)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<uint, float, uint>)funcTable[438])(col, alpha);
#else
            return (uint)((delegate* unmanaged[Cdecl]<uint, float, uint>)funcTable[438])(col, alpha);
#endif
        }

        /// <summary>To be documented.</summary>
        public static uint ImAlphaU32(uint col, float alpha) => ImAlphaU32Native(col, alpha);
/// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte ImOverlapsNative(float minA, float maxA, float minB, float maxB)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<float, float, float, float, byte>)funcTable[439])(minA, maxA, minB, maxB);
#else
            return (byte)((delegate* unmanaged[Cdecl]<float, float, float, float, byte>)funcTable[439])(minA, maxA, minB, maxB);
#endif
        }

        /// <summary>To be documented.</summary>
        public static bool ImOverlaps(float minA, float maxA, float minB, float maxB) => ImOverlapsNative(minA, maxA, minB, maxB) != 0;

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte ImOverlapsNative(double minA, double maxA, double minB, double maxB)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<double, double, double, double, byte>)funcTable[440])(minA, maxA, minB, maxB);
#else
            return (byte)((delegate* unmanaged[Cdecl]<double, double, double, double, byte>)funcTable[440])(minA, maxA, minB, maxB);
#endif
        }

        /// <summary>To be documented.</summary>
        public static bool ImOverlaps(double minA, double maxA, double minB, double maxB) => ImOverlapsNative(minA, maxA, minB, maxB) != 0;

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte ImOverlapsNative(byte minA, byte maxA, byte minB, byte maxB)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<byte, byte, byte, byte, byte>)funcTable[441])(minA, maxA, minB, maxB);
#else
            return (byte)((delegate* unmanaged[Cdecl]<byte, byte, byte, byte, byte>)funcTable[441])(minA, maxA, minB, maxB);
#endif
        }

        /// <summary>To be documented.</summary>
        public static bool ImOverlaps(byte minA, byte maxA, byte minB, byte maxB) => ImOverlapsNative(minA, maxA, minB, maxB) != 0;
/// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte ImOverlapsNative(short minA, short maxA, short minB, short maxB)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<short, short, short, short, byte>)funcTable[442])(minA, maxA, minB, maxB);
#else
            return (byte)((delegate* unmanaged[Cdecl]<short, short, short, short, byte>)funcTable[442])(minA, maxA, minB, maxB);
#endif
        }

        /// <summary>To be documented.</summary>
        public static bool ImOverlaps(short minA, short maxA, short minB, short maxB) => ImOverlapsNative(minA, maxA, minB, maxB) != 0;

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte ImOverlapsNative(ushort minA, ushort maxA, ushort minB, ushort maxB)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ushort, ushort, ushort, ushort, byte>)funcTable[443])(minA, maxA, minB, maxB);
#else
            return (byte)((delegate* unmanaged[Cdecl]<ushort, ushort, ushort, ushort, byte>)funcTable[443])(minA, maxA, minB, maxB);
#endif
        }

        /// <summary>To be documented.</summary>
        public static bool ImOverlaps(ushort minA, ushort maxA, ushort minB, ushort maxB) => ImOverlapsNative(minA, maxA, minB, maxB) != 0;

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte ImOverlapsNative(int minA, int maxA, int minB, int maxB)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<int, int, int, int, byte>)funcTable[444])(minA, maxA, minB, maxB);
#else
            return (byte)((delegate* unmanaged[Cdecl]<int, int, int, int, byte>)funcTable[444])(minA, maxA, minB, maxB);
#endif
        }

        /// <summary>To be documented.</summary>
        public static bool ImOverlaps(int minA, int maxA, int minB, int maxB) => ImOverlapsNative(minA, maxA, minB, maxB) != 0;

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte ImOverlapsNative(uint minA, uint maxA, uint minB, uint maxB)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<uint, uint, uint, uint, byte>)funcTable[445])(minA, maxA, minB, maxB);
#else
            return (byte)((delegate* unmanaged[Cdecl]<uint, uint, uint, uint, byte>)funcTable[445])(minA, maxA, minB, maxB);
#endif
        }
/// <summary>To be documented.</summary>
        public static bool ImOverlaps(uint minA, uint maxA, uint minB, uint maxB) => ImOverlapsNative(minA, maxA, minB, maxB) != 0;

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte ImOverlapsNative(long minA, long maxA, long minB, long maxB)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<long, long, long, long, byte>)funcTable[446])(minA, maxA, minB, maxB);
#else
            return (byte)((delegate* unmanaged[Cdecl]<long, long, long, long, byte>)funcTable[446])(minA, maxA, minB, maxB);
#endif
        }

        /// <summary>To be documented.</summary>
        public static bool ImOverlaps(long minA, long maxA, long minB, long maxB) => ImOverlapsNative(minA, maxA, minB, maxB) != 0;

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte ImOverlapsNative(ulong minA, ulong maxA, ulong minB, ulong maxB)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ulong, ulong, ulong, ulong, byte>)funcTable[447])(minA, maxA, minB, maxB);
#else
            return (byte)((delegate* unmanaged[Cdecl]<ulong, ulong, ulong, ulong, byte>)funcTable[447])(minA, maxA, minB, maxB);
#endif
        }

        /// <summary>To be documented.</summary>
        public static bool ImOverlaps(ulong minA, ulong maxA, ulong minB, ulong maxB) => ImOverlapsNative(minA, maxA, minB, maxB) != 0;

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static ImPlotDateTimeSpec* ImPlotDateTimeSpecNative()
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotDateTimeSpec*>)funcTable[448])();
#else
            return (ImPlotDateTimeSpec*)((delegate* unmanaged[Cdecl]<nint>)funcTable[448])();
#endif
        }

        /// <summary>To be documented.</summary>
        public static ImPlotDateTimeSpecPtr ImPlotDateTimeSpec()
        {
            ImPlotDateTimeSpecPtr ret = ImPlotDateTimeSpecNative();
            return ret;
        }
/// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void DestroyNative(ImPlotDateTimeSpec* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotDateTimeSpec*, void>)funcTable[449])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[449])((nint)self);
#endif
        }

        /// <summary>To be documented.</summary>
        public static void Destroy(ImPlotDateTimeSpecPtr self)
        {
            DestroyNative(self);
        }

        /// <summary>To be documented.</summary>
        public static void Destroy(ref ImPlotDateTimeSpec self)
        {
            fixed (ImPlotDateTimeSpec* pself = &self)
            {
                DestroyNative(pself);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static ImPlotDateTimeSpec* ImPlotDateTimeSpecNative(ImPlotDateFmt dateFmt, ImPlotTimeFmt timeFmt, byte use24HrClk, byte useIso8601)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotDateFmt, ImPlotTimeFmt, byte, byte, ImPlotDateTimeSpec*>)funcTable[450])(dateFmt, timeFmt, use24HrClk, useIso8601);
#else
            return (ImPlotDateTimeSpec*)((delegate* unmanaged[Cdecl]<ImPlotDateFmt, ImPlotTimeFmt, byte, byte, nint>)funcTable[450])(dateFmt, timeFmt, use24HrClk, useIso8601);
#endif
        }

        /// <summary>To be documented.</summary>
        public static ImPlotDateTimeSpecPtr ImPlotDateTimeSpec(ImPlotDateFmt dateFmt, ImPlotTimeFmt timeFmt, bool use24HrClk, bool useIso8601)
        {
            ImPlotDateTimeSpecPtr ret = ImPlotDateTimeSpecNative(dateFmt, timeFmt, use24HrClk ? (byte)1 : (byte)0, useIso8601 ? (byte)1 : (byte)0);
            return ret;
        }

        /// <summary>To be documented. Defaults <c>useIso8601</c> to false.</summary>
        public static ImPlotDateTimeSpecPtr ImPlotDateTimeSpec(ImPlotDateFmt dateFmt, ImPlotTimeFmt timeFmt, bool use24HrClk)
        {
            ImPlotDateTimeSpecPtr ret = ImPlotDateTimeSpecNative(dateFmt, timeFmt, use24HrClk ? (byte)1 : (byte)0, (byte)(0));
            return ret;
        }

        /// <summary>To be documented. Defaults <c>use24HrClk</c> and <c>useIso8601</c> to false.</summary>
        public static ImPlotDateTimeSpecPtr ImPlotDateTimeSpec(ImPlotDateFmt dateFmt, ImPlotTimeFmt timeFmt)
        {
            ImPlotDateTimeSpecPtr ret = ImPlotDateTimeSpecNative(dateFmt, timeFmt, (byte)(0), (byte)(0));
            return ret;
        }
/// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static ImPlotTime* ImPlotTimeNative()
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotTime*>)funcTable[451])();
#else
            return (ImPlotTime*)((delegate* unmanaged[Cdecl]<nint>)funcTable[451])();
#endif
        }

        /// <summary>To be documented.</summary>
        public static ImPlotTimePtr ImPlotTime()
        {
            ImPlotTimePtr ret = ImPlotTimeNative();
            return ret;
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void DestroyNative(ImPlotTime* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotTime*, void>)funcTable[452])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[452])((nint)self);
#endif
        }

        /// <summary>To be documented.</summary>
        public static void Destroy(ImPlotTimePtr self)
        {
            DestroyNative(self);
        }

        /// <summary>To be documented.</summary>
        public static void Destroy(ref ImPlotTime self)
        {
            fixed (ImPlotTime* pself = &self)
            {
                DestroyNative(pself);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static ImPlotTime* ImPlotTimeNative(long s, int us)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<long, int, ImPlotTime*>)funcTable[453])(s, us);
#else
            return (ImPlotTime*)((delegate* unmanaged[Cdecl]<long, int, nint>)funcTable[453])(s, us);
#endif
        }

        /// <summary>To be documented.</summary>
        public static ImPlotTimePtr ImPlotTime(long s, int us)
        {
            ImPlotTimePtr ret = ImPlotTimeNative(s, us);
            return ret;
        }

        /// <summary>To be documented. Defaults <c>us</c> to 0.</summary>
        public static ImPlotTimePtr ImPlotTime(long s)
        {
            ImPlotTimePtr ret = ImPlotTimeNative(s, (int)(0));
            return ret;
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void RollOverNative(ImPlotTime* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotTime*, void>)funcTable[454])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[454])((nint)self);
#endif
        }

        /// <summary>To be documented.</summary>
        public static void RollOver(ImPlotTimePtr self)
        {
            RollOverNative(self);
        }
/// <summary>To be documented.</summary>
        public static void RollOver(ref ImPlotTime self)
        {
            fixed (ImPlotTime* pself = &self)
            {
                RollOverNative(pself);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double ToDoubleNative(ImPlotTime* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotTime*, double>)funcTable[455])(self);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, double>)funcTable[455])((nint)self);
#endif
        }

        /// <summary>To be documented.</summary>
        public static double ToDouble(ImPlotTimePtr self) => ToDoubleNative(self);

        /// <summary>To be documented.</summary>
        public static double ToDouble(ref ImPlotTime self)
        {
            fixed (ImPlotTime* pself = &self)
            {
                return ToDoubleNative(pself);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void FromDoubleNative(ImPlotTime* pOut, double t)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotTime*, double, void>)funcTable[456])(pOut, t);
#else
            ((delegate* unmanaged[Cdecl]<nint, double, void>)funcTable[456])((nint)pOut, t);
#endif
        }

        /// <summary>To be documented. Returns the result by value via a stack temporary.</summary>
        public static ImPlotTime FromDouble(double t)
        {
            ImPlotTime ret;
            FromDoubleNative(&ret, t);
            return ret;
        }

        /// <summary>To be documented.</summary>
        public static void FromDouble(ImPlotTimePtr pOut, double t)
        {
            FromDoubleNative(pOut, t);
        }

        /// <summary>To be documented.</summary>
        public static void FromDouble(ref ImPlotTime pOut, double t)
        {
            fixed (ImPlotTime* ppOut = &pOut)
            {
                FromDoubleNative(ppOut, t);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static ImPlotColormapData* ImPlotColormapDataNative()
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*>)funcTable[457])();
#else
            return (ImPlotColormapData*)((delegate* unmanaged[Cdecl]<nint>)funcTable[457])();
#endif
        }

        /// <summary>To be documented.</summary>
        public static ImPlotColormapDataPtr ImPlotColormapData()
        {
            ImPlotColormapDataPtr ret = ImPlotColormapDataNative();
            return ret;
        }
/// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void DestroyNative(ImPlotColormapData* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, void>)funcTable[458])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[458])((nint)self);
#endif
        }

        /// <summary>To be documented.</summary>
        public static void Destroy(ImPlotColormapDataPtr self)
        {
            DestroyNative(self);
        }

        /// <summary>To be documented.</summary>
        public static void Destroy(ref ImPlotColormapData self)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                DestroyNative(pself);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static int AppendNative(ImPlotColormapData* self, byte* name, uint* keys, int count, byte qual)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, byte*, uint*, int, byte, int>)funcTable[459])(self, name, keys, count, qual);
#else
            return (int)((delegate* unmanaged[Cdecl]<nint, nint, nint, int, byte, int>)funcTable[459])((nint)self, (nint)name, (nint)keys, count, qual);
#endif
        }

        /// <summary>To be documented.</summary>
        public static int Append(ImPlotColormapDataPtr self, byte* name, uint* keys, int count, bool qual) => AppendNative(self, name, keys, count, qual ? (byte)1 : (byte)0);

        /// <summary>To be documented.</summary>
        public static int Append(ref ImPlotColormapData self, byte* name, uint* keys, int count, bool qual)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                return AppendNative(pself, name, keys, count, qual ? (byte)1 : (byte)0);
            }
        }

        /// <summary>To be documented.</summary>
        public static int Append(ImPlotColormapDataPtr self, ref byte name, uint* keys, int count, bool qual)
        {
            fixed (byte* pname = &name)
            {
                return AppendNative(self, pname, keys, count, qual ? (byte)1 : (byte)0);
            }
        }

        /// <summary>To be documented.</summary>
        public static int Append(ImPlotColormapDataPtr self, ReadOnlySpan<byte> name, uint* keys, int count, bool qual)
        {
            fixed (byte* pname = name)
            {
                return AppendNative(self, pname, keys, count, qual ? (byte)1 : (byte)0);
            }
        }

        /// <summary>To be documented. Marshals <paramref name="name"/> to a temporary UTF-8 buffer.</summary>
        public static int Append(ImPlotColormapDataPtr self, string name, uint* keys, int count, bool qual)
        {
            byte* pStr0 = null;
            int pStrSize0 = 0;
            if (name != null)
            {
                pStrSize0 = Utils.GetByteCountUTF8(name);
                if (pStrSize0 >= Utils.MaxStackallocSize)
                {
                    pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
                }
                else
                {
                    byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
                    pStr0 = pStrStack0;
                }
                int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0);
                pStr0[pStrOffset0] = 0;
            }
            int ret = AppendNative(self, pStr0, keys, count, qual ? (byte)1 : (byte)0);
            if (pStrSize0 >= Utils.MaxStackallocSize)
            {
                Utils.Free(pStr0);
            }
            return ret;
        }

        /// <summary>To be documented.</summary>
        public static int Append(ref ImPlotColormapData self, ref byte name, uint* keys, int count, bool qual)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                fixed (byte* pname = &name)
                {
                    return AppendNative(pself, pname, keys, count, qual ? (byte)1 : (byte)0);
                }
            }
        }

        /// <summary>To be documented.</summary>
        public static int Append(ref ImPlotColormapData self, ReadOnlySpan<byte> name, uint* keys, int count, bool qual)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                fixed (byte* pname = name)
                {
                    return AppendNative(pself, pname, keys, count, qual ? (byte)1 : (byte)0);
                }
            }
        }

        /// <summary>To be documented. Marshals <paramref name="name"/> to a temporary UTF-8 buffer.</summary>
        public static int Append(ref ImPlotColormapData self, string name, uint* keys, int count, bool qual)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                byte* pStr0 = null;
                int pStrSize0 = 0;
                if (name != null)
                {
                    pStrSize0 = Utils.GetByteCountUTF8(name);
                    if (pStrSize0 >= Utils.MaxStackallocSize)
                    {
                        pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
                    }
                    else
                    {
                        byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
                        pStr0 = pStrStack0;
                    }
                    int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0);
                    pStr0[pStrOffset0] = 0;
                }
                int ret = AppendNative(pself, pStr0, keys, count, qual ? (byte)1 : (byte)0);
                if (pStrSize0 >= Utils.MaxStackallocSize)
                {
                    Utils.Free(pStr0);
                }
                return ret;
            }
        }

        /// <summary>To be documented.</summary>
        public static int Append(ImPlotColormapDataPtr self, byte* name, ref uint keys, int count, bool qual)
        {
            fixed (uint* pkeys = &keys)
            {
                return AppendNative(self, name, pkeys, count, qual ? (byte)1 : (byte)0);
            }
        }

        /// <summary>To be documented.</summary>
        public static int Append(ref ImPlotColormapData self, byte* name, ref uint keys, int count, bool qual)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                fixed (uint* pkeys = &keys)
                {
                    return AppendNative(pself, name, pkeys, count, qual ? (byte)1 : (byte)0);
                }
            }
        }

        /// <summary>To be documented.</summary>
        public static int Append(ImPlotColormapDataPtr self, ref byte name, ref uint keys, int count, bool qual)
        {
            fixed (byte* pname = &name)
            {
                fixed (uint* pkeys = &keys)
                {
                    return AppendNative(self, pname, pkeys, count, qual ? (byte)1 : (byte)0);
                }
            }
        }

        /// <summary>To be documented.</summary>
        public static int Append(ImPlotColormapDataPtr self, ReadOnlySpan<byte> name, ref uint keys, int count, bool qual)
        {
            fixed (byte* pname = name)
            {
                fixed (uint* pkeys = &keys)
                {
                    return AppendNative(self, pname, pkeys, count, qual ? (byte)1 : (byte)0);
                }
            }
        }

        /// <summary>To be documented. Marshals <paramref name="name"/> to a temporary UTF-8 buffer.</summary>
        public static int Append(ImPlotColormapDataPtr self, string name, ref uint keys, int count, bool qual)
        {
            byte* pStr0 = null;
            int pStrSize0 = 0;
            if (name != null)
            {
                pStrSize0 = Utils.GetByteCountUTF8(name);
                if (pStrSize0 >= Utils.MaxStackallocSize)
                {
                    pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
                }
                else
                {
                    byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
                    pStr0 = pStrStack0;
                }
                int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0);
                pStr0[pStrOffset0] = 0;
            }
            fixed (uint* pkeys = &keys)
            {
                int ret = AppendNative(self, pStr0, pkeys, count, qual ? (byte)1 : (byte)0);
                if (pStrSize0 >= Utils.MaxStackallocSize)
                {
                    Utils.Free(pStr0);
                }
                return ret;
            }
        }

        /// <summary>To be documented.</summary>
        public static int Append(ref ImPlotColormapData self, ref byte name, ref uint keys, int count, bool qual)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                fixed (byte* pname = &name)
                {
                    fixed (uint* pkeys = &keys)
                    {
                        return AppendNative(pself, pname, pkeys, count, qual ? (byte)1 : (byte)0);
                    }
                }
            }
        }

        /// <summary>To be documented.</summary>
        public static int Append(ref ImPlotColormapData self, ReadOnlySpan<byte> name, ref uint keys, int count, bool qual)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                fixed (byte* pname = name)
                {
                    fixed (uint* pkeys = &keys)
                    {
                        return AppendNative(pself, pname, pkeys, count, qual ? (byte)1 : (byte)0);
                    }
                }
            }
        }

        /// <summary>To be documented. Marshals <paramref name="name"/> to a temporary UTF-8 buffer.</summary>
        public static int Append(ref ImPlotColormapData self, string name, ref uint keys, int count, bool qual)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                byte* pStr0 = null;
                int pStrSize0 = 0;
                if (name != null)
                {
                    pStrSize0 = Utils.GetByteCountUTF8(name);
                    if (pStrSize0 >= Utils.MaxStackallocSize)
                    {
                        pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
                    }
                    else
                    {
                        byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
                        pStr0 = pStrStack0;
                    }
                    int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0);
                    pStr0[pStrOffset0] = 0;
                }
                fixed (uint* pkeys = &keys)
                {
                    int ret = AppendNative(pself, pStr0, pkeys, count, qual ? (byte)1 : (byte)0);
                    if (pStrSize0 >= Utils.MaxStackallocSize)
                    {
                        Utils.Free(pStr0);
                    }
                    return ret;
                }
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void _AppendTableNative(ImPlotColormapData* self, ImPlotColormap cmap)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, void>)funcTable[460])(self, cmap);
#else
            ((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, void>)funcTable[460])((nint)self, cmap);
#endif
        }
/// <summary>To be documented.</summary>
        public static void _AppendTable(ImPlotColormapDataPtr self, ImPlotColormap cmap)
        {
            _AppendTableNative(self, cmap);
        }

        /// <summary>To be documented.</summary>
        public static void _AppendTable(ref ImPlotColormapData self, ImPlotColormap cmap)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                _AppendTableNative(pself, cmap);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void RebuildTablesNative(ImPlotColormapData* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, void>)funcTable[461])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[461])((nint)self);
#endif
        }

        /// <summary>To be documented.</summary>
        public static void RebuildTables(ImPlotColormapDataPtr self)
        {
            RebuildTablesNative(self);
        }

        /// <summary>To be documented.</summary>
        public static void RebuildTables(ref ImPlotColormapData self)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                RebuildTablesNative(pself);
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte IsQualNative(ImPlotColormapData* self, ImPlotColormap cmap)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, byte>)funcTable[462])(self, cmap);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, byte>)funcTable[462])((nint)self, cmap);
#endif
        }

        /// <summary>To be documented.</summary>
        public static bool IsQual(ImPlotColormapDataPtr self, ImPlotColormap cmap) => IsQualNative(self, cmap) != 0;

        /// <summary>To be documented.</summary>
        public static bool IsQual(ref ImPlotColormapData self, ImPlotColormap cmap)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                return IsQualNative(pself, cmap) != 0;
            }
        }
/// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte* GetNameNative(ImPlotColormapData* self, ImPlotColormap cmap)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, byte*>)funcTable[463])(self, cmap);
#else
            return (byte*)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, nint>)funcTable[463])((nint)self, cmap);
#endif
        }

        /// <summary>To be documented.</summary>
        public static byte* GetName(ImPlotColormapDataPtr self, ImPlotColormap cmap) => GetNameNative(self, cmap);

        /// <summary>To be documented. Decodes the native UTF-8 name into a managed string.</summary>
        public static string GetNameS(ImPlotColormapDataPtr self, ImPlotColormap cmap) => Utils.DecodeStringUTF8(GetNameNative(self, cmap));

        /// <summary>To be documented.</summary>
        public static byte* GetName(ref ImPlotColormapData self, ImPlotColormap cmap)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                return GetNameNative(pself, cmap);
            }
        }

        /// <summary>To be documented. Decodes the native UTF-8 name into a managed string.</summary>
        public static string GetNameS(ref ImPlotColormapData self, ImPlotColormap cmap)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                return Utils.DecodeStringUTF8(GetNameNative(pself, cmap));
            }
        }

        /// <summary>To be documented.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static ImPlotColormap GetIndexNative(ImPlotColormapData* self, byte* name)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, byte*, ImPlotColormap>)funcTable[464])(self, name);
#else
            return (ImPlotColormap)((delegate* unmanaged[Cdecl]<nint, nint, ImPlotColormap>)funcTable[464])((nint)self, (nint)name);
#endif
        }

        /// <summary>To be documented.</summary>
        public static ImPlotColormap GetIndex(ImPlotColormapDataPtr self, byte* name) => GetIndexNative(self, name);

        /// <summary>To be documented.</summary>
        public static ImPlotColormap GetIndex(ref ImPlotColormapData self, byte* name)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                return GetIndexNative(pself, name);
            }
        }
/// <summary>To be documented.</summary>
        public static ImPlotColormap GetIndex(ImPlotColormapDataPtr self, ref byte name)
        {
            fixed (byte* pname = &name)
            {
                return GetIndexNative(self, pname);
            }
        }

        /// <summary>To be documented.</summary>
        public static ImPlotColormap GetIndex(ImPlotColormapDataPtr self, ReadOnlySpan<byte> name)
        {
            fixed (byte* pname = name)
            {
                return GetIndexNative(self, pname);
            }
        }

        /// <summary>To be documented. Marshals <paramref name="name"/> to a temporary UTF-8 buffer.</summary>
        public static ImPlotColormap GetIndex(ImPlotColormapDataPtr self, string name)
        {
            byte* pStr0 = null;
            int pStrSize0 = 0;
            if (name != null)
            {
                pStrSize0 = Utils.GetByteCountUTF8(name);
                if (pStrSize0 >= Utils.MaxStackallocSize)
                {
                    pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
                }
                else
                {
                    byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
                    pStr0 = pStrStack0;
                }
                int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0);
                pStr0[pStrOffset0] = 0;
            }
            ImPlotColormap ret = GetIndexNative(self, pStr0);
            if (pStrSize0 >= Utils.MaxStackallocSize)
            {
                Utils.Free(pStr0);
            }
            return ret;
        }

        /// <summary>To be documented.</summary>
        public static ImPlotColormap GetIndex(ref ImPlotColormapData self, ref byte name)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                fixed (byte* pname = &name)
                {
                    return GetIndexNative(pself, pname);
                }
            }
        }

        /// <summary>To be documented.</summary>
        public static ImPlotColormap GetIndex(ref ImPlotColormapData self, ReadOnlySpan<byte> name)
        {
            fixed (ImPlotColormapData* pself = &self)
            {
                fixed (byte* pname = name)
                {
                    return GetIndexNative(pself, pname);
                }
            }
        }
/// <summary>Looks up a colormap index by managed string; pins <paramref name="self"/> and marshals the name to UTF-8.</summary>
public static ImPlotColormap GetIndex(ref ImPlotColormapData self, string name)
{
	fixed (ImPlotColormapData* pself = &self)
	{
		byte* pStr0 = null;
		int pStrSize0 = 0;
		if (name != null)
		{
			pStrSize0 = Utils.GetByteCountUTF8(name);
			if (pStrSize0 >= Utils.MaxStackallocSize)
			{
				pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
			}
			else
			{
				byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
				pStr0 = pStrStack0;
			}
			int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0);
			pStr0[pStrOffset0] = 0;
		}
		ImPlotColormap ret = GetIndexNative(pself, pStr0);
		if (pStrSize0 >= Utils.MaxStackallocSize)
		{
			Utils.Free(pStr0);
		}
		return ret;
	}
}

/// <summary>Raw native call: pointer to the key-color array of <paramref name="cmap"/>.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static uint* GetKeysNative(ImPlotColormapData* self, ImPlotColormap cmap)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, uint*>)funcTable[465])(self, cmap);
#else
	return (uint*)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, nint>)funcTable[465])((nint)self, cmap);
#endif
}

/// <summary>Returns a pointer to the key colors of <paramref name="cmap"/>.</summary>
public static uint* GetKeys(ImPlotColormapDataPtr self, ImPlotColormap cmap)
{
	return GetKeysNative(self, cmap);
}

/// <summary>Returns a pointer to the key colors of <paramref name="cmap"/>; pins <paramref name="self"/> for the call.</summary>
public static uint* GetKeys(ref ImPlotColormapData self, ImPlotColormap cmap)
{
	fixed (ImPlotColormapData* pself = &self)
	{
		return GetKeysNative(pself, cmap);
	}
}

/// <summary>Raw native call: number of key colors in <paramref name="cmap"/>.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static int GetKeyCountNative(ImPlotColormapData* self, ImPlotColormap cmap)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, int>)funcTable[466])(self, cmap);
#else
	return (int)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, int>)funcTable[466])((nint)self, cmap);
#endif
}

/// <summary>Returns the number of key colors in <paramref name="cmap"/>.</summary>
public static int GetKeyCount(ImPlotColormapDataPtr self, ImPlotColormap cmap)
{
	return GetKeyCountNative(self, cmap);
}
/// <summary>Returns the number of key colors in <paramref name="cmap"/>; pins <paramref name="self"/> for the call.</summary>
public static int GetKeyCount(ref ImPlotColormapData self, ImPlotColormap cmap)
{
	fixed (ImPlotColormapData* pself = &self)
	{
		return GetKeyCountNative(pself, cmap);
	}
}

/// <summary>Raw native call: key color at <paramref name="idx"/> of <paramref name="cmap"/> (packed ABGR).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static uint GetKeyColorNative(ImPlotColormapData* self, ImPlotColormap cmap, int idx)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, int, uint>)funcTable[467])(self, cmap, idx);
#else
	return (uint)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, int, uint>)funcTable[467])((nint)self, cmap, idx);
#endif
}

/// <summary>Returns the key color at <paramref name="idx"/> of <paramref name="cmap"/>.</summary>
public static uint GetKeyColor(ImPlotColormapDataPtr self, ImPlotColormap cmap, int idx)
{
	return GetKeyColorNative(self, cmap, idx);
}

/// <summary>Returns the key color at <paramref name="idx"/>; pins <paramref name="self"/> for the call.</summary>
public static uint GetKeyColor(ref ImPlotColormapData self, ImPlotColormap cmap, int idx)
{
	fixed (ImPlotColormapData* pself = &self)
	{
		return GetKeyColorNative(pself, cmap, idx);
	}
}

/// <summary>Raw native call: overwrites the key color at <paramref name="idx"/> of <paramref name="cmap"/>.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void SetKeyColorNative(ImPlotColormapData* self, ImPlotColormap cmap, int idx, uint value)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, int, uint, void>)funcTable[468])(self, cmap, idx, value);
#else
	((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, int, uint, void>)funcTable[468])((nint)self, cmap, idx, value);
#endif
}

/// <summary>Sets the key color at <paramref name="idx"/> of <paramref name="cmap"/>.</summary>
public static void SetKeyColor(ImPlotColormapDataPtr self, ImPlotColormap cmap, int idx, uint value)
{
	SetKeyColorNative(self, cmap, idx, value);
}

/// <summary>Sets the key color at <paramref name="idx"/>; pins <paramref name="self"/> for the call.</summary>
public static void SetKeyColor(ref ImPlotColormapData self, ImPlotColormap cmap, int idx, uint value)
{
	fixed (ImPlotColormapData* pself = &self)
	{
		SetKeyColorNative(pself, cmap, idx, value);
	}
}
/// <summary>Raw native call: pointer to the interpolated color table of <paramref name="cmap"/>.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static uint* GetTableNative(ImPlotColormapData* self, ImPlotColormap cmap)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, uint*>)funcTable[469])(self, cmap);
#else
	return (uint*)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, nint>)funcTable[469])((nint)self, cmap);
#endif
}

/// <summary>Returns a pointer to the color table of <paramref name="cmap"/>.</summary>
public static uint* GetTable(ImPlotColormapDataPtr self, ImPlotColormap cmap)
{
	return GetTableNative(self, cmap);
}

/// <summary>Returns a pointer to the color table of <paramref name="cmap"/>; pins <paramref name="self"/> for the call.</summary>
public static uint* GetTable(ref ImPlotColormapData self, ImPlotColormap cmap)
{
	fixed (ImPlotColormapData* pself = &self)
	{
		return GetTableNative(pself, cmap);
	}
}

/// <summary>Raw native call: number of entries in the color table of <paramref name="cmap"/>.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static int GetTableSizeNative(ImPlotColormapData* self, ImPlotColormap cmap)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, int>)funcTable[470])(self, cmap);
#else
	return (int)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, int>)funcTable[470])((nint)self, cmap);
#endif
}

/// <summary>Returns the size of the color table of <paramref name="cmap"/>.</summary>
public static int GetTableSize(ImPlotColormapDataPtr self, ImPlotColormap cmap)
{
	return GetTableSizeNative(self, cmap);
}

/// <summary>Returns the size of the color table; pins <paramref name="self"/> for the call.</summary>
public static int GetTableSize(ref ImPlotColormapData self, ImPlotColormap cmap)
{
	fixed (ImPlotColormapData* pself = &self)
	{
		return GetTableSizeNative(pself, cmap);
	}
}

/// <summary>Raw native call: table color at <paramref name="idx"/> of <paramref name="cmap"/>.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static uint GetTableColorNative(ImPlotColormapData* self, ImPlotColormap cmap, int idx)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, int, uint>)funcTable[471])(self, cmap, idx);
#else
	return (uint)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, int, uint>)funcTable[471])((nint)self, cmap, idx);
#endif
}
/// <summary>Returns the table color at <paramref name="idx"/> of <paramref name="cmap"/>.</summary>
public static uint GetTableColor(ImPlotColormapDataPtr self, ImPlotColormap cmap, int idx)
{
	return GetTableColorNative(self, cmap, idx);
}

/// <summary>Returns the table color at <paramref name="idx"/>; pins <paramref name="self"/> for the call.</summary>
public static uint GetTableColor(ref ImPlotColormapData self, ImPlotColormap cmap, int idx)
{
	fixed (ImPlotColormapData* pself = &self)
	{
		return GetTableColorNative(pself, cmap, idx);
	}
}

/// <summary>Raw native call: interpolates the table of <paramref name="cmap"/> at parameter <paramref name="t"/> (0..1).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static uint LerpTableNative(ImPlotColormapData* self, ImPlotColormap cmap, float t)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, float, uint>)funcTable[472])(self, cmap, t);
#else
	return (uint)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, float, uint>)funcTable[472])((nint)self, cmap, t);
#endif
}

/// <summary>Linearly interpolates the color table of <paramref name="cmap"/> at <paramref name="t"/>.</summary>
public static uint LerpTable(ImPlotColormapDataPtr self, ImPlotColormap cmap, float t)
{
	return LerpTableNative(self, cmap, t);
}

/// <summary>Linearly interpolates the color table at <paramref name="t"/>; pins <paramref name="self"/> for the call.</summary>
public static uint LerpTable(ref ImPlotColormapData self, ImPlotColormap cmap, float t)
{
	fixed (ImPlotColormapData* pself = &self)
	{
		return LerpTableNative(pself, cmap, t);
	}
}

/// <summary>Raw native call: constructs an ImPlotPointError (natively allocated).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotPointError* ImPlotPointErrorNative(double x, double y, double neg, double pos)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<double, double, double, double, ImPlotPointError*>)funcTable[473])(x, y, neg, pos);
#else
	return (ImPlotPointError*)((delegate* unmanaged[Cdecl]<double, double, double, double, nint>)funcTable[473])(x, y, neg, pos);
#endif
}

/// <summary>Creates an error-bar point at (x, y) with negative/positive error extents. Caller must Destroy it.</summary>
public static ImPlotPointErrorPtr ImPlotPointError(double x, double y, double neg, double pos)
{
	return ImPlotPointErrorNative(x, y, neg, pos);
}
/// <summary>Raw native call: destructor for a natively allocated ImPlotPointError.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void DestroyNative(ImPlotPointError* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotPointError*, void>)funcTable[474])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[474])((nint)self);
#endif
}

/// <summary>Destroys a natively allocated ImPlotPointError.</summary>
public static void Destroy(ImPlotPointErrorPtr self)
{
	DestroyNative(self);
}

/// <summary>Destroys an ImPlotPointError; pins <paramref name="self"/> for the call.</summary>
public static void Destroy(ref ImPlotPointError self)
{
	fixed (ImPlotPointError* pself = &self)
	{
		DestroyNative(pself);
	}
}

/// <summary>Raw native call: constructs an ImPlotAnnotationCollection (natively allocated).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotAnnotationCollection* ImPlotAnnotationCollectionNative()
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAnnotationCollection*>)funcTable[475])();
#else
	return (ImPlotAnnotationCollection*)((delegate* unmanaged[Cdecl]<nint>)funcTable[475])();
#endif
}

/// <summary>Creates an empty annotation collection. Caller must Destroy it.</summary>
public static ImPlotAnnotationCollectionPtr ImPlotAnnotationCollection()
{
	return ImPlotAnnotationCollectionNative();
}

/// <summary>Raw native call: destructor for a natively allocated ImPlotAnnotationCollection.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void DestroyNative(ImPlotAnnotationCollection* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAnnotationCollection*, void>)funcTable[476])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[476])((nint)self);
#endif
}

/// <summary>Destroys a natively allocated ImPlotAnnotationCollection.</summary>
public static void Destroy(ImPlotAnnotationCollectionPtr self)
{
	DestroyNative(self);
}

/// <summary>Destroys an ImPlotAnnotationCollection; pins <paramref name="self"/> for the call.</summary>
public static void Destroy(ref ImPlotAnnotationCollection self)
{
	fixed (ImPlotAnnotationCollection* pself = &self)
	{
		DestroyNative(pself);
	}
}
/// <summary>Raw native call: appends an annotation with a printf-style format and a va_list handle.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void AppendVNative(ImPlotAnnotationCollection* self, Vector2 pos, Vector2 off, uint bg, uint fg, byte clamp, byte* fmt, nuint args)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAnnotationCollection*, Vector2, Vector2, uint, uint, byte, byte*, nuint, void>)funcTable[477])(self, pos, off, bg, fg, clamp, fmt, args);
#else
	((delegate* unmanaged[Cdecl]<nint, Vector2, Vector2, uint, uint, byte, nint, nuint, void>)funcTable[477])((nint)self, pos, off, bg, fg, clamp, (nint)fmt, args);
#endif
}

/// <summary>Appends an annotation; <paramref name="fmt"/> is a null-terminated UTF-8 format string.</summary>
public static void AppendV(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, byte* fmt, nuint args)
{
	AppendVNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, fmt, args);
}

/// <summary>Appends an annotation; pins <paramref name="self"/> for the call.</summary>
public static void AppendV(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, byte* fmt, nuint args)
{
	fixed (ImPlotAnnotationCollection* pself = &self)
	{
		AppendVNative(pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, fmt, args);
	}
}

/// <summary>Appends an annotation; <paramref name="fmt"/> is the first byte of a null-terminated UTF-8 string.</summary>
public static void AppendV(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ref byte fmt, nuint args)
{
	fixed (byte* pfmt = &fmt)
	{
		AppendVNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pfmt, args);
	}
}

/// <summary>Appends an annotation; the span must contain null-terminated UTF-8 bytes.</summary>
public static void AppendV(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ReadOnlySpan<byte> fmt, nuint args)
{
	fixed (byte* pfmt = fmt)
	{
		AppendVNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pfmt, args);
	}
}
/// <summary>Appends an annotation; encodes <paramref name="fmt"/> to UTF-8 (stackalloc for short strings, heap otherwise).</summary>
public static void AppendV(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, string fmt, nuint args)
{
	byte* pStr0 = null;
	int pStrSize0 = 0;
	if (fmt != null)
	{
		pStrSize0 = Utils.GetByteCountUTF8(fmt);
		if (pStrSize0 >= Utils.MaxStackallocSize)
		{
			pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
		}
		else
		{
			byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
			pStr0 = pStrStack0;
		}
		int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
		pStr0[pStrOffset0] = 0;
	}
	AppendVNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pStr0, args);
	if (pStrSize0 >= Utils.MaxStackallocSize)
	{
		Utils.Free(pStr0);
	}
}

/// <summary>Appends an annotation; pins both the collection and the format bytes.</summary>
public static void AppendV(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ref byte fmt, nuint args)
{
	fixed (ImPlotAnnotationCollection* pself = &self)
	fixed (byte* pfmt = &fmt)
	{
		AppendVNative(pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pfmt, args);
	}
}

/// <summary>Appends an annotation; pins both the collection and the UTF-8 span.</summary>
public static void AppendV(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ReadOnlySpan<byte> fmt, nuint args)
{
	fixed (ImPlotAnnotationCollection* pself = &self)
	fixed (byte* pfmt = fmt)
	{
		AppendVNative(pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pfmt, args);
	}
}
/// <summary>Appends an annotation; pins <paramref name="self"/> and marshals <paramref name="fmt"/> to UTF-8.</summary>
public static void AppendV(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, string fmt, nuint args)
{
	fixed (ImPlotAnnotationCollection* pself = &self)
	{
		byte* pStr0 = null;
		int pStrSize0 = 0;
		if (fmt != null)
		{
			pStrSize0 = Utils.GetByteCountUTF8(fmt);
			if (pStrSize0 >= Utils.MaxStackallocSize)
			{
				pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
			}
			else
			{
				byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
				pStr0 = pStrStack0;
			}
			int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
			pStr0[pStrOffset0] = 0;
		}
		AppendVNative(pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pStr0, args);
		if (pStrSize0 >= Utils.MaxStackallocSize)
		{
			Utils.Free(pStr0);
		}
	}
}

/// <summary>Raw native call: appends an annotation without varargs.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void AppendNative(ImPlotAnnotationCollection* self, Vector2 pos, Vector2 off, uint bg, uint fg, byte clamp, byte* fmt)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAnnotationCollection*, Vector2, Vector2, uint, uint, byte, byte*, void>)funcTable[478])(self, pos, off, bg, fg, clamp, fmt);
#else
	((delegate* unmanaged[Cdecl]<nint, Vector2, Vector2, uint, uint, byte, nint, void>)funcTable[478])((nint)self, pos, off, bg, fg, clamp, (nint)fmt);
#endif
}

/// <summary>Appends an annotation with a null-terminated UTF-8 label.</summary>
public static void Append(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, byte* fmt)
{
	AppendNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, fmt);
}

/// <summary>Appends an annotation; pins <paramref name="self"/> for the call.</summary>
public static void Append(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, byte* fmt)
{
	fixed (ImPlotAnnotationCollection* pself = &self)
	{
		AppendNative(pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, fmt);
	}
}

/// <summary>Appends an annotation; <paramref name="fmt"/> is the first byte of a null-terminated UTF-8 string.</summary>
public static void Append(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ref byte fmt)
{
	fixed (byte* pfmt = &fmt)
	{
		AppendNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pfmt);
	}
}

/// <summary>Appends an annotation; the span must contain null-terminated UTF-8 bytes.</summary>
public static void Append(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ReadOnlySpan<byte> fmt)
{
	fixed (byte* pfmt = fmt)
	{
		AppendNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pfmt);
	}
}

/// <summary>Appends an annotation; encodes <paramref name="fmt"/> to UTF-8 (stackalloc for short strings, heap otherwise).</summary>
public static void Append(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, string fmt)
{
	byte* pStr0 = null;
	int pStrSize0 = 0;
	if (fmt != null)
	{
		pStrSize0 = Utils.GetByteCountUTF8(fmt);
		if (pStrSize0 >= Utils.MaxStackallocSize)
		{
			pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
		}
		else
		{
			byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
			pStr0 = pStrStack0;
		}
		int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
		pStr0[pStrOffset0] = 0;
	}
	AppendNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pStr0);
	if (pStrSize0 >= Utils.MaxStackallocSize)
	{
		Utils.Free(pStr0);
	}
}

/// <summary>Appends an annotation; pins both the collection and the format bytes.</summary>
public static void Append(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ref byte fmt)
{
	fixed (ImPlotAnnotationCollection* pself = &self)
	fixed (byte* pfmt = &fmt)
	{
		AppendNative(pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pfmt);
	}
}

/// <summary>Appends an annotation; pins both the collection and the UTF-8 span.</summary>
public static void Append(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ReadOnlySpan<byte> fmt)
{
	fixed (ImPlotAnnotationCollection* pself = &self)
	fixed (byte* pfmt = fmt)
	{
		AppendNative(pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pfmt);
	}
}
/// <summary>Appends an annotation; pins <paramref name="self"/> and marshals <paramref name="fmt"/> to UTF-8.</summary>
public static void Append(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, string fmt)
{
	fixed (ImPlotAnnotationCollection* pself = &self)
	{
		byte* pStr0 = null;
		int pStrSize0 = 0;
		if (fmt != null)
		{
			pStrSize0 = Utils.GetByteCountUTF8(fmt);
			if (pStrSize0 >= Utils.MaxStackallocSize)
			{
				pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
			}
			else
			{
				byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
				pStr0 = pStrStack0;
			}
			int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
			pStr0[pStrOffset0] = 0;
		}
		AppendNative(pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pStr0);
		if (pStrSize0 >= Utils.MaxStackallocSize)
		{
			Utils.Free(pStr0);
		}
	}
}

/// <summary>Raw native call: text of the annotation at <paramref name="idx"/>.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte* GetTextNative(ImPlotAnnotationCollection* self, int idx)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAnnotationCollection*, int, byte*>)funcTable[479])(self, idx);
#else
	return (byte*)((delegate* unmanaged[Cdecl]<nint, int, nint>)funcTable[479])((nint)self, idx);
#endif
}

/// <summary>Returns the UTF-8 text of the annotation at <paramref name="idx"/>.</summary>
public static byte* GetText(ImPlotAnnotationCollectionPtr self, int idx)
{
	return GetTextNative(self, idx);
}

/// <summary>Returns the text of the annotation at <paramref name="idx"/> decoded to a managed string.</summary>
public static string GetTextS(ImPlotAnnotationCollectionPtr self, int idx)
{
	return Utils.DecodeStringUTF8(GetTextNative(self, idx));
}

/// <summary>Returns the UTF-8 text of the annotation at <paramref name="idx"/>; pins <paramref name="self"/> for the call.</summary>
public static byte* GetText(ref ImPlotAnnotationCollection self, int idx)
{
	fixed (ImPlotAnnotationCollection* pself = &self)
	{
		return GetTextNative(pself, idx);
	}
}

/// <summary>Returns the annotation text at <paramref name="idx"/> as a managed string; pins <paramref name="self"/> for the call.</summary>
public static string GetTextS(ref ImPlotAnnotationCollection self, int idx)
{
	fixed (ImPlotAnnotationCollection* pself = &self)
	{
		return Utils.DecodeStringUTF8(GetTextNative(pself, idx));
	}
}
/// <summary>Raw native call: clears all annotations in the collection.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void ResetNative(ImPlotAnnotationCollection* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAnnotationCollection*, void>)funcTable[480])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[480])((nint)self);
#endif
}

/// <summary>Clears all annotations in the collection.</summary>
public static void Reset(ImPlotAnnotationCollectionPtr self)
{
	ResetNative(self);
}

/// <summary>Clears all annotations; pins <paramref name="self"/> for the call.</summary>
public static void Reset(ref ImPlotAnnotationCollection self)
{
	fixed (ImPlotAnnotationCollection* pself = &self)
	{
		ResetNative(pself);
	}
}

/// <summary>Raw native call: constructs an ImPlotTagCollection (natively allocated).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotTagCollection* ImPlotTagCollectionNative()
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotTagCollection*>)funcTable[481])();
#else
	return (ImPlotTagCollection*)((delegate* unmanaged[Cdecl]<nint>)funcTable[481])();
#endif
}

/// <summary>Creates an empty tag collection. Caller must Destroy it.</summary>
public static ImPlotTagCollectionPtr ImPlotTagCollection()
{
	return ImPlotTagCollectionNative();
}

/// <summary>Raw native call: destructor for a natively allocated ImPlotTagCollection.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void DestroyNative(ImPlotTagCollection* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotTagCollection*, void>)funcTable[482])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[482])((nint)self);
#endif
}

/// <summary>Destroys a natively allocated ImPlotTagCollection.</summary>
public static void Destroy(ImPlotTagCollectionPtr self)
{
	DestroyNative(self);
}

/// <summary>Destroys an ImPlotTagCollection; pins <paramref name="self"/> for the call.</summary>
public static void Destroy(ref ImPlotTagCollection self)
{
	fixed (ImPlotTagCollection* pself = &self)
	{
		DestroyNative(pself);
	}
}
/// <summary>Raw native call: appends an axis tag with a printf-style format and a va_list handle.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void AppendVNative(ImPlotTagCollection* self, ImAxis axis, double value, uint bg, uint fg, byte* fmt, nuint args)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotTagCollection*, ImAxis, double, uint, uint, byte*, nuint, void>)funcTable[483])(self, axis, value, bg, fg, fmt, args);
#else
	((delegate* unmanaged[Cdecl]<nint, ImAxis, double, uint, uint, nint, nuint, void>)funcTable[483])((nint)self, axis, value, bg, fg, (nint)fmt, args);
#endif
}

/// <summary>Appends an axis tag; <paramref name="fmt"/> is a null-terminated UTF-8 format string.</summary>
public static void AppendV(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, byte* fmt, nuint args)
{
	AppendVNative(self, axis, value, bg, fg, fmt, args);
}

/// <summary>Appends an axis tag; pins <paramref name="self"/> for the call.</summary>
public static void AppendV(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, byte* fmt, nuint args)
{
	fixed (ImPlotTagCollection* pself = &self)
	{
		AppendVNative(pself, axis, value, bg, fg, fmt, args);
	}
}

/// <summary>Appends an axis tag; <paramref name="fmt"/> is the first byte of a null-terminated UTF-8 string.</summary>
public static void AppendV(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, ref byte fmt, nuint args)
{
	fixed (byte* pfmt = &fmt)
	{
		AppendVNative(self, axis, value, bg, fg, pfmt, args);
	}
}

/// <summary>Appends an axis tag; the span must contain null-terminated UTF-8 bytes.</summary>
public static void AppendV(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, ReadOnlySpan<byte> fmt, nuint args)
{
	fixed (byte* pfmt = fmt)
	{
		AppendVNative(self, axis, value, bg, fg, pfmt, args);
	}
}
/// <summary>Appends an axis tag; encodes <paramref name="fmt"/> to UTF-8 (stackalloc for short strings, heap otherwise).</summary>
public static void AppendV(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, string fmt, nuint args)
{
	byte* pStr0 = null;
	int pStrSize0 = 0;
	if (fmt != null)
	{
		pStrSize0 = Utils.GetByteCountUTF8(fmt);
		if (pStrSize0 >= Utils.MaxStackallocSize)
		{
			pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
		}
		else
		{
			byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
			pStr0 = pStrStack0;
		}
		int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
		pStr0[pStrOffset0] = 0;
	}
	AppendVNative(self, axis, value, bg, fg, pStr0, args);
	if (pStrSize0 >= Utils.MaxStackallocSize)
	{
		Utils.Free(pStr0);
	}
}

/// <summary>Appends an axis tag; pins both the collection and the format bytes.</summary>
public static void AppendV(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, ref byte fmt, nuint args)
{
	fixed (ImPlotTagCollection* pself = &self)
	fixed (byte* pfmt = &fmt)
	{
		AppendVNative(pself, axis, value, bg, fg, pfmt, args);
	}
}

/// <summary>Appends an axis tag; pins both the collection and the UTF-8 span.</summary>
public static void AppendV(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, ReadOnlySpan<byte> fmt, nuint args)
{
	fixed (ImPlotTagCollection* pself = &self)
	fixed (byte* pfmt = fmt)
	{
		AppendVNative(pself, axis, value, bg, fg, pfmt, args);
	}
}

/// <summary>Appends an axis tag; pins <paramref name="self"/> and marshals <paramref name="fmt"/> to UTF-8.</summary>
public static void AppendV(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, string fmt, nuint args)
{
	fixed (ImPlotTagCollection* pself = &self)
	{
		byte* pStr0 = null;
		int pStrSize0 = 0;
		if (fmt != null)
		{
			pStrSize0 = Utils.GetByteCountUTF8(fmt);
			if (pStrSize0 >= Utils.MaxStackallocSize)
			{
				pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
			}
			else
			{
				byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
				pStr0 = pStrStack0;
			}
			int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
			pStr0[pStrOffset0] = 0;
		}
		AppendVNative(pself, axis, value, bg, fg, pStr0, args);
		if (pStrSize0 >= Utils.MaxStackallocSize)
		{
			Utils.Free(pStr0);
		}
	}
}
/// <summary>Raw native call: appends an axis tag without varargs.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void AppendNative(ImPlotTagCollection* self, ImAxis axis, double value, uint bg, uint fg, byte* fmt)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotTagCollection*, ImAxis, double, uint, uint, byte*, void>)funcTable[484])(self, axis, value, bg, fg, fmt);
#else
	((delegate* unmanaged[Cdecl]<nint, ImAxis, double, uint, uint, nint, void>)funcTable[484])((nint)self, axis, value, bg, fg, (nint)fmt);
#endif
}

/// <summary>Appends an axis tag with a null-terminated UTF-8 label.</summary>
public static void Append(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, byte* fmt)
{
	AppendNative(self, axis, value, bg, fg, fmt);
}

/// <summary>Appends an axis tag; pins <paramref name="self"/> for the call.</summary>
public static void Append(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, byte* fmt)
{
	fixed (ImPlotTagCollection* pself = &self)
	{
		AppendNative(pself, axis, value, bg, fg, fmt);
	}
}

/// <summary>Appends an axis tag; <paramref name="fmt"/> is the first byte of a null-terminated UTF-8 string.</summary>
public static void Append(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, ref byte fmt)
{
	fixed (byte* pfmt = &fmt)
	{
		AppendNative(self, axis, value, bg, fg, pfmt);
	}
}

/// <summary>Appends an axis tag; the span must contain null-terminated UTF-8 bytes.</summary>
public static void Append(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, ReadOnlySpan<byte> fmt)
{
	fixed (byte* pfmt = fmt)
	{
		AppendNative(self, axis, value, bg, fg, pfmt);
	}
}

/// <summary>Appends an axis tag; encodes <paramref name="fmt"/> to UTF-8 (stackalloc for short strings, heap otherwise).</summary>
public static void Append(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, string fmt)
{
	byte* pStr0 = null;
	int pStrSize0 = 0;
	if (fmt != null)
	{
		pStrSize0 = Utils.GetByteCountUTF8(fmt);
		if (pStrSize0 >= Utils.MaxStackallocSize)
		{
			pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
		}
		else
		{
			byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
			pStr0 = pStrStack0;
		}
		int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
		pStr0[pStrOffset0] = 0;
	}
	AppendNative(self, axis, value, bg, fg, pStr0);
	if (pStrSize0 >= Utils.MaxStackallocSize)
	{
		Utils.Free(pStr0);
	}
}
/// <summary>Appends an axis tag; pins both the collection and the format bytes.</summary>
public static void Append(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, ref byte fmt)
{
	fixed (ImPlotTagCollection* pself = &self)
	fixed (byte* pfmt = &fmt)
	{
		AppendNative(pself, axis, value, bg, fg, pfmt);
	}
}

/// <summary>Appends an axis tag; pins both the collection and the UTF-8 span.</summary>
public static void Append(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, ReadOnlySpan<byte> fmt)
{
	fixed (ImPlotTagCollection* pself = &self)
	fixed (byte* pfmt = fmt)
	{
		AppendNative(pself, axis, value, bg, fg, pfmt);
	}
}

/// <summary>Appends an axis tag; pins <paramref name="self"/> and marshals <paramref name="fmt"/> to UTF-8.</summary>
public static void Append(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, string fmt)
{
	fixed (ImPlotTagCollection* pself = &self)
	{
		byte* pStr0 = null;
		int pStrSize0 = 0;
		if (fmt != null)
		{
			pStrSize0 = Utils.GetByteCountUTF8(fmt);
			if (pStrSize0 >= Utils.MaxStackallocSize)
			{
				pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
			}
			else
			{
				byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
				pStr0 = pStrStack0;
			}
			int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
			pStr0[pStrOffset0] = 0;
		}
		AppendNative(pself, axis, value, bg, fg, pStr0);
		if (pStrSize0 >= Utils.MaxStackallocSize)
		{
			Utils.Free(pStr0);
		}
	}
}

/// <summary>Raw native call: text of the tag at <paramref name="idx"/>.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte* GetTextNative(ImPlotTagCollection* self, int idx)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotTagCollection*, int, byte*>)funcTable[485])(self, idx);
#else
	return (byte*)((delegate* unmanaged[Cdecl]<nint, int, nint>)funcTable[485])((nint)self, idx);
#endif
}

/// <summary>Returns the UTF-8 text of the tag at <paramref name="idx"/>.</summary>
public static byte* GetText(ImPlotTagCollectionPtr self, int idx)
{
	return GetTextNative(self, idx);
}

/// <summary>Returns the text of the tag at <paramref name="idx"/> decoded to a managed string.</summary>
public static string GetTextS(ImPlotTagCollectionPtr self, int idx)
{
	return Utils.DecodeStringUTF8(GetTextNative(self, idx));
}
/// <summary>Returns the UTF-8 text of the tag at <paramref name="idx"/>; pins <paramref name="self"/> for the call.</summary>
public static byte* GetText(ref ImPlotTagCollection self, int idx)
{
	fixed (ImPlotTagCollection* pself = &self)
	{
		return GetTextNative(pself, idx);
	}
}

/// <summary>Returns the tag text at <paramref name="idx"/> as a managed string; pins <paramref name="self"/> for the call.</summary>
public static string GetTextS(ref ImPlotTagCollection self, int idx)
{
	fixed (ImPlotTagCollection* pself = &self)
	{
		return Utils.DecodeStringUTF8(GetTextNative(pself, idx));
	}
}

/// <summary>Raw native call: clears all tags in the collection.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void ResetNative(ImPlotTagCollection* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotTagCollection*, void>)funcTable[486])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[486])((nint)self);
#endif
}

/// <summary>Clears all tags in the collection.</summary>
public static void Reset(ImPlotTagCollectionPtr self)
{
	ResetNative(self);
}

/// <summary>Clears all tags; pins <paramref name="self"/> for the call.</summary>
public static void Reset(ref ImPlotTagCollection self)
{
	fixed (ImPlotTagCollection* pself = &self)
	{
		ResetNative(pself);
	}
}

/// <summary>Raw native call: constructs an ImPlotTick (natively allocated). Bools are marshaled as bytes.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotTick* ImPlotTickNative(double value, byte major, int level, byte showLabel)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<double, byte, int, byte, ImPlotTick*>)funcTable[487])(value, major, level, showLabel);
#else
	return (ImPlotTick*)((delegate* unmanaged[Cdecl]<double, byte, int, byte, nint>)funcTable[487])(value, major, level, showLabel);
#endif
}

/// <summary>Creates a tick at <paramref name="value"/>. Caller must Destroy it.</summary>
public static ImPlotTickPtr ImPlotTick(double value, bool major, int level, bool showLabel)
{
	return ImPlotTickNative(value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0);
}

/// <summary>Raw native call: destructor for a natively allocated ImPlotTick.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void DestroyNative(ImPlotTick* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotTick*, void>)funcTable[488])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[488])((nint)self);
#endif
}
/// <summary>Destroys a natively allocated ImPlotTick.</summary>
public static void Destroy(ImPlotTickPtr self)
{
	DestroyNative(self);
}

/// <summary>Destroys an ImPlotTick; pins <paramref name="self"/> for the call.</summary>
public static void Destroy(ref ImPlotTick self)
{
	fixed (ImPlotTick* pself = &self)
	{
		DestroyNative(pself);
	}
}

/// <summary>Raw native call: constructs an ImPlotTicker (natively allocated).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotTicker* ImPlotTickerNative()
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotTicker*>)funcTable[489])();
#else
	return (ImPlotTicker*)((delegate* unmanaged[Cdecl]<nint>)funcTable[489])();
#endif
}

/// <summary>Creates an empty ticker. Caller must Destroy it.</summary>
public static ImPlotTickerPtr ImPlotTicker()
{
	return ImPlotTickerNative();
}

/// <summary>Raw native call: destructor for a natively allocated ImPlotTicker.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void DestroyNative(ImPlotTicker* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotTicker*, void>)funcTable[490])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[490])((nint)self);
#endif
}

/// <summary>Destroys a natively allocated ImPlotTicker.</summary>
public static void Destroy(ImPlotTickerPtr self)
{
	DestroyNative(self);
}

/// <summary>Destroys an ImPlotTicker; pins <paramref name="self"/> for the call.</summary>
public static void Destroy(ref ImPlotTicker self)
{
	fixed (ImPlotTicker* pself = &self)
	{
		DestroyNative(pself);
	}
}

/// <summary>Raw native call: adds a tick with an explicit UTF-8 label; returns a pointer to the stored tick.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotTick* AddTickNative(ImPlotTicker* self, double value, byte major, int level, byte showLabel, byte* label)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotTicker*, double, byte, int, byte, byte*, ImPlotTick*>)funcTable[491])(self, value, major, level, showLabel, label);
#else
	return (ImPlotTick*)((delegate* unmanaged[Cdecl]<nint, double, byte, int, byte, nint, nint>)funcTable[491])((nint)self, value, major, level, showLabel, (nint)label);
#endif
}

/// <summary>Adds a tick with a null-terminated UTF-8 label.</summary>
public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, byte* label)
{
	return AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, label);
}
/// <summary>Adds a labeled tick; pins <paramref name="self"/> for the call.</summary>
public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, byte* label)
{
	fixed (ImPlotTicker* pself = &self)
	{
		return AddTickNative(pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, label);
	}
}

/// <summary>Adds a labeled tick; <paramref name="label"/> is the first byte of a null-terminated UTF-8 string.</summary>
public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, ref byte label)
{
	fixed (byte* plabel = &label)
	{
		return AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, plabel);
	}
}

/// <summary>Adds a labeled tick; the span must contain null-terminated UTF-8 bytes.</summary>
public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, ReadOnlySpan<byte> label)
{
	fixed (byte* plabel = label)
	{
		return AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, plabel);
	}
}

/// <summary>Adds a labeled tick; encodes <paramref name="label"/> to UTF-8 (stackalloc for short strings, heap otherwise).</summary>
public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, string label)
{
	byte* pStr0 = null;
	int pStrSize0 = 0;
	if (label != null)
	{
		pStrSize0 = Utils.GetByteCountUTF8(label);
		if (pStrSize0 >= Utils.MaxStackallocSize)
		{
			pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
		}
		else
		{
			byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
			pStr0 = pStrStack0;
		}
		int pStrOffset0 = Utils.EncodeStringUTF8(label, pStr0, pStrSize0);
		pStr0[pStrOffset0] = 0;
	}
	ImPlotTickPtr ret = AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, pStr0);
	if (pStrSize0 >= Utils.MaxStackallocSize)
	{
		Utils.Free(pStr0);
	}
	return ret;
}
/// </summary>
public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, ref byte label)
{
	fixed (ImPlotTicker* pself = &self)
	{
		fixed (byte* plabel = &label)
		{
			ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, (byte*)plabel);
			return ret;
		}
	}
}

/// <summary>To be documented.</summary>
public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, ReadOnlySpan<byte> label)
{
	fixed (ImPlotTicker* pself = &self)
	{
		fixed (byte* plabel = label)
		{
			ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, (byte*)plabel);
			return ret;
		}
	}
}

/// <summary>Adds a tick with a managed string label; encodes to UTF-8 on the stack or the heap depending on length.</summary>
public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, string label)
{
	fixed (ImPlotTicker* pself = &self)
	{
		byte* pStr0 = null;
		int pStrSize0 = 0;
		if (label != null)
		{
			pStrSize0 = Utils.GetByteCountUTF8(label);
			if (pStrSize0 >= Utils.MaxStackallocSize)
			{
				pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
			}
			else
			{
				byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
				pStr0 = pStrStack0;
			}
			int pStrOffset0 = Utils.EncodeStringUTF8(label, pStr0, pStrSize0);
			pStr0[pStrOffset0] = 0;
		}
		ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, pStr0);
		if (pStrSize0 >= Utils.MaxStackallocSize)
		{
			Utils.Free(pStr0);
		}
		return ret;
	}
}

/// <summary>
/// To be documented.
/// </summary>
// NOTE(review): the formatter slot's function-pointer signature below is reconstructed from the
// native ImPlotFormatter typedef `int (*)(double value, char* buff, int size, void* user_data)` —
// confirm against the generator output.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotTick* AddTickNative(ImPlotTicker* self, double value, byte major, int level, byte showLabel, ImPlotFormatter formatter, void* data)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotTicker*, double, byte, int, byte, delegate*<double, byte*, int, void*, int>, void*, ImPlotTick*>)funcTable[492])(self, value, major, level, showLabel, (delegate*<double, byte*, int, void*, int>)Utils.GetFunctionPointerForDelegate(formatter), data);
#else
	return (ImPlotTick*)((delegate* unmanaged[Cdecl]<nint, double, byte, int, byte, nint, nint, nint>)funcTable[492])((nint)self, value, major, level, showLabel, (nint)Utils.GetFunctionPointerForDelegate(formatter), (nint)data);
#endif
}

/// <summary>To be documented.</summary>
public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, ImPlotFormatter formatter, void* data) { ImPlotTickPtr ret = AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, formatter, data); return ret; }

/// <summary>To be documented.</summary>
public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, ImPlotFormatter formatter, void* data)
{
	fixed (ImPlotTicker* pself = &self)
	{
		ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, formatter, data);
		return ret;
	}
}

/// <summary>Native <c>AddTick</c> taking a prebuilt tick by value (funcTable slot 493).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotTick* AddTickNative(ImPlotTicker* self, ImPlotTick tick)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotTicker*, ImPlotTick, ImPlotTick*>)funcTable[493])(self, tick);
#else
	return (ImPlotTick*)((delegate* unmanaged[Cdecl]<nint, ImPlotTick, nint>)funcTable[493])((nint)self, tick);
#endif
}

/// <summary>To be documented.</summary>
public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, ImPlotTick tick) { ImPlotTickPtr ret = AddTickNative(self, tick); return ret; }

/// <summary>
/// To be documented.
/// </summary>
public static ImPlotTickPtr AddTick(ref ImPlotTicker self, ImPlotTick tick)
{
	fixed (ImPlotTicker* pself = &self)
	{
		ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, tick);
		return ret;
	}
}

/// <summary>Native <c>GetText</c> by tick index (funcTable slot 494); returns a UTF-8 pointer.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte* GetTextNative(ImPlotTicker* self, int idx)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotTicker*, int, byte*>)funcTable[494])(self, idx);
#else
	return (byte*)((delegate* unmanaged[Cdecl]<nint, int, nint>)funcTable[494])((nint)self, idx);
#endif
}

/// <summary>To be documented.</summary>
public static byte* GetText(ImPlotTickerPtr self, int idx) { byte* ret = GetTextNative(self, idx); return ret; }

/// <summary>Like <see cref="GetText(ImPlotTickerPtr, int)"/> but decodes the result to a managed string.</summary>
public static string GetTextS(ImPlotTickerPtr self, int idx) { string ret = Utils.DecodeStringUTF8(GetTextNative(self, idx)); return ret; }

/// <summary>To be documented.</summary>
public static byte* GetText(ref ImPlotTicker self, int idx)
{
	fixed (ImPlotTicker* pself = &self)
	{
		byte* ret = GetTextNative((ImPlotTicker*)pself, idx);
		return ret;
	}
}

/// <summary>To be documented.</summary>
public static string GetTextS(ref ImPlotTicker self, int idx)
{
	fixed (ImPlotTicker* pself = &self)
	{
		string ret = Utils.DecodeStringUTF8(GetTextNative((ImPlotTicker*)pself, idx));
		return ret;
	}
}

/// <summary>Native <c>GetText</c> by tick value (funcTable slot 495).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte* GetTextNative(ImPlotTicker* self, ImPlotTick tick)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotTicker*, ImPlotTick, byte*>)funcTable[495])(self, tick);
#else
	return (byte*)((delegate* unmanaged[Cdecl]<nint, ImPlotTick, nint>)funcTable[495])((nint)self, tick);
#endif
}

/// <summary>To be documented.</summary>
public static byte* GetText(ImPlotTickerPtr self, ImPlotTick tick) { byte* ret = GetTextNative(self, tick); return ret; }

/// <summary>To be documented.</summary>
public static string GetTextS(ImPlotTickerPtr self, ImPlotTick tick) { string ret = Utils.DecodeStringUTF8(GetTextNative(self, tick)); return ret; }

/// <summary>
/// To be documented.
/// </summary>
public static byte* GetText(ref ImPlotTicker self, ImPlotTick tick)
{
	fixed (ImPlotTicker* pself = &self)
	{
		byte* ret = GetTextNative((ImPlotTicker*)pself, tick);
		return ret;
	}
}

/// <summary>To be documented.</summary>
public static string GetTextS(ref ImPlotTicker self, ImPlotTick tick)
{
	fixed (ImPlotTicker* pself = &self)
	{
		string ret = Utils.DecodeStringUTF8(GetTextNative((ImPlotTicker*)pself, tick));
		return ret;
	}
}

/// <summary>Native <c>OverrideSizeLate</c> (funcTable slot 496).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void OverrideSizeLateNative(ImPlotTicker* self, Vector2 size)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotTicker*, Vector2, void>)funcTable[496])(self, size);
#else
	((delegate* unmanaged[Cdecl]<nint, Vector2, void>)funcTable[496])((nint)self, size);
#endif
}

/// <summary>To be documented.</summary>
public static void OverrideSizeLate(ImPlotTickerPtr self, Vector2 size) { OverrideSizeLateNative(self, size); }

/// <summary>To be documented.</summary>
public static void OverrideSizeLate(ref ImPlotTicker self, Vector2 size) { fixed (ImPlotTicker* pself = &self) { OverrideSizeLateNative((ImPlotTicker*)pself, size); } }

/// <summary>Native <c>Reset</c> for <c>ImPlotTicker</c> (funcTable slot 497).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void ResetNative(ImPlotTicker* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotTicker*, void>)funcTable[497])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[497])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static void Reset(ImPlotTickerPtr self) { ResetNative(self); }

/// <summary>To be documented.</summary>
public static void Reset(ref ImPlotTicker self) { fixed (ImPlotTicker* pself = &self) { ResetNative((ImPlotTicker*)pself); } }

/// <summary>Native <c>TickCount</c> (funcTable slot 498).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static int TickCountNative(ImPlotTicker* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotTicker*, int>)funcTable[498])(self);
#else
	return (int)((delegate* unmanaged[Cdecl]<nint, int>)funcTable[498])((nint)self);
#endif
}

/// <summary>
/// To be documented.
/// </summary>
public static int TickCount(ImPlotTickerPtr self) { int ret = TickCountNative(self); return ret; }

/// <summary>To be documented.</summary>
public static int TickCount(ref ImPlotTicker self)
{
	fixed (ImPlotTicker* pself = &self)
	{
		int ret = TickCountNative((ImPlotTicker*)pself);
		return ret;
	}
}

/// <summary>Native constructor for <c>ImPlotAxis</c> (funcTable slot 499).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotAxis* ImPlotAxisNative()
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*>)funcTable[499])();
#else
	return (ImPlotAxis*)((delegate* unmanaged[Cdecl]<nint>)funcTable[499])();
#endif
}

/// <summary>To be documented.</summary>
public static ImPlotAxisPtr ImPlotAxis() { ImPlotAxisPtr ret = ImPlotAxisNative(); return ret; }

/// <summary>Native destructor for <c>ImPlotAxis</c> (funcTable slot 500).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void DestroyNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAxis*, void>)funcTable[500])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[500])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static void Destroy(ImPlotAxisPtr self) { DestroyNative(self); }

/// <summary>To be documented.</summary>
public static void Destroy(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { DestroyNative((ImPlotAxis*)pself); } }

/// <summary>Native <c>Reset</c> for <c>ImPlotAxis</c> (funcTable slot 501).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void ResetNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAxis*, void>)funcTable[501])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[501])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static void Reset(ImPlotAxisPtr self) { ResetNative(self); }

/// <summary>To be documented.</summary>
public static void Reset(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { ResetNative((ImPlotAxis*)pself); } }

/// <summary>
/// To be documented.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte SetMinNative(ImPlotAxis* self, double min, byte force)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, double, byte, byte>)funcTable[502])(self, min, force);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, double, byte, byte>)funcTable[502])((nint)self, min, force);
#endif
}

/// <summary>To be documented.</summary>
public static bool SetMin(ImPlotAxisPtr self, double min, bool force) { byte ret = SetMinNative(self, min, force ? (byte)1 : (byte)0); return ret != 0; }

/// <summary>Overload with <c>force</c> defaulted to false.</summary>
public static bool SetMin(ImPlotAxisPtr self, double min) { byte ret = SetMinNative(self, min, (byte)(0)); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool SetMin(ref ImPlotAxis self, double min, bool force)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = SetMinNative((ImPlotAxis*)pself, min, force ? (byte)1 : (byte)0);
		return ret != 0;
	}
}

/// <summary>Overload with <c>force</c> defaulted to false.</summary>
public static bool SetMin(ref ImPlotAxis self, double min)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = SetMinNative((ImPlotAxis*)pself, min, (byte)(0));
		return ret != 0;
	}
}

/// <summary>Native <c>SetMax</c> (funcTable slot 503).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte SetMaxNative(ImPlotAxis* self, double max, byte force)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, double, byte, byte>)funcTable[503])(self, max, force);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, double, byte, byte>)funcTable[503])((nint)self, max, force);
#endif
}

/// <summary>To be documented.</summary>
public static bool SetMax(ImPlotAxisPtr self, double max, bool force) { byte ret = SetMaxNative(self, max, force ? (byte)1 : (byte)0); return ret != 0; }

/// <summary>Overload with <c>force</c> defaulted to false.</summary>
public static bool SetMax(ImPlotAxisPtr self, double max) { byte ret = SetMaxNative(self, max, (byte)(0)); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool SetMax(ref ImPlotAxis self, double max, bool force)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = SetMaxNative((ImPlotAxis*)pself, max, force ? (byte)1 : (byte)0);
		return ret != 0;
	}
}

/// <summary>Overload with <c>force</c> defaulted to false.</summary>
public static bool SetMax(ref ImPlotAxis self, double max)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = SetMaxNative((ImPlotAxis*)pself, max, (byte)(0));
		return ret != 0;
	}
}

/// <summary>Native <c>SetRange</c> taking two doubles (funcTable slot 504).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void SetRangeNative(ImPlotAxis* self, double v1, double v2)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAxis*, double, double, void>)funcTable[504])(self, v1, v2);
#else
	((delegate* unmanaged[Cdecl]<nint, double, double, void>)funcTable[504])((nint)self, v1, v2);
#endif
}

/// <summary>To be documented.</summary>
public static void SetRange(ImPlotAxisPtr self, double v1, double v2) { SetRangeNative(self, v1, v2); }

/// <summary>To be documented.</summary>
public static void SetRange(ref ImPlotAxis self, double v1, double v2) { fixed (ImPlotAxis* pself = &self) { SetRangeNative((ImPlotAxis*)pself, v1, v2); } }

/// <summary>Native <c>SetRange</c> taking an <c>ImPlotRange</c> by value (funcTable slot 505).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void SetRangeNative(ImPlotAxis* self, ImPlotRange range)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAxis*, ImPlotRange, void>)funcTable[505])(self, range);
#else
	((delegate* unmanaged[Cdecl]<nint, ImPlotRange, void>)funcTable[505])((nint)self, range);
#endif
}

/// <summary>To be documented.</summary>
public static void SetRange(ImPlotAxisPtr self, ImPlotRange range) { SetRangeNative(self, range); }

/// <summary>To be documented.</summary>
public static void SetRange(ref ImPlotAxis self, ImPlotRange range) { fixed (ImPlotAxis* pself = &self) { SetRangeNative((ImPlotAxis*)pself, range); } }

/// <summary>Native <c>SetAspect</c> (funcTable slot 506).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void SetAspectNative(ImPlotAxis* self, double unitPerPix)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAxis*, double, void>)funcTable[506])(self, unitPerPix);
#else
	((delegate* unmanaged[Cdecl]<nint, double, void>)funcTable[506])((nint)self, unitPerPix);
#endif
}

/// <summary>
/// To be documented.
/// </summary>
public static void SetAspect(ImPlotAxisPtr self, double unitPerPix) { SetAspectNative(self, unitPerPix); }

/// <summary>To be documented.</summary>
public static void SetAspect(ref ImPlotAxis self, double unitPerPix) { fixed (ImPlotAxis* pself = &self) { SetAspectNative((ImPlotAxis*)pself, unitPerPix); } }

/// <summary>Native <c>PixelSize</c> (funcTable slot 507).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static float PixelSizeNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, float>)funcTable[507])(self);
#else
	return (float)((delegate* unmanaged[Cdecl]<nint, float>)funcTable[507])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static float PixelSize(ImPlotAxisPtr self) { float ret = PixelSizeNative(self); return ret; }

/// <summary>To be documented.</summary>
public static float PixelSize(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		float ret = PixelSizeNative((ImPlotAxis*)pself);
		return ret;
	}
}

/// <summary>Native <c>GetAspect</c> (funcTable slot 508).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double GetAspectNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, double>)funcTable[508])(self);
#else
	return (double)((delegate* unmanaged[Cdecl]<nint, double>)funcTable[508])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static double GetAspect(ImPlotAxisPtr self) { double ret = GetAspectNative(self); return ret; }

/// <summary>To be documented.</summary>
public static double GetAspect(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		double ret = GetAspectNative((ImPlotAxis*)pself);
		return ret;
	}
}

/// <summary>Native <c>Constrain</c> (funcTable slot 509).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void ConstrainNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAxis*, void>)funcTable[509])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[509])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static void Constrain(ImPlotAxisPtr self) { ConstrainNative(self); }

/// <summary>
/// To be documented.
/// </summary>
public static void Constrain(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { ConstrainNative((ImPlotAxis*)pself); } }

/// <summary>Native <c>UpdateTransformCache</c> (funcTable slot 510).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void UpdateTransformCacheNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAxis*, void>)funcTable[510])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[510])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static void UpdateTransformCache(ImPlotAxisPtr self) { UpdateTransformCacheNative(self); }

/// <summary>To be documented.</summary>
public static void UpdateTransformCache(ref ImPlotAxis self) { fixed (ImPlotAxis* pself = &self) { UpdateTransformCacheNative((ImPlotAxis*)pself); } }

/// <summary>Native <c>PlotToPixels</c> (funcTable slot 511).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static float PlotToPixelsNative(ImPlotAxis* self, double plt)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, double, float>)funcTable[511])(self, plt);
#else
	return (float)((delegate* unmanaged[Cdecl]<nint, double, float>)funcTable[511])((nint)self, plt);
#endif
}

/// <summary>To be documented.</summary>
public static float PlotToPixels(ImPlotAxisPtr self, double plt) { float ret = PlotToPixelsNative(self, plt); return ret; }

/// <summary>To be documented.</summary>
public static float PlotToPixels(ref ImPlotAxis self, double plt)
{
	fixed (ImPlotAxis* pself = &self)
	{
		float ret = PlotToPixelsNative((ImPlotAxis*)pself, plt);
		return ret;
	}
}

/// <summary>Native <c>PixelsToPlot</c> (funcTable slot 512).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double PixelsToPlotNative(ImPlotAxis* self, float pix)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, float, double>)funcTable[512])(self, pix);
#else
	return (double)((delegate* unmanaged[Cdecl]<nint, float, double>)funcTable[512])((nint)self, pix);
#endif
}

/// <summary>To be documented.</summary>
public static double PixelsToPlot(ImPlotAxisPtr self, float pix) { double ret = PixelsToPlotNative(self, pix); return ret; }

/// <summary>
/// To be documented.
/// </summary>
public static double PixelsToPlot(ref ImPlotAxis self, float pix)
{
	fixed (ImPlotAxis* pself = &self)
	{
		double ret = PixelsToPlotNative((ImPlotAxis*)pself, pix);
		return ret;
	}
}

/// <summary>Native <c>ExtendFit</c> (funcTable slot 513).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void ExtendFitNative(ImPlotAxis* self, double v)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAxis*, double, void>)funcTable[513])(self, v);
#else
	((delegate* unmanaged[Cdecl]<nint, double, void>)funcTable[513])((nint)self, v);
#endif
}

/// <summary>To be documented.</summary>
public static void ExtendFit(ImPlotAxisPtr self, double v) { ExtendFitNative(self, v); }

/// <summary>To be documented.</summary>
public static void ExtendFit(ref ImPlotAxis self, double v) { fixed (ImPlotAxis* pself = &self) { ExtendFitNative((ImPlotAxis*)pself, v); } }

/// <summary>Native <c>ExtendFitWith</c> (funcTable slot 514).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void ExtendFitWithNative(ImPlotAxis* self, ImPlotAxis* alt, double v, double vAlt)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAxis*, ImPlotAxis*, double, double, void>)funcTable[514])(self, alt, v, vAlt);
#else
	((delegate* unmanaged[Cdecl]<nint, nint, double, double, void>)funcTable[514])((nint)self, (nint)alt, v, vAlt);
#endif
}

/// <summary>To be documented.</summary>
public static void ExtendFitWith(ImPlotAxisPtr self, ImPlotAxisPtr alt, double v, double vAlt) { ExtendFitWithNative(self, alt, v, vAlt); }

/// <summary>To be documented.</summary>
public static void ExtendFitWith(ref ImPlotAxis self, ImPlotAxisPtr alt, double v, double vAlt) { fixed (ImPlotAxis* pself = &self) { ExtendFitWithNative((ImPlotAxis*)pself, alt, v, vAlt); } }

/// <summary>To be documented.</summary>
public static void ExtendFitWith(ImPlotAxisPtr self, ref ImPlotAxis alt, double v, double vAlt) { fixed (ImPlotAxis* palt = &alt) { ExtendFitWithNative(self, (ImPlotAxis*)palt, v, vAlt); } }

/// <summary>
/// To be documented.
/// </summary>
public static void ExtendFitWith(ref ImPlotAxis self, ref ImPlotAxis alt, double v, double vAlt)
{
	fixed (ImPlotAxis* pself = &self)
	{
		fixed (ImPlotAxis* palt = &alt)
		{
			ExtendFitWithNative((ImPlotAxis*)pself, (ImPlotAxis*)palt, v, vAlt);
		}
	}
}

/// <summary>Native <c>ApplyFit</c> (funcTable slot 515).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void ApplyFitNative(ImPlotAxis* self, float padding)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAxis*, float, void>)funcTable[515])(self, padding);
#else
	((delegate* unmanaged[Cdecl]<nint, float, void>)funcTable[515])((nint)self, padding);
#endif
}

/// <summary>To be documented.</summary>
public static void ApplyFit(ImPlotAxisPtr self, float padding) { ApplyFitNative(self, padding); }

/// <summary>To be documented.</summary>
public static void ApplyFit(ref ImPlotAxis self, float padding) { fixed (ImPlotAxis* pself = &self) { ApplyFitNative((ImPlotAxis*)pself, padding); } }

/// <summary>Native <c>HasLabel</c> (funcTable slot 516); returns a C bool as byte.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte HasLabelNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[516])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[516])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool HasLabel(ImPlotAxisPtr self) { byte ret = HasLabelNative(self); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool HasLabel(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = HasLabelNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>HasGridLines</c> (funcTable slot 517).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte HasGridLinesNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[517])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[517])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool HasGridLines(ImPlotAxisPtr self) { byte ret = HasGridLinesNative(self); return ret != 0; }

/// <summary>
/// To be documented.
/// </summary>
public static bool HasGridLines(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = HasGridLinesNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>HasTickLabels</c> (funcTable slot 518).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte HasTickLabelsNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[518])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[518])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool HasTickLabels(ImPlotAxisPtr self) { byte ret = HasTickLabelsNative(self); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool HasTickLabels(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = HasTickLabelsNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>HasTickMarks</c> (funcTable slot 519).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte HasTickMarksNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[519])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[519])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool HasTickMarks(ImPlotAxisPtr self) { byte ret = HasTickMarksNative(self); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool HasTickMarks(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = HasTickMarksNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>WillRender</c> (funcTable slot 520).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte WillRenderNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[520])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[520])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool WillRender(ImPlotAxisPtr self) { byte ret = WillRenderNative(self); return ret != 0; }

/// <summary>
/// To be documented.
/// </summary>
public static bool WillRender(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = WillRenderNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>IsOpposite</c> (funcTable slot 521).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsOppositeNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[521])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[521])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool IsOpposite(ImPlotAxisPtr self) { byte ret = IsOppositeNative(self); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool IsOpposite(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = IsOppositeNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>IsInverted</c> (funcTable slot 522).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsInvertedNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[522])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[522])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool IsInverted(ImPlotAxisPtr self) { byte ret = IsInvertedNative(self); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool IsInverted(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = IsInvertedNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>IsForeground</c> (funcTable slot 523).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsForegroundNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[523])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[523])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool IsForeground(ImPlotAxisPtr self) { byte ret = IsForegroundNative(self); return ret != 0; }

/// <summary>
/// To be documented.
/// </summary>
public static bool IsForeground(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = IsForegroundNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>IsAutoFitting</c> (funcTable slot 524).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsAutoFittingNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[524])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[524])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool IsAutoFitting(ImPlotAxisPtr self) { byte ret = IsAutoFittingNative(self); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool IsAutoFitting(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = IsAutoFittingNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>CanInitFit</c> (funcTable slot 525).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte CanInitFitNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[525])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[525])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool CanInitFit(ImPlotAxisPtr self) { byte ret = CanInitFitNative(self); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool CanInitFit(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = CanInitFitNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>IsRangeLocked</c> (funcTable slot 526).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsRangeLockedNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[526])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[526])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool IsRangeLocked(ImPlotAxisPtr self) { byte ret = IsRangeLockedNative(self); return ret != 0; }

/// <summary>
/// To be documented.
/// </summary>
public static bool IsRangeLocked(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = IsRangeLockedNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>IsLockedMin</c> (funcTable slot 527).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsLockedMinNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[527])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[527])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool IsLockedMin(ImPlotAxisPtr self) { byte ret = IsLockedMinNative(self); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool IsLockedMin(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = IsLockedMinNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>IsLockedMax</c> (funcTable slot 528).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsLockedMaxNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[528])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[528])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool IsLockedMax(ImPlotAxisPtr self) { byte ret = IsLockedMaxNative(self); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool IsLockedMax(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = IsLockedMaxNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>IsLocked</c> (funcTable slot 529).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsLockedNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[529])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[529])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool IsLocked(ImPlotAxisPtr self) { byte ret = IsLockedNative(self); return ret != 0; }

/// <summary>
/// To be documented.
/// </summary>
public static bool IsLocked(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = IsLockedNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>IsInputLockedMin</c> (funcTable slot 530).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsInputLockedMinNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[530])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[530])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool IsInputLockedMin(ImPlotAxisPtr self) { byte ret = IsInputLockedMinNative(self); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool IsInputLockedMin(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = IsInputLockedMinNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>IsInputLockedMax</c> (funcTable slot 531).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsInputLockedMaxNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[531])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[531])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool IsInputLockedMax(ImPlotAxisPtr self) { byte ret = IsInputLockedMaxNative(self); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool IsInputLockedMax(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = IsInputLockedMaxNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>IsInputLocked</c> (funcTable slot 532).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsInputLockedNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[532])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[532])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool IsInputLocked(ImPlotAxisPtr self) { byte ret = IsInputLockedNative(self); return ret != 0; }

/// <summary>
/// To be documented.
/// </summary>
public static bool IsInputLocked(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = IsInputLockedNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>HasMenus</c> (funcTable slot 533).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte HasMenusNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[533])(self);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[533])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static bool HasMenus(ImPlotAxisPtr self) { byte ret = HasMenusNative(self); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool HasMenus(ref ImPlotAxis self)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = HasMenusNative((ImPlotAxis*)pself);
		return ret != 0;
	}
}

/// <summary>Native <c>IsPanLocked</c> (funcTable slot 534).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsPanLockedNative(ImPlotAxis* self, byte increasing)
{
#if NET5_0_OR_GREATER
	return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte, byte>)funcTable[534])(self, increasing);
#else
	return (byte)((delegate* unmanaged[Cdecl]<nint, byte, byte>)funcTable[534])((nint)self, increasing);
#endif
}

/// <summary>To be documented.</summary>
public static bool IsPanLocked(ImPlotAxisPtr self, bool increasing) { byte ret = IsPanLockedNative(self, increasing ? (byte)1 : (byte)0); return ret != 0; }

/// <summary>To be documented.</summary>
public static bool IsPanLocked(ref ImPlotAxis self, bool increasing)
{
	fixed (ImPlotAxis* pself = &self)
	{
		byte ret = IsPanLockedNative((ImPlotAxis*)pself, increasing ? (byte)1 : (byte)0);
		return ret != 0;
	}
}

/// <summary>Native <c>PushLinks</c> (funcTable slot 535).</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void PushLinksNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
	((delegate* unmanaged[Cdecl]<ImPlotAxis*, void>)funcTable[535])(self);
#else
	((delegate* unmanaged[Cdecl]<nint, void>)funcTable[535])((nint)self);
#endif
}

/// <summary>To be documented.</summary>
public static void PushLinks(ImPlotAxisPtr self) { PushLinksNative(self); }

/// <summary>
/// To be documented.
/// </summary>
public static void PushLinks(ref ImPlotAxis self)
{
    // Pin the managed reference so the native call sees a stable pointer.
    fixed (ImPlotAxis* pself = &self)
    {
        PushLinksNative((ImPlotAxis*)pself);
    }
}

/// <summary>
/// Raw native call for ImPlotAxis PullLinks (funcTable slot 536).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void PullLinksNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
    ((delegate* unmanaged[Cdecl]<ImPlotAxis*, void>)funcTable[536])(self);
#else
    ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[536])((nint)self);
#endif
}

/// <summary>
/// Calls the native PullLinks function for the given axis pointer.
/// </summary>
public static void PullLinks(ImPlotAxisPtr self)
{
    PullLinksNative(self);
}

/// <summary>
/// Calls the native PullLinks function, pinning the managed reference for the call.
/// </summary>
public static void PullLinks(ref ImPlotAxis self)
{
    fixed (ImPlotAxis* pself = &self)
    {
        PullLinksNative((ImPlotAxis*)pself);
    }
}

/// <summary>
/// Raw native constructor call for ImPlotAlignmentData (funcTable slot 537).
/// Returns a pointer to a natively allocated instance.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotAlignmentData* ImPlotAlignmentDataNative()
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<ImPlotAlignmentData*>)funcTable[537])();
#else
    return (ImPlotAlignmentData*)((delegate* unmanaged[Cdecl]<nint>)funcTable[537])();
#endif
}

/// <summary>
/// Constructs a natively allocated ImPlotAlignmentData. The caller is responsible for
/// releasing it via <see cref="Destroy(ImPlotAlignmentDataPtr)"/>.
/// </summary>
public static ImPlotAlignmentDataPtr ImPlotAlignmentData()
{
    ImPlotAlignmentDataPtr ret = ImPlotAlignmentDataNative();
    return ret;
}

/// <summary>
/// Raw native destructor call for ImPlotAlignmentData (funcTable slot 538).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void DestroyNative(ImPlotAlignmentData* self)
{
#if NET5_0_OR_GREATER
    ((delegate* unmanaged[Cdecl]<ImPlotAlignmentData*, void>)funcTable[538])(self);
#else
    ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[538])((nint)self);
#endif
}

/// <summary>
/// Destroys the given natively allocated ImPlotAlignmentData instance.
/// </summary>
public static void Destroy(ImPlotAlignmentDataPtr self)
{
    DestroyNative(self);
}

/// <summary>
/// Destroys the ImPlotAlignmentData instance, pinning the managed reference for the call.
/// </summary>
public static void Destroy(ref ImPlotAlignmentData self)
{
    fixed (ImPlotAlignmentData* pself = &self)
    {
        DestroyNative((ImPlotAlignmentData*)pself);
    }
}

/// <summary>
/// Raw native call for ImPlotAlignmentData Begin (funcTable slot 539).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void BeginNative(ImPlotAlignmentData* self)
{
#if NET5_0_OR_GREATER
    ((delegate* unmanaged[Cdecl]<ImPlotAlignmentData*, void>)funcTable[539])(self);
#else
    ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[539])((nint)self);
#endif
}

/// <summary>
/// Calls the native Begin function for the given alignment-data pointer.
/// </summary>
public static void Begin(ImPlotAlignmentDataPtr self)
{
    BeginNative(self);
}

/// <summary>
/// Calls the native Begin function, pinning the managed reference for the call.
/// </summary>
public static void Begin(ref ImPlotAlignmentData self)
{
    fixed (ImPlotAlignmentData* pself = &self)
    {
        BeginNative((ImPlotAlignmentData*)pself);
    }
}

/// <summary>
/// Raw native call for ImPlotAlignmentData Update (funcTable slot 540).
/// All four float pointers are passed through to native code unchanged.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void UpdateNative(ImPlotAlignmentData* self, float* padA, float* padB, float* deltaA, float* deltaB)
{
#if NET5_0_OR_GREATER
    ((delegate* unmanaged[Cdecl]<ImPlotAlignmentData*, float*, float*, float*, float*, void>)funcTable[540])(self, padA, padB, deltaA, deltaB);
#else
    ((delegate* unmanaged[Cdecl]<nint, nint, nint, nint, nint, void>)funcTable[540])((nint)self, (nint)padA, (nint)padB, (nint)deltaA, (nint)deltaB);
#endif
}

/// <summary>
/// Calls the native Update function with raw pointers for all arguments.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, float* padA, float* padB, float* deltaA, float* deltaB)
{
    UpdateNative(self, padA, padB, deltaA, deltaB);
}

/// <summary>
/// Calls the native Update function, pinning the managed self reference for the call.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, float* padA, float* padB, float* deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* pself = &self)
    {
        UpdateNative((ImPlotAlignmentData*)pself, padA, padB, deltaA, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed padA reference for the call.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, ref float padA, float* padB, float* deltaA, float* deltaB)
{
    fixed (float* ppadA = &padA)
    {
        UpdateNative(self, (float*)ppadA, padB, deltaA, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed self and padA references for the call.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, ref float padA, float* padB, float* deltaA, float* deltaB)
{
    // Stack the fixed statements: both pins live for the duration of the native call.
    fixed (ImPlotAlignmentData* selfPtr = &self)
    fixed (float* padAPtr = &padA)
    {
        UpdateNative(selfPtr, padAPtr, padB, deltaA, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed padB reference for the call.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, float* padA, ref float padB, float* deltaA, float* deltaB)
{
    fixed (float* padBPtr = &padB)
    {
        UpdateNative(self, padA, padBPtr, deltaA, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed self and padB references for the call.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, float* padA, ref float padB, float* deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* selfPtr = &self)
    fixed (float* padBPtr = &padB)
    {
        UpdateNative(selfPtr, padA, padBPtr, deltaA, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed padA and padB references for the call.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, ref float padA, ref float padB, float* deltaA, float* deltaB)
{
    // Same pointer type, so a single fixed statement can pin both floats.
    fixed (float* padAPtr = &padA, padBPtr = &padB)
    {
        UpdateNative(self, padAPtr, padBPtr, deltaA, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed self, padA, and padB references for the call.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, ref float padA, ref float padB, float* deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* selfPtr = &self)
    fixed (float* padAPtr = &padA, padBPtr = &padB)
    {
        UpdateNative(selfPtr, padAPtr, padBPtr, deltaA, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed deltaA reference for the call.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, float* padA, float* padB, ref float deltaA, float* deltaB)
{
    fixed (float* deltaAPtr = &deltaA)
    {
        UpdateNative(self, padA, padB, deltaAPtr, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed self and deltaA references for the call.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, float* padA, float* padB, ref float deltaA, float* deltaB)
{
    // Stack the fixed statements: both pins live for the duration of the native call.
    fixed (ImPlotAlignmentData* selfPtr = &self)
    fixed (float* deltaAPtr = &deltaA)
    {
        UpdateNative(selfPtr, padA, padB, deltaAPtr, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed padA and deltaA references for the call.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, ref float padA, float* padB, ref float deltaA, float* deltaB)
{
    // Same pointer type, so a single fixed statement can pin both floats.
    fixed (float* padAPtr = &padA, deltaAPtr = &deltaA)
    {
        UpdateNative(self, padAPtr, padB, deltaAPtr, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed self, padA, and deltaA references for the call.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, ref float padA, float* padB, ref float deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* selfPtr = &self)
    fixed (float* padAPtr = &padA, deltaAPtr = &deltaA)
    {
        UpdateNative(selfPtr, padAPtr, padB, deltaAPtr, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed padB and deltaA references for the call.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, float* padA, ref float padB, ref float deltaA, float* deltaB)
{
    fixed (float* padBPtr = &padB, deltaAPtr = &deltaA)
    {
        UpdateNative(self, padA, padBPtr, deltaAPtr, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed self, padB, and deltaA references for the call.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, float* padA, ref float padB, ref float deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* selfPtr = &self)
    fixed (float* padBPtr = &padB, deltaAPtr = &deltaA)
    {
        UpdateNative(selfPtr, padA, padBPtr, deltaAPtr, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed padA, padB, and deltaA references for the call.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, ref float padA, ref float padB, ref float deltaA, float* deltaB)
{
    fixed (float* padAPtr = &padA, padBPtr = &padB, deltaAPtr = &deltaA)
    {
        UpdateNative(self, padAPtr, padBPtr, deltaAPtr, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed self, padA, padB, and deltaA references for the call.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, ref float padA, ref float padB, ref float deltaA, float* deltaB)
{
    // Stack the fixed statements: all pins live for the duration of the native call.
    fixed (ImPlotAlignmentData* selfPtr = &self)
    fixed (float* padAPtr = &padA, padBPtr = &padB, deltaAPtr = &deltaA)
    {
        UpdateNative(selfPtr, padAPtr, padBPtr, deltaAPtr, deltaB);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed deltaB reference for the call.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, float* padA, float* padB, float* deltaA, ref float deltaB)
{
    fixed (float* deltaBPtr = &deltaB)
    {
        UpdateNative(self, padA, padB, deltaA, deltaBPtr);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed self and deltaB references for the call.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, float* padA, float* padB, float* deltaA, ref float deltaB)
{
    fixed (ImPlotAlignmentData* selfPtr = &self)
    fixed (float* deltaBPtr = &deltaB)
    {
        UpdateNative(selfPtr, padA, padB, deltaA, deltaBPtr);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed padA and deltaB references for the call.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, ref float padA, float* padB, float* deltaA, ref float deltaB)
{
    // Same pointer type, so a single fixed statement can pin both floats.
    fixed (float* padAPtr = &padA, deltaBPtr = &deltaB)
    {
        UpdateNative(self, padAPtr, padB, deltaA, deltaBPtr);
    }
}

/// <summary>
/// Calls the native Update function, pinning the managed self, padA, and deltaB references for the call.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, ref float padA, float* padB, float* deltaA, ref float deltaB)
{
    fixed (ImPlotAlignmentData* selfPtr = &self)
    fixed (float* padAPtr = &padA, deltaBPtr = &deltaB)
    {
        UpdateNative(selfPtr, padAPtr, padB, deltaA, deltaBPtr);
    }
}
}
}