mirror of https://github.com/goatcorp/Dalamud.git
synced 2025-12-15 05:04:15 +01:00
5032 lines · 131 KiB · C#
// ------------------------------------------------------------------------------
|
|
// <auto-generated>
|
|
// This code was generated by a tool.
|
|
//
|
|
// Changes to this file may cause incorrect behavior and will be lost if
|
|
// the code is regenerated.
|
|
// </auto-generated>
|
|
// ------------------------------------------------------------------------------
|
|
|
|
using System;
|
|
using System.Runtime.CompilerServices;
|
|
using System.Runtime.InteropServices;
|
|
using HexaGen.Runtime;
|
|
using System.Numerics;
|
|
using Dalamud.Bindings.ImGui;
|
|
|
|
namespace Dalamud.Bindings.ImPlot
|
|
{
|
|
public unsafe partial class ImPlot
|
|
{
|
|
|
|
/// <summary>
/// Forwards to the native ImPlot <c>ImMean</c> export for signed 64-bit samples.
/// Presumably computes the arithmetic mean of <paramref name="count"/> values — name-based;
/// the native ImPlot implementation is the source of truth.
/// </summary>
/// <param name="values">Pointer to the first element of the sample buffer.</param>
/// <param name="count">Number of elements read from <paramref name="values"/>.</param>
/// <returns>The value returned by the native call.</returns>
public static double ImMean(long* values, int count)
{
    double ret = ImMeanNative(values, count);
    return ret;
}

/// <summary>
/// Overload of <see cref="ImMean(long*, int)"/> that pins a managed <see langword="ref"/>
/// (e.g. the first element of an array) for the duration of the native call.
/// </summary>
/// <param name="values">Reference to the first element of the sample buffer.</param>
/// <param name="count">Number of elements read starting at <paramref name="values"/>.</param>
/// <returns>The value returned by the native call.</returns>
public static double ImMean(ref long values, int count)
{
    fixed (long* pvalues = &values)
    {
        double ret = ImMeanNative((long*)pvalues, count);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImMean</c> over unsigned 64-bit samples (funcTable slot 426).
/// On .NET 5+ the pointer is passed through a typed unmanaged function pointer; on older
/// targets the pointer is marshalled as <see cref="nint"/> instead (pointer types are not
/// allowed in function-pointer signatures there).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double ImMeanNative(ulong* values, int count)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<ulong*, int, double>)funcTable[426])(values, count);
#else
    return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[426])((nint)values, count);
#endif
}

/// <summary>
/// Pointer-based public wrapper over <see cref="ImMeanNative(ulong*, int)"/>.
/// </summary>
public static double ImMean(ulong* values, int count)
{
    double ret = ImMeanNative(values, count);
    return ret;
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> (e.g. array element) before the native call.
/// </summary>
public static double ImMean(ref ulong values, int count)
{
    fixed (ulong* pvalues = &values)
    {
        double ret = ImMeanNative((ulong*)pvalues, count);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImStdDev</c> over <see cref="float"/> samples (funcTable slot 427).
/// Presumably computes the standard deviation of <paramref name="count"/> values — name-based;
/// the native ImPlot implementation is the source of truth.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double ImStdDevNative(float* values, int count)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<float*, int, double>)funcTable[427])(values, count);
#else
    return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[427])((nint)values, count);
#endif
}

/// <summary>
/// Pointer-based public wrapper over <see cref="ImStdDevNative(float*, int)"/>.
/// </summary>
public static double ImStdDev(float* values, int count)
{
    double ret = ImStdDevNative(values, count);
    return ret;
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> (e.g. array element) before the native call.
/// </summary>
public static double ImStdDev(ref float values, int count)
{
    fixed (float* pvalues = &values)
    {
        double ret = ImStdDevNative((float*)pvalues, count);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImStdDev</c> over <see cref="double"/> samples (funcTable slot 428).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double ImStdDevNative(double* values, int count)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<double*, int, double>)funcTable[428])(values, count);
#else
    return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[428])((nint)values, count);
#endif
}

/// <summary>
/// Pointer-based public wrapper over <see cref="ImStdDevNative(double*, int)"/>.
/// </summary>
public static double ImStdDev(double* values, int count)
{
    double ret = ImStdDevNative(values, count);
    return ret;
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> (e.g. array element) before the native call.
/// </summary>
public static double ImStdDev(ref double values, int count)
{
    fixed (double* pvalues = &values)
    {
        double ret = ImStdDevNative((double*)pvalues, count);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImStdDev</c> over <see cref="byte"/> samples (funcTable slot 429).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double ImStdDevNative(byte* values, int count)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<byte*, int, double>)funcTable[429])(values, count);
#else
    return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[429])((nint)values, count);
#endif
}

/// <summary>
/// Pointer-based public wrapper over <see cref="ImStdDevNative(byte*, int)"/>.
/// </summary>
public static double ImStdDev(byte* values, int count)
{
    double ret = ImStdDevNative(values, count);
    return ret;
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> (e.g. array element) before the native call.
/// </summary>
public static double ImStdDev(ref byte values, int count)
{
    fixed (byte* pvalues = &values)
    {
        double ret = ImStdDevNative((byte*)pvalues, count);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImStdDev</c> over <see cref="short"/> samples (funcTable slot 430).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double ImStdDevNative(short* values, int count)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<short*, int, double>)funcTable[430])(values, count);
#else
    return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[430])((nint)values, count);
#endif
}

/// <summary>
/// Pointer-based public wrapper over <see cref="ImStdDevNative(short*, int)"/>.
/// </summary>
public static double ImStdDev(short* values, int count)
{
    double ret = ImStdDevNative(values, count);
    return ret;
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> (e.g. array element) before the native call.
/// </summary>
public static double ImStdDev(ref short values, int count)
{
    fixed (short* pvalues = &values)
    {
        double ret = ImStdDevNative((short*)pvalues, count);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImStdDev</c> over <see cref="ushort"/> samples (funcTable slot 431).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double ImStdDevNative(ushort* values, int count)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<ushort*, int, double>)funcTable[431])(values, count);
#else
    return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[431])((nint)values, count);
#endif
}

/// <summary>
/// Pointer-based public wrapper over <see cref="ImStdDevNative(ushort*, int)"/>.
/// </summary>
public static double ImStdDev(ushort* values, int count)
{
    double ret = ImStdDevNative(values, count);
    return ret;
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> (e.g. array element) before the native call.
/// </summary>
public static double ImStdDev(ref ushort values, int count)
{
    fixed (ushort* pvalues = &values)
    {
        double ret = ImStdDevNative((ushort*)pvalues, count);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImStdDev</c> over <see cref="int"/> samples (funcTable slot 432).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double ImStdDevNative(int* values, int count)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<int*, int, double>)funcTable[432])(values, count);
#else
    return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[432])((nint)values, count);
#endif
}

/// <summary>
/// Pointer-based public wrapper over <see cref="ImStdDevNative(int*, int)"/>.
/// </summary>
public static double ImStdDev(int* values, int count)
{
    double ret = ImStdDevNative(values, count);
    return ret;
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> (e.g. array element) before the native call.
/// </summary>
public static double ImStdDev(ref int values, int count)
{
    fixed (int* pvalues = &values)
    {
        double ret = ImStdDevNative((int*)pvalues, count);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImStdDev</c> over <see cref="uint"/> samples (funcTable slot 433).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double ImStdDevNative(uint* values, int count)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<uint*, int, double>)funcTable[433])(values, count);
#else
    return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[433])((nint)values, count);
#endif
}

/// <summary>
/// Pointer-based public wrapper over <see cref="ImStdDevNative(uint*, int)"/>.
/// </summary>
public static double ImStdDev(uint* values, int count)
{
    double ret = ImStdDevNative(values, count);
    return ret;
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> (e.g. array element) before the native call.
/// </summary>
public static double ImStdDev(ref uint values, int count)
{
    fixed (uint* pvalues = &values)
    {
        double ret = ImStdDevNative((uint*)pvalues, count);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImStdDev</c> over <see cref="long"/> samples (funcTable slot 434).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double ImStdDevNative(long* values, int count)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<long*, int, double>)funcTable[434])(values, count);
#else
    return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[434])((nint)values, count);
#endif
}

/// <summary>
/// Pointer-based public wrapper over <see cref="ImStdDevNative(long*, int)"/>.
/// </summary>
public static double ImStdDev(long* values, int count)
{
    double ret = ImStdDevNative(values, count);
    return ret;
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> (e.g. array element) before the native call.
/// </summary>
public static double ImStdDev(ref long values, int count)
{
    fixed (long* pvalues = &values)
    {
        double ret = ImStdDevNative((long*)pvalues, count);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImStdDev</c> over <see cref="ulong"/> samples (funcTable slot 435).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double ImStdDevNative(ulong* values, int count)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<ulong*, int, double>)funcTable[435])(values, count);
#else
    return (double)((delegate* unmanaged[Cdecl]<nint, int, double>)funcTable[435])((nint)values, count);
#endif
}

/// <summary>
/// Pointer-based public wrapper over <see cref="ImStdDevNative(ulong*, int)"/>.
/// </summary>
public static double ImStdDev(ulong* values, int count)
{
    double ret = ImStdDevNative(values, count);
    return ret;
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> (e.g. array element) before the native call.
/// </summary>
public static double ImStdDev(ref ulong values, int count)
{
    fixed (ulong* pvalues = &values)
    {
        double ret = ImStdDevNative((ulong*)pvalues, count);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImMixU32</c> (funcTable slot 436). All arguments are scalars,
/// so both preprocessor branches use the same signature; only the explicit result cast differs.
/// Presumably blends packed 32-bit colors <paramref name="a"/> and <paramref name="b"/> by
/// <paramref name="s"/> — name-based; confirm against the native ImPlot implementation.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static uint ImMixU32Native(uint a, uint b, uint s)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<uint, uint, uint, uint>)funcTable[436])(a, b, s);
#else
    return (uint)((delegate* unmanaged[Cdecl]<uint, uint, uint, uint>)funcTable[436])(a, b, s);
#endif
}

/// <summary>
/// Public wrapper over <see cref="ImMixU32Native(uint, uint, uint)"/>.
/// </summary>
public static uint ImMixU32(uint a, uint b, uint s)
{
    uint ret = ImMixU32Native(a, b, s);
    return ret;
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImLerpU32</c> (funcTable slot 437). Presumably interpolates within
/// a table of <paramref name="size"/> packed 32-bit colors at parameter <paramref name="t"/> —
/// name-based; confirm against the native ImPlot implementation.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static uint ImLerpU32Native(uint* colors, int size, float t)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<uint*, int, float, uint>)funcTable[437])(colors, size, t);
#else
    return (uint)((delegate* unmanaged[Cdecl]<nint, int, float, uint>)funcTable[437])((nint)colors, size, t);
#endif
}

/// <summary>
/// Pointer-based public wrapper over <see cref="ImLerpU32Native(uint*, int, float)"/>.
/// </summary>
public static uint ImLerpU32(uint* colors, int size, float t)
{
    uint ret = ImLerpU32Native(colors, size, t);
    return ret;
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> (e.g. first element of a color array)
/// before the native call.
/// </summary>
public static uint ImLerpU32(ref uint colors, int size, float t)
{
    fixed (uint* pcolors = &colors)
    {
        uint ret = ImLerpU32Native((uint*)pcolors, size, t);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImAlphaU32</c> (funcTable slot 438). Presumably applies the
/// <paramref name="alpha"/> multiplier to the packed 32-bit color <paramref name="col"/> —
/// name-based; confirm against the native ImPlot implementation.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static uint ImAlphaU32Native(uint col, float alpha)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<uint, float, uint>)funcTable[438])(col, alpha);
#else
    return (uint)((delegate* unmanaged[Cdecl]<uint, float, uint>)funcTable[438])(col, alpha);
#endif
}

/// <summary>
/// Public wrapper over <see cref="ImAlphaU32Native(uint, float)"/>.
/// </summary>
public static uint ImAlphaU32(uint col, float alpha)
{
    uint ret = ImAlphaU32Native(col, alpha);
    return ret;
}
|
|
|
|
/// <summary>
/// Raw native call for <c>ImOverlaps</c> over <see cref="float"/> ranges (funcTable slot 439).
/// The native side returns a C bool as a <see cref="byte"/>; the public wrappers map it to
/// <see langword="bool"/> via <c>!= 0</c>. Presumably tests whether [minA, maxA] and
/// [minB, maxB] intersect — name-based; the native ImPlot implementation is the source of truth.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte ImOverlapsNative(float minA, float maxA, float minB, float maxB)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<float, float, float, float, byte>)funcTable[439])(minA, maxA, minB, maxB);
#else
    return (byte)((delegate* unmanaged[Cdecl]<float, float, float, float, byte>)funcTable[439])(minA, maxA, minB, maxB);
#endif
}

/// <summary>
/// Bool-returning public wrapper over <see cref="ImOverlapsNative(float, float, float, float)"/>.
/// </summary>
public static bool ImOverlaps(float minA, float maxA, float minB, float maxB)
{
    byte ret = ImOverlapsNative(minA, maxA, minB, maxB);
    return ret != 0;
}

/// <summary>
/// Raw native call for <c>ImOverlaps</c> over <see cref="double"/> ranges (funcTable slot 440).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte ImOverlapsNative(double minA, double maxA, double minB, double maxB)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<double, double, double, double, byte>)funcTable[440])(minA, maxA, minB, maxB);
#else
    return (byte)((delegate* unmanaged[Cdecl]<double, double, double, double, byte>)funcTable[440])(minA, maxA, minB, maxB);
#endif
}

/// <summary>
/// Bool-returning public wrapper over <see cref="ImOverlapsNative(double, double, double, double)"/>.
/// </summary>
public static bool ImOverlaps(double minA, double maxA, double minB, double maxB)
{
    byte ret = ImOverlapsNative(minA, maxA, minB, maxB);
    return ret != 0;
}

/// <summary>
/// Raw native call for <c>ImOverlaps</c> over <see cref="byte"/> ranges (funcTable slot 441).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte ImOverlapsNative(byte minA, byte maxA, byte minB, byte maxB)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<byte, byte, byte, byte, byte>)funcTable[441])(minA, maxA, minB, maxB);
#else
    return (byte)((delegate* unmanaged[Cdecl]<byte, byte, byte, byte, byte>)funcTable[441])(minA, maxA, minB, maxB);
#endif
}

/// <summary>
/// Bool-returning public wrapper over <see cref="ImOverlapsNative(byte, byte, byte, byte)"/>.
/// </summary>
public static bool ImOverlaps(byte minA, byte maxA, byte minB, byte maxB)
{
    byte ret = ImOverlapsNative(minA, maxA, minB, maxB);
    return ret != 0;
}

/// <summary>
/// Raw native call for <c>ImOverlaps</c> over <see cref="short"/> ranges (funcTable slot 442).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte ImOverlapsNative(short minA, short maxA, short minB, short maxB)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<short, short, short, short, byte>)funcTable[442])(minA, maxA, minB, maxB);
#else
    return (byte)((delegate* unmanaged[Cdecl]<short, short, short, short, byte>)funcTable[442])(minA, maxA, minB, maxB);
#endif
}

/// <summary>
/// Bool-returning public wrapper over <see cref="ImOverlapsNative(short, short, short, short)"/>.
/// </summary>
public static bool ImOverlaps(short minA, short maxA, short minB, short maxB)
{
    byte ret = ImOverlapsNative(minA, maxA, minB, maxB);
    return ret != 0;
}

/// <summary>
/// Raw native call for <c>ImOverlaps</c> over <see cref="ushort"/> ranges (funcTable slot 443).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte ImOverlapsNative(ushort minA, ushort maxA, ushort minB, ushort maxB)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<ushort, ushort, ushort, ushort, byte>)funcTable[443])(minA, maxA, minB, maxB);
#else
    return (byte)((delegate* unmanaged[Cdecl]<ushort, ushort, ushort, ushort, byte>)funcTable[443])(minA, maxA, minB, maxB);
#endif
}

/// <summary>
/// Bool-returning public wrapper over <see cref="ImOverlapsNative(ushort, ushort, ushort, ushort)"/>.
/// </summary>
public static bool ImOverlaps(ushort minA, ushort maxA, ushort minB, ushort maxB)
{
    byte ret = ImOverlapsNative(minA, maxA, minB, maxB);
    return ret != 0;
}

/// <summary>
/// Raw native call for <c>ImOverlaps</c> over <see cref="int"/> ranges (funcTable slot 444).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte ImOverlapsNative(int minA, int maxA, int minB, int maxB)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<int, int, int, int, byte>)funcTable[444])(minA, maxA, minB, maxB);
#else
    return (byte)((delegate* unmanaged[Cdecl]<int, int, int, int, byte>)funcTable[444])(minA, maxA, minB, maxB);
#endif
}

/// <summary>
/// Bool-returning public wrapper over <see cref="ImOverlapsNative(int, int, int, int)"/>.
/// </summary>
public static bool ImOverlaps(int minA, int maxA, int minB, int maxB)
{
    byte ret = ImOverlapsNative(minA, maxA, minB, maxB);
    return ret != 0;
}

/// <summary>
/// Raw native call for <c>ImOverlaps</c> over <see cref="uint"/> ranges (funcTable slot 445).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte ImOverlapsNative(uint minA, uint maxA, uint minB, uint maxB)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<uint, uint, uint, uint, byte>)funcTable[445])(minA, maxA, minB, maxB);
#else
    return (byte)((delegate* unmanaged[Cdecl]<uint, uint, uint, uint, byte>)funcTable[445])(minA, maxA, minB, maxB);
#endif
}

/// <summary>
/// Bool-returning public wrapper over <see cref="ImOverlapsNative(uint, uint, uint, uint)"/>.
/// </summary>
public static bool ImOverlaps(uint minA, uint maxA, uint minB, uint maxB)
{
    byte ret = ImOverlapsNative(minA, maxA, minB, maxB);
    return ret != 0;
}

/// <summary>
/// Raw native call for <c>ImOverlaps</c> over <see cref="long"/> ranges (funcTable slot 446).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte ImOverlapsNative(long minA, long maxA, long minB, long maxB)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<long, long, long, long, byte>)funcTable[446])(minA, maxA, minB, maxB);
#else
    return (byte)((delegate* unmanaged[Cdecl]<long, long, long, long, byte>)funcTable[446])(minA, maxA, minB, maxB);
#endif
}

/// <summary>
/// Bool-returning public wrapper over <see cref="ImOverlapsNative(long, long, long, long)"/>.
/// </summary>
public static bool ImOverlaps(long minA, long maxA, long minB, long maxB)
{
    byte ret = ImOverlapsNative(minA, maxA, minB, maxB);
    return ret != 0;
}

/// <summary>
/// Raw native call for <c>ImOverlaps</c> over <see cref="ulong"/> ranges (funcTable slot 447).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte ImOverlapsNative(ulong minA, ulong maxA, ulong minB, ulong maxB)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<ulong, ulong, ulong, ulong, byte>)funcTable[447])(minA, maxA, minB, maxB);
#else
    return (byte)((delegate* unmanaged[Cdecl]<ulong, ulong, ulong, ulong, byte>)funcTable[447])(minA, maxA, minB, maxB);
#endif
}

/// <summary>
/// Bool-returning public wrapper over <see cref="ImOverlapsNative(ulong, ulong, ulong, ulong)"/>.
/// </summary>
public static bool ImOverlaps(ulong minA, ulong maxA, ulong minB, ulong maxB)
{
    byte ret = ImOverlapsNative(minA, maxA, minB, maxB);
    return ret != 0;
}
|
|
|
|
/// <summary>
/// Native default constructor for <c>ImPlotDateTimeSpec</c> (funcTable slot 448).
/// Returns a pointer to a natively allocated instance.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotDateTimeSpec* ImPlotDateTimeSpecNative()
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<ImPlotDateTimeSpec*>)funcTable[448])();
#else
    return (ImPlotDateTimeSpec*)((delegate* unmanaged[Cdecl]<nint>)funcTable[448])();
#endif
}

/// <summary>
/// Allocates a default-constructed native <c>ImPlotDateTimeSpec</c>.
/// NOTE(review): ownership appears to be the caller's — presumably released via
/// <see cref="Destroy(ImPlotDateTimeSpecPtr)"/>; confirm against the generator's contract.
/// </summary>
public static ImPlotDateTimeSpecPtr ImPlotDateTimeSpec()
{
    ImPlotDateTimeSpecPtr ret = ImPlotDateTimeSpecNative();
    return ret;
}

/// <summary>
/// Native destructor for <c>ImPlotDateTimeSpec</c> (funcTable slot 449).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void DestroyNative(ImPlotDateTimeSpec* self)
{
#if NET5_0_OR_GREATER
    ((delegate* unmanaged[Cdecl]<ImPlotDateTimeSpec*, void>)funcTable[449])(self);
#else
    ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[449])((nint)self);
#endif
}

/// <summary>
/// Destroys a native <c>ImPlotDateTimeSpec</c> previously obtained from this binding.
/// </summary>
public static void Destroy(ImPlotDateTimeSpecPtr self)
{
    DestroyNative(self);
}

/// <summary>
/// Overload of Destroy that pins a managed <see langword="ref"/> before the native call.
/// </summary>
public static void Destroy(ref ImPlotDateTimeSpec self)
{
    fixed (ImPlotDateTimeSpec* pself = &self)
    {
        DestroyNative((ImPlotDateTimeSpec*)pself);
    }
}
|
|
|
|
/// <summary>
/// Native parameterized constructor for <c>ImPlotDateTimeSpec</c> (funcTable slot 450).
/// Booleans cross the interop boundary as bytes (0/1).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotDateTimeSpec* ImPlotDateTimeSpecNative(ImPlotDateFmt dateFmt, ImPlotTimeFmt timeFmt, byte use24HrClk, byte useIso8601)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<ImPlotDateFmt, ImPlotTimeFmt, byte, byte, ImPlotDateTimeSpec*>)funcTable[450])(dateFmt, timeFmt, use24HrClk, useIso8601);
#else
    return (ImPlotDateTimeSpec*)((delegate* unmanaged[Cdecl]<ImPlotDateFmt, ImPlotTimeFmt, byte, byte, nint>)funcTable[450])(dateFmt, timeFmt, use24HrClk, useIso8601);
#endif
}

/// <summary>
/// Constructs a native <c>ImPlotDateTimeSpec</c> with explicit clock/ISO-8601 flags,
/// converting the managed bools to interop bytes.
/// </summary>
public static ImPlotDateTimeSpecPtr ImPlotDateTimeSpec(ImPlotDateFmt dateFmt, ImPlotTimeFmt timeFmt, bool use24HrClk, bool useIso8601)
{
    ImPlotDateTimeSpecPtr ret = ImPlotDateTimeSpecNative(dateFmt, timeFmt, use24HrClk ? (byte)1 : (byte)0, useIso8601 ? (byte)1 : (byte)0);
    return ret;
}

/// <summary>
/// Overload defaulting <c>useIso8601</c> to false (0).
/// </summary>
public static ImPlotDateTimeSpecPtr ImPlotDateTimeSpec(ImPlotDateFmt dateFmt, ImPlotTimeFmt timeFmt, bool use24HrClk)
{
    ImPlotDateTimeSpecPtr ret = ImPlotDateTimeSpecNative(dateFmt, timeFmt, use24HrClk ? (byte)1 : (byte)0, (byte)(0));
    return ret;
}

/// <summary>
/// Overload defaulting both <c>use24HrClk</c> and <c>useIso8601</c> to false (0).
/// </summary>
public static ImPlotDateTimeSpecPtr ImPlotDateTimeSpec(ImPlotDateFmt dateFmt, ImPlotTimeFmt timeFmt)
{
    ImPlotDateTimeSpecPtr ret = ImPlotDateTimeSpecNative(dateFmt, timeFmt, (byte)(0), (byte)(0));
    return ret;
}
|
|
|
|
/// <summary>
/// Native default constructor for <c>ImPlotTime</c> (funcTable slot 451).
/// Returns a pointer to a natively allocated instance.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotTime* ImPlotTimeNative()
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<ImPlotTime*>)funcTable[451])();
#else
    return (ImPlotTime*)((delegate* unmanaged[Cdecl]<nint>)funcTable[451])();
#endif
}

/// <summary>
/// Allocates a default-constructed native <c>ImPlotTime</c>.
/// NOTE(review): presumably paired with <see cref="Destroy(ImPlotTimePtr)"/> — confirm ownership.
/// </summary>
public static ImPlotTimePtr ImPlotTime()
{
    ImPlotTimePtr ret = ImPlotTimeNative();
    return ret;
}

/// <summary>
/// Native destructor for <c>ImPlotTime</c> (funcTable slot 452).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void DestroyNative(ImPlotTime* self)
{
#if NET5_0_OR_GREATER
    ((delegate* unmanaged[Cdecl]<ImPlotTime*, void>)funcTable[452])(self);
#else
    ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[452])((nint)self);
#endif
}

/// <summary>
/// Destroys a native <c>ImPlotTime</c> previously obtained from this binding.
/// </summary>
public static void Destroy(ImPlotTimePtr self)
{
    DestroyNative(self);
}

/// <summary>
/// Overload of Destroy that pins a managed <see langword="ref"/> before the native call.
/// </summary>
public static void Destroy(ref ImPlotTime self)
{
    fixed (ImPlotTime* pself = &self)
    {
        DestroyNative((ImPlotTime*)pself);
    }
}
|
|
|
|
/// <summary>
/// Native parameterized constructor for <c>ImPlotTime</c> (funcTable slot 453).
/// Presumably <paramref name="s"/> is seconds and <paramref name="us"/> microseconds
/// (ImPlotTime convention) — name-based; confirm against the ImPlot headers.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotTime* ImPlotTimeNative(long s, int us)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<long, int, ImPlotTime*>)funcTable[453])(s, us);
#else
    return (ImPlotTime*)((delegate* unmanaged[Cdecl]<long, int, nint>)funcTable[453])(s, us);
#endif
}

/// <summary>
/// Constructs a native <c>ImPlotTime</c> from the two components.
/// </summary>
public static ImPlotTimePtr ImPlotTime(long s, int us)
{
    ImPlotTimePtr ret = ImPlotTimeNative(s, us);
    return ret;
}

/// <summary>
/// Overload defaulting <c>us</c> to 0.
/// </summary>
public static ImPlotTimePtr ImPlotTime(long s)
{
    ImPlotTimePtr ret = ImPlotTimeNative(s, (int)(0));
    return ret;
}
|
|
|
|
/// <summary>
/// Native <c>ImPlotTime::RollOver</c> member call (funcTable slot 454). Mutates the
/// instance in place; exact normalization semantics live in the native implementation.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void RollOverNative(ImPlotTime* self)
{
#if NET5_0_OR_GREATER
    ((delegate* unmanaged[Cdecl]<ImPlotTime*, void>)funcTable[454])(self);
#else
    ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[454])((nint)self);
#endif
}

/// <summary>
/// Public wrapper over <see cref="RollOverNative(ImPlotTime*)"/>.
/// </summary>
public static void RollOver(ImPlotTimePtr self)
{
    RollOverNative(self);
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> before the native call.
/// </summary>
public static void RollOver(ref ImPlotTime self)
{
    fixed (ImPlotTime* pself = &self)
    {
        RollOverNative((ImPlotTime*)pself);
    }
}
|
|
|
|
/// <summary>
/// Native <c>ImPlotTime::ToDouble</c> member call (funcTable slot 455); converts the
/// time instance to a <see cref="double"/> (conversion rule defined natively).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static double ToDoubleNative(ImPlotTime* self)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<ImPlotTime*, double>)funcTable[455])(self);
#else
    return (double)((delegate* unmanaged[Cdecl]<nint, double>)funcTable[455])((nint)self);
#endif
}

/// <summary>
/// Public wrapper over <see cref="ToDoubleNative(ImPlotTime*)"/>.
/// </summary>
public static double ToDouble(ImPlotTimePtr self)
{
    double ret = ToDoubleNative(self);
    return ret;
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> before the native call.
/// </summary>
public static double ToDouble(ref ImPlotTime self)
{
    fixed (ImPlotTime* pself = &self)
    {
        double ret = ToDoubleNative((ImPlotTime*)pself);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Native <c>ImPlotTime::FromDouble</c> call (funcTable slot 456). Uses the generated
/// out-parameter convention: the result is written into <paramref name="pOut"/> rather
/// than returned.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void FromDoubleNative(ImPlotTime* pOut, double t)
{
#if NET5_0_OR_GREATER
    ((delegate* unmanaged[Cdecl]<ImPlotTime*, double, void>)funcTable[456])(pOut, t);
#else
    ((delegate* unmanaged[Cdecl]<nint, double, void>)funcTable[456])((nint)pOut, t);
#endif
}

/// <summary>
/// By-value convenience overload: lets the native call fill a stack local and returns it.
/// </summary>
public static ImPlotTime FromDouble(double t)
{
    ImPlotTime ret;
    FromDoubleNative(&ret, t);
    return ret;
}

/// <summary>
/// Pointer-based overload writing the result through <paramref name="pOut"/>.
/// </summary>
public static void FromDouble(ImPlotTimePtr pOut, double t)
{
    FromDoubleNative(pOut, t);
}

/// <summary>
/// Overload that pins a managed <see langword="ref"/> destination before the native call.
/// </summary>
public static void FromDouble(ref ImPlotTime pOut, double t)
{
    fixed (ImPlotTime* ppOut = &pOut)
    {
        FromDoubleNative((ImPlotTime*)ppOut, t);
    }
}
|
|
|
|
/// <summary>
/// Native default constructor for <c>ImPlotColormapData</c> (funcTable slot 457).
/// Returns a pointer to a natively allocated instance.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotColormapData* ImPlotColormapDataNative()
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*>)funcTable[457])();
#else
    return (ImPlotColormapData*)((delegate* unmanaged[Cdecl]<nint>)funcTable[457])();
#endif
}

/// <summary>
/// Allocates a default-constructed native <c>ImPlotColormapData</c>.
/// NOTE(review): presumably paired with <see cref="Destroy(ImPlotColormapDataPtr)"/> — confirm ownership.
/// </summary>
public static ImPlotColormapDataPtr ImPlotColormapData()
{
    ImPlotColormapDataPtr ret = ImPlotColormapDataNative();
    return ret;
}

/// <summary>
/// Native destructor for <c>ImPlotColormapData</c> (funcTable slot 458).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void DestroyNative(ImPlotColormapData* self)
{
#if NET5_0_OR_GREATER
    ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, void>)funcTable[458])(self);
#else
    ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[458])((nint)self);
#endif
}

/// <summary>
/// Destroys a native <c>ImPlotColormapData</c> previously obtained from this binding.
/// </summary>
public static void Destroy(ImPlotColormapDataPtr self)
{
    DestroyNative(self);
}

/// <summary>
/// Overload of Destroy that pins a managed <see langword="ref"/> before the native call.
/// </summary>
public static void Destroy(ref ImPlotColormapData self)
{
    fixed (ImPlotColormapData* pself = &self)
    {
        DestroyNative((ImPlotColormapData*)pself);
    }
}
|
|
|
|
/// <summary>
/// Native <c>ImPlotColormapData::Append</c> member call (funcTable slot 459).
/// <paramref name="name"/> is a NUL-terminated UTF-8 string, <paramref name="keys"/> points
/// to <paramref name="count"/> packed 32-bit colors, and <paramref name="qual"/> is a C bool
/// byte (0/1). Returns the <see cref="int"/> produced by the native call (presumably the new
/// colormap's index — name-based; confirm against ImPlot).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static int AppendNative(ImPlotColormapData* self, byte* name, uint* keys, int count, byte qual)
{
#if NET5_0_OR_GREATER
    return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, byte*, uint*, int, byte, int>)funcTable[459])(self, name, keys, count, qual);
#else
    return (int)((delegate* unmanaged[Cdecl]<nint, nint, nint, int, byte, int>)funcTable[459])((nint)self, (nint)name, (nint)keys, count, qual);
#endif
}

/// <summary>
/// Pointer-based public wrapper over AppendNative; converts <paramref name="qual"/> to an interop byte.
/// </summary>
public static int Append(ImPlotColormapDataPtr self, byte* name, uint* keys, int count, bool qual)
{
    int ret = AppendNative(self, name, keys, count, qual ? (byte)1 : (byte)0);
    return ret;
}

/// <summary>
/// Overload that pins the <see langword="ref"/> receiver before the native call.
/// </summary>
public static int Append(ref ImPlotColormapData self, byte* name, uint* keys, int count, bool qual)
{
    fixed (ImPlotColormapData* pself = &self)
    {
        int ret = AppendNative((ImPlotColormapData*)pself, name, keys, count, qual ? (byte)1 : (byte)0);
        return ret;
    }
}
|
|
|
|
/// <summary>
/// Append overload pinning a <see langword="ref"/> byte as the UTF-8 name pointer
/// (caller is responsible for NUL termination of the underlying buffer).
/// </summary>
public static int Append(ImPlotColormapDataPtr self, ref byte name, uint* keys, int count, bool qual)
{
    fixed (byte* pname = &name)
    {
        int ret = AppendNative(self, (byte*)pname, keys, count, qual ? (byte)1 : (byte)0);
        return ret;
    }
}

/// <summary>
/// Append overload pinning a <see cref="ReadOnlySpan{T}"/> of UTF-8 bytes as the name
/// (caller is responsible for NUL termination within the span's backing memory).
/// </summary>
public static int Append(ImPlotColormapDataPtr self, ReadOnlySpan<byte> name, uint* keys, int count, bool qual)
{
    fixed (byte* pname = name)
    {
        int ret = AppendNative(self, (byte*)pname, keys, count, qual ? (byte)1 : (byte)0);
        return ret;
    }
}

/// <summary>
/// Append overload taking a managed <see cref="string"/> name. Encodes it to a
/// NUL-terminated UTF-8 buffer — stack-allocated when small, heap-allocated (and freed
/// after the call) when at least <c>Utils.MaxStackallocSize</c> bytes. A null string is
/// passed through as a null pointer.
/// </summary>
public static int Append(ImPlotColormapDataPtr self, string name, uint* keys, int count, bool qual)
{
    byte* pStr0 = null;
    int pStrSize0 = 0;
    if (name != null)
    {
        pStrSize0 = Utils.GetByteCountUTF8(name);
        if (pStrSize0 >= Utils.MaxStackallocSize)
        {
            pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
        }
        else
        {
            byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
            pStr0 = pStrStack0;
        }
        int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0);
        pStr0[pStrOffset0] = 0;
    }
    int ret = AppendNative(self, pStr0, keys, count, qual ? (byte)1 : (byte)0);
    if (pStrSize0 >= Utils.MaxStackallocSize)
    {
        Utils.Free(pStr0);
    }
    return ret;
}
|
|
|
|
/// <summary>
/// Append overload pinning both the <see langword="ref"/> receiver and a <see langword="ref"/>
/// byte name before the native call.
/// </summary>
public static int Append(ref ImPlotColormapData self, ref byte name, uint* keys, int count, bool qual)
{
    fixed (ImPlotColormapData* pself = &self)
    {
        fixed (byte* pname = &name)
        {
            int ret = AppendNative((ImPlotColormapData*)pself, (byte*)pname, keys, count, qual ? (byte)1 : (byte)0);
            return ret;
        }
    }
}

/// <summary>
/// Append overload pinning the <see langword="ref"/> receiver and a UTF-8 name span.
/// </summary>
public static int Append(ref ImPlotColormapData self, ReadOnlySpan<byte> name, uint* keys, int count, bool qual)
{
    fixed (ImPlotColormapData* pself = &self)
    {
        fixed (byte* pname = name)
        {
            int ret = AppendNative((ImPlotColormapData*)pself, (byte*)pname, keys, count, qual ? (byte)1 : (byte)0);
            return ret;
        }
    }
}

/// <summary>
/// Append overload pinning the <see langword="ref"/> receiver and marshalling a managed
/// <see cref="string"/> name to NUL-terminated UTF-8 (stackalloc for small strings,
/// heap + free otherwise; null string becomes a null pointer).
/// </summary>
public static int Append(ref ImPlotColormapData self, string name, uint* keys, int count, bool qual)
{
    fixed (ImPlotColormapData* pself = &self)
    {
        byte* pStr0 = null;
        int pStrSize0 = 0;
        if (name != null)
        {
            pStrSize0 = Utils.GetByteCountUTF8(name);
            if (pStrSize0 >= Utils.MaxStackallocSize)
            {
                pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
            }
            else
            {
                byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
                pStr0 = pStrStack0;
            }
            int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0);
            pStr0[pStrOffset0] = 0;
        }
        int ret = AppendNative((ImPlotColormapData*)pself, pStr0, keys, count, qual ? (byte)1 : (byte)0);
        if (pStrSize0 >= Utils.MaxStackallocSize)
        {
            Utils.Free(pStr0);
        }
        return ret;
    }
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static int Append(ImPlotColormapDataPtr self, byte* name, ref uint keys, int count, bool qual)
|
|
{
|
|
fixed (uint* pkeys = &keys)
|
|
{
|
|
int ret = AppendNative(self, name, (uint*)pkeys, count, qual ? (byte)1 : (byte)0);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static int Append(ref ImPlotColormapData self, byte* name, ref uint keys, int count, bool qual)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
fixed (uint* pkeys = &keys)
|
|
{
|
|
int ret = AppendNative((ImPlotColormapData*)pself, name, (uint*)pkeys, count, qual ? (byte)1 : (byte)0);
|
|
return ret;
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static int Append(ImPlotColormapDataPtr self, ref byte name, ref uint keys, int count, bool qual)
|
|
{
|
|
fixed (byte* pname = &name)
|
|
{
|
|
fixed (uint* pkeys = &keys)
|
|
{
|
|
int ret = AppendNative(self, (byte*)pname, (uint*)pkeys, count, qual ? (byte)1 : (byte)0);
|
|
return ret;
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static int Append(ImPlotColormapDataPtr self, ReadOnlySpan<byte> name, ref uint keys, int count, bool qual)
|
|
{
|
|
fixed (byte* pname = name)
|
|
{
|
|
fixed (uint* pkeys = &keys)
|
|
{
|
|
int ret = AppendNative(self, (byte*)pname, (uint*)pkeys, count, qual ? (byte)1 : (byte)0);
|
|
return ret;
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static int Append(ImPlotColormapDataPtr self, string name, ref uint keys, int count, bool qual)
|
|
{
|
|
byte* pStr0 = null;
|
|
int pStrSize0 = 0;
|
|
if (name != null)
|
|
{
|
|
pStrSize0 = Utils.GetByteCountUTF8(name);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
|
|
}
|
|
else
|
|
{
|
|
byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
|
|
pStr0 = pStrStack0;
|
|
}
|
|
int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0);
|
|
pStr0[pStrOffset0] = 0;
|
|
}
|
|
fixed (uint* pkeys = &keys)
|
|
{
|
|
int ret = AppendNative(self, pStr0, (uint*)pkeys, count, qual ? (byte)1 : (byte)0);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
Utils.Free(pStr0);
|
|
}
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static int Append(ref ImPlotColormapData self, ref byte name, ref uint keys, int count, bool qual)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
fixed (byte* pname = &name)
|
|
{
|
|
fixed (uint* pkeys = &keys)
|
|
{
|
|
int ret = AppendNative((ImPlotColormapData*)pself, (byte*)pname, (uint*)pkeys, count, qual ? (byte)1 : (byte)0);
|
|
return ret;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static int Append(ref ImPlotColormapData self, ReadOnlySpan<byte> name, ref uint keys, int count, bool qual)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
fixed (byte* pname = name)
|
|
{
|
|
fixed (uint* pkeys = &keys)
|
|
{
|
|
int ret = AppendNative((ImPlotColormapData*)pself, (byte*)pname, (uint*)pkeys, count, qual ? (byte)1 : (byte)0);
|
|
return ret;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static int Append(ref ImPlotColormapData self, string name, ref uint keys, int count, bool qual)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
byte* pStr0 = null;
|
|
int pStrSize0 = 0;
|
|
if (name != null)
|
|
{
|
|
pStrSize0 = Utils.GetByteCountUTF8(name);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
|
|
}
|
|
else
|
|
{
|
|
byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
|
|
pStr0 = pStrStack0;
|
|
}
|
|
int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0);
|
|
pStr0[pStrOffset0] = 0;
|
|
}
|
|
fixed (uint* pkeys = &keys)
|
|
{
|
|
int ret = AppendNative((ImPlotColormapData*)pself, pStr0, (uint*)pkeys, count, qual ? (byte)1 : (byte)0);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
Utils.Free(pStr0);
|
|
}
|
|
return ret;
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static void _AppendTableNative(ImPlotColormapData* self, ImPlotColormap cmap)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, void>)funcTable[460])(self, cmap);
|
|
#else
|
|
((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, void>)funcTable[460])((nint)self, cmap);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void _AppendTable(ImPlotColormapDataPtr self, ImPlotColormap cmap)
|
|
{
|
|
_AppendTableNative(self, cmap);
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void _AppendTable(ref ImPlotColormapData self, ImPlotColormap cmap)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
_AppendTableNative((ImPlotColormapData*)pself, cmap);
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static void RebuildTablesNative(ImPlotColormapData* self)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
((delegate* unmanaged[Cdecl]<ImPlotColormapData*, void>)funcTable[461])(self);
|
|
#else
|
|
((delegate* unmanaged[Cdecl]<nint, void>)funcTable[461])((nint)self);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void RebuildTables(ImPlotColormapDataPtr self)
|
|
{
|
|
RebuildTablesNative(self);
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void RebuildTables(ref ImPlotColormapData self)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
RebuildTablesNative((ImPlotColormapData*)pself);
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static byte IsQualNative(ImPlotColormapData* self, ImPlotColormap cmap)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, byte>)funcTable[462])(self, cmap);
|
|
#else
|
|
return (byte)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, byte>)funcTable[462])((nint)self, cmap);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static bool IsQual(ImPlotColormapDataPtr self, ImPlotColormap cmap)
|
|
{
|
|
byte ret = IsQualNative(self, cmap);
|
|
return ret != 0;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static bool IsQual(ref ImPlotColormapData self, ImPlotColormap cmap)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
byte ret = IsQualNative((ImPlotColormapData*)pself, cmap);
|
|
return ret != 0;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static byte* GetNameNative(ImPlotColormapData* self, ImPlotColormap cmap)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, byte*>)funcTable[463])(self, cmap);
|
|
#else
|
|
return (byte*)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, nint>)funcTable[463])((nint)self, cmap);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static byte* GetName(ImPlotColormapDataPtr self, ImPlotColormap cmap)
|
|
{
|
|
byte* ret = GetNameNative(self, cmap);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static string GetNameS(ImPlotColormapDataPtr self, ImPlotColormap cmap)
|
|
{
|
|
string ret = Utils.DecodeStringUTF8(GetNameNative(self, cmap));
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static byte* GetName(ref ImPlotColormapData self, ImPlotColormap cmap)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
byte* ret = GetNameNative((ImPlotColormapData*)pself, cmap);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static string GetNameS(ref ImPlotColormapData self, ImPlotColormap cmap)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
string ret = Utils.DecodeStringUTF8(GetNameNative((ImPlotColormapData*)pself, cmap));
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static ImPlotColormap GetIndexNative(ImPlotColormapData* self, byte* name)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, byte*, ImPlotColormap>)funcTable[464])(self, name);
|
|
#else
|
|
return (ImPlotColormap)((delegate* unmanaged[Cdecl]<nint, nint, ImPlotColormap>)funcTable[464])((nint)self, (nint)name);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotColormap GetIndex(ImPlotColormapDataPtr self, byte* name)
|
|
{
|
|
ImPlotColormap ret = GetIndexNative(self, name);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotColormap GetIndex(ref ImPlotColormapData self, byte* name)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
ImPlotColormap ret = GetIndexNative((ImPlotColormapData*)pself, name);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotColormap GetIndex(ImPlotColormapDataPtr self, ref byte name)
|
|
{
|
|
fixed (byte* pname = &name)
|
|
{
|
|
ImPlotColormap ret = GetIndexNative(self, (byte*)pname);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotColormap GetIndex(ImPlotColormapDataPtr self, ReadOnlySpan<byte> name)
|
|
{
|
|
fixed (byte* pname = name)
|
|
{
|
|
ImPlotColormap ret = GetIndexNative(self, (byte*)pname);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotColormap GetIndex(ImPlotColormapDataPtr self, string name)
|
|
{
|
|
byte* pStr0 = null;
|
|
int pStrSize0 = 0;
|
|
if (name != null)
|
|
{
|
|
pStrSize0 = Utils.GetByteCountUTF8(name);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
|
|
}
|
|
else
|
|
{
|
|
byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
|
|
pStr0 = pStrStack0;
|
|
}
|
|
int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0);
|
|
pStr0[pStrOffset0] = 0;
|
|
}
|
|
ImPlotColormap ret = GetIndexNative(self, pStr0);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
Utils.Free(pStr0);
|
|
}
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotColormap GetIndex(ref ImPlotColormapData self, ref byte name)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
fixed (byte* pname = &name)
|
|
{
|
|
ImPlotColormap ret = GetIndexNative((ImPlotColormapData*)pself, (byte*)pname);
|
|
return ret;
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotColormap GetIndex(ref ImPlotColormapData self, ReadOnlySpan<byte> name)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
fixed (byte* pname = name)
|
|
{
|
|
ImPlotColormap ret = GetIndexNative((ImPlotColormapData*)pself, (byte*)pname);
|
|
return ret;
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotColormap GetIndex(ref ImPlotColormapData self, string name)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
byte* pStr0 = null;
|
|
int pStrSize0 = 0;
|
|
if (name != null)
|
|
{
|
|
pStrSize0 = Utils.GetByteCountUTF8(name);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
|
|
}
|
|
else
|
|
{
|
|
byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
|
|
pStr0 = pStrStack0;
|
|
}
|
|
int pStrOffset0 = Utils.EncodeStringUTF8(name, pStr0, pStrSize0);
|
|
pStr0[pStrOffset0] = 0;
|
|
}
|
|
ImPlotColormap ret = GetIndexNative((ImPlotColormapData*)pself, pStr0);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
Utils.Free(pStr0);
|
|
}
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static uint* GetKeysNative(ImPlotColormapData* self, ImPlotColormap cmap)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, uint*>)funcTable[465])(self, cmap);
|
|
#else
|
|
return (uint*)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, nint>)funcTable[465])((nint)self, cmap);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static uint* GetKeys(ImPlotColormapDataPtr self, ImPlotColormap cmap)
|
|
{
|
|
uint* ret = GetKeysNative(self, cmap);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static uint* GetKeys(ref ImPlotColormapData self, ImPlotColormap cmap)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
uint* ret = GetKeysNative((ImPlotColormapData*)pself, cmap);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static int GetKeyCountNative(ImPlotColormapData* self, ImPlotColormap cmap)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, int>)funcTable[466])(self, cmap);
|
|
#else
|
|
return (int)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, int>)funcTable[466])((nint)self, cmap);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static int GetKeyCount(ImPlotColormapDataPtr self, ImPlotColormap cmap)
|
|
{
|
|
int ret = GetKeyCountNative(self, cmap);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static int GetKeyCount(ref ImPlotColormapData self, ImPlotColormap cmap)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
int ret = GetKeyCountNative((ImPlotColormapData*)pself, cmap);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static uint GetKeyColorNative(ImPlotColormapData* self, ImPlotColormap cmap, int idx)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, int, uint>)funcTable[467])(self, cmap, idx);
|
|
#else
|
|
return (uint)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, int, uint>)funcTable[467])((nint)self, cmap, idx);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static uint GetKeyColor(ImPlotColormapDataPtr self, ImPlotColormap cmap, int idx)
|
|
{
|
|
uint ret = GetKeyColorNative(self, cmap, idx);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static uint GetKeyColor(ref ImPlotColormapData self, ImPlotColormap cmap, int idx)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
uint ret = GetKeyColorNative((ImPlotColormapData*)pself, cmap, idx);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static void SetKeyColorNative(ImPlotColormapData* self, ImPlotColormap cmap, int idx, uint value)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, int, uint, void>)funcTable[468])(self, cmap, idx, value);
|
|
#else
|
|
((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, int, uint, void>)funcTable[468])((nint)self, cmap, idx, value);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void SetKeyColor(ImPlotColormapDataPtr self, ImPlotColormap cmap, int idx, uint value)
|
|
{
|
|
SetKeyColorNative(self, cmap, idx, value);
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void SetKeyColor(ref ImPlotColormapData self, ImPlotColormap cmap, int idx, uint value)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
SetKeyColorNative((ImPlotColormapData*)pself, cmap, idx, value);
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static uint* GetTableNative(ImPlotColormapData* self, ImPlotColormap cmap)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, uint*>)funcTable[469])(self, cmap);
|
|
#else
|
|
return (uint*)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, nint>)funcTable[469])((nint)self, cmap);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static uint* GetTable(ImPlotColormapDataPtr self, ImPlotColormap cmap)
|
|
{
|
|
uint* ret = GetTableNative(self, cmap);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static uint* GetTable(ref ImPlotColormapData self, ImPlotColormap cmap)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
uint* ret = GetTableNative((ImPlotColormapData*)pself, cmap);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static int GetTableSizeNative(ImPlotColormapData* self, ImPlotColormap cmap)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, int>)funcTable[470])(self, cmap);
|
|
#else
|
|
return (int)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, int>)funcTable[470])((nint)self, cmap);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static int GetTableSize(ImPlotColormapDataPtr self, ImPlotColormap cmap)
|
|
{
|
|
int ret = GetTableSizeNative(self, cmap);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static int GetTableSize(ref ImPlotColormapData self, ImPlotColormap cmap)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
int ret = GetTableSizeNative((ImPlotColormapData*)pself, cmap);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static uint GetTableColorNative(ImPlotColormapData* self, ImPlotColormap cmap, int idx)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, int, uint>)funcTable[471])(self, cmap, idx);
|
|
#else
|
|
return (uint)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, int, uint>)funcTable[471])((nint)self, cmap, idx);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static uint GetTableColor(ImPlotColormapDataPtr self, ImPlotColormap cmap, int idx)
|
|
{
|
|
uint ret = GetTableColorNative(self, cmap, idx);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static uint GetTableColor(ref ImPlotColormapData self, ImPlotColormap cmap, int idx)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
uint ret = GetTableColorNative((ImPlotColormapData*)pself, cmap, idx);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static uint LerpTableNative(ImPlotColormapData* self, ImPlotColormap cmap, float t)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotColormapData*, ImPlotColormap, float, uint>)funcTable[472])(self, cmap, t);
|
|
#else
|
|
return (uint)((delegate* unmanaged[Cdecl]<nint, ImPlotColormap, float, uint>)funcTable[472])((nint)self, cmap, t);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static uint LerpTable(ImPlotColormapDataPtr self, ImPlotColormap cmap, float t)
|
|
{
|
|
uint ret = LerpTableNative(self, cmap, t);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static uint LerpTable(ref ImPlotColormapData self, ImPlotColormap cmap, float t)
|
|
{
|
|
fixed (ImPlotColormapData* pself = &self)
|
|
{
|
|
uint ret = LerpTableNative((ImPlotColormapData*)pself, cmap, t);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static ImPlotPointError* ImPlotPointErrorNative(double x, double y, double neg, double pos)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<double, double, double, double, ImPlotPointError*>)funcTable[473])(x, y, neg, pos);
|
|
#else
|
|
return (ImPlotPointError*)((delegate* unmanaged[Cdecl]<double, double, double, double, nint>)funcTable[473])(x, y, neg, pos);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotPointErrorPtr ImPlotPointError(double x, double y, double neg, double pos)
|
|
{
|
|
ImPlotPointErrorPtr ret = ImPlotPointErrorNative(x, y, neg, pos);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static void DestroyNative(ImPlotPointError* self)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
((delegate* unmanaged[Cdecl]<ImPlotPointError*, void>)funcTable[474])(self);
|
|
#else
|
|
((delegate* unmanaged[Cdecl]<nint, void>)funcTable[474])((nint)self);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void Destroy(ImPlotPointErrorPtr self)
|
|
{
|
|
DestroyNative(self);
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void Destroy(ref ImPlotPointError self)
|
|
{
|
|
fixed (ImPlotPointError* pself = &self)
|
|
{
|
|
DestroyNative((ImPlotPointError*)pself);
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static ImPlotAnnotationCollection* ImPlotAnnotationCollectionNative()
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotAnnotationCollection*>)funcTable[475])();
|
|
#else
|
|
return (ImPlotAnnotationCollection*)((delegate* unmanaged[Cdecl]<nint>)funcTable[475])();
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotAnnotationCollectionPtr ImPlotAnnotationCollection()
|
|
{
|
|
ImPlotAnnotationCollectionPtr ret = ImPlotAnnotationCollectionNative();
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static void DestroyNative(ImPlotAnnotationCollection* self)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
((delegate* unmanaged[Cdecl]<ImPlotAnnotationCollection*, void>)funcTable[476])(self);
|
|
#else
|
|
((delegate* unmanaged[Cdecl]<nint, void>)funcTable[476])((nint)self);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void Destroy(ImPlotAnnotationCollectionPtr self)
|
|
{
|
|
DestroyNative(self);
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void Destroy(ref ImPlotAnnotationCollection self)
|
|
{
|
|
fixed (ImPlotAnnotationCollection* pself = &self)
|
|
{
|
|
DestroyNative((ImPlotAnnotationCollection*)pself);
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static void AppendVNative(ImPlotAnnotationCollection* self, Vector2 pos, Vector2 off, uint bg, uint fg, byte clamp, byte* fmt, nuint args)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
((delegate* unmanaged[Cdecl]<ImPlotAnnotationCollection*, Vector2, Vector2, uint, uint, byte, byte*, nuint, void>)funcTable[477])(self, pos, off, bg, fg, clamp, fmt, args);
|
|
#else
|
|
((delegate* unmanaged[Cdecl]<nint, Vector2, Vector2, uint, uint, byte, nint, nuint, void>)funcTable[477])((nint)self, pos, off, bg, fg, clamp, (nint)fmt, args);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void AppendV(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, byte* fmt, nuint args)
|
|
{
|
|
AppendVNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, fmt, args);
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void AppendV(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, byte* fmt, nuint args)
|
|
{
|
|
fixed (ImPlotAnnotationCollection* pself = &self)
|
|
{
|
|
AppendVNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, fmt, args);
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void AppendV(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ref byte fmt, nuint args)
|
|
{
|
|
fixed (byte* pfmt = &fmt)
|
|
{
|
|
AppendVNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt, args);
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void AppendV(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ReadOnlySpan<byte> fmt, nuint args)
|
|
{
|
|
fixed (byte* pfmt = fmt)
|
|
{
|
|
AppendVNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt, args);
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void AppendV(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, string fmt, nuint args)
|
|
{
|
|
byte* pStr0 = null;
|
|
int pStrSize0 = 0;
|
|
if (fmt != null)
|
|
{
|
|
pStrSize0 = Utils.GetByteCountUTF8(fmt);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
|
|
}
|
|
else
|
|
{
|
|
byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
|
|
pStr0 = pStrStack0;
|
|
}
|
|
int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
|
|
pStr0[pStrOffset0] = 0;
|
|
}
|
|
AppendVNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pStr0, args);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
Utils.Free(pStr0);
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void AppendV(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ref byte fmt, nuint args)
|
|
{
|
|
fixed (ImPlotAnnotationCollection* pself = &self)
|
|
{
|
|
fixed (byte* pfmt = &fmt)
|
|
{
|
|
AppendVNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt, args);
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void AppendV(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ReadOnlySpan<byte> fmt, nuint args)
|
|
{
|
|
fixed (ImPlotAnnotationCollection* pself = &self)
|
|
{
|
|
fixed (byte* pfmt = fmt)
|
|
{
|
|
AppendVNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt, args);
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void AppendV(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, string fmt, nuint args)
|
|
{
|
|
fixed (ImPlotAnnotationCollection* pself = &self)
|
|
{
|
|
byte* pStr0 = null;
|
|
int pStrSize0 = 0;
|
|
if (fmt != null)
|
|
{
|
|
pStrSize0 = Utils.GetByteCountUTF8(fmt);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
|
|
}
|
|
else
|
|
{
|
|
byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
|
|
pStr0 = pStrStack0;
|
|
}
|
|
int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
|
|
pStr0[pStrOffset0] = 0;
|
|
}
|
|
AppendVNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pStr0, args);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
Utils.Free(pStr0);
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void AppendNative(ImPlotAnnotationCollection* self, Vector2 pos, Vector2 off, uint bg, uint fg, byte clamp, byte* fmt)
        {
#if NET5_0_OR_GREATER
            // Direct typed unmanaged function-pointer call into the native library.
            ((delegate* unmanaged[Cdecl]<ImPlotAnnotationCollection*, Vector2, Vector2, uint, uint, byte, byte*, void>)funcTable[478])(self, pos, off, bg, fg, clamp, fmt);
#else
            // Fallback path for older TFMs: pointer arguments are passed as nint.
            ((delegate* unmanaged[Cdecl]<nint, Vector2, Vector2, uint, uint, byte, nint, void>)funcTable[478])((nint)self, pos, off, bg, fg, clamp, (nint)fmt);
#endif
        }

        /// <summary>Appends an annotation to the collection. <paramref name="fmt"/> is UTF-8 text.</summary>
        public static void Append(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, byte* fmt)
        {
            // NOTE(review): native side presumably expects fmt to be null-terminated — confirm.
            // Managed bool is marshalled to the native byte convention (1 = true, 0 = false).
            AppendNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, fmt);
        }

        /// <summary>Appends an annotation, pinning the by-ref collection for the native call.</summary>
        public static void Append(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, byte* fmt)
        {
            fixed (ImPlotAnnotationCollection* pself = &self)
            {
                AppendNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, fmt);
            }
        }

        /// <summary>Appends an annotation; <paramref name="fmt"/> is the first byte of UTF-8 text, pinned for the call.</summary>
        public static void Append(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ref byte fmt)
        {
            fixed (byte* pfmt = &fmt)
            {
                AppendNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt);
            }
        }

        /// <summary>Appends an annotation from a UTF-8 byte span, pinned for the call.</summary>
        public static void Append(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ReadOnlySpan<byte> fmt)
        {
            fixed (byte* pfmt = fmt)
            {
                AppendNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt);
            }
        }

        /// <summary>Appends an annotation from a managed string, encoding it to null-terminated UTF-8 for the native call.</summary>
        public static void Append(ImPlotAnnotationCollectionPtr self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, string fmt)
        {
            byte* pStr0 = null;
            int pStrSize0 = 0;
            if (fmt != null)
            {
                pStrSize0 = Utils.GetByteCountUTF8(fmt);
                // Small strings are encoded on the stack; larger ones go through the native heap.
                if (pStrSize0 >= Utils.MaxStackallocSize)
                {
                    pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
                }
                else
                {
                    // stackalloc memory stays valid for the whole method, so the buffer may escape this block.
                    byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
                    pStr0 = pStrStack0;
                }
                int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
                pStr0[pStrOffset0] = 0; // explicit null terminator
            }
            AppendNative(self, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pStr0);
            // Free only when the heap path was taken above.
            if (pStrSize0 >= Utils.MaxStackallocSize)
            {
                Utils.Free(pStr0);
            }
        }

        /// <summary>Appends an annotation; pins both the by-ref collection and the UTF-8 text byte.</summary>
        public static void Append(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ref byte fmt)
        {
            fixed (ImPlotAnnotationCollection* pself = &self)
            {
                fixed (byte* pfmt = &fmt)
                {
                    AppendNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt);
                }
            }
        }

        /// <summary>Appends an annotation; pins both the by-ref collection and the UTF-8 span.</summary>
        public static void Append(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, ReadOnlySpan<byte> fmt)
        {
            fixed (ImPlotAnnotationCollection* pself = &self)
            {
                fixed (byte* pfmt = fmt)
                {
                    AppendNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, (byte*)pfmt);
                }
            }
        }

        /// <summary>Appends an annotation from a managed string with the collection passed by ref.</summary>
        public static void Append(ref ImPlotAnnotationCollection self, Vector2 pos, Vector2 off, uint bg, uint fg, bool clamp, string fmt)
        {
            fixed (ImPlotAnnotationCollection* pself = &self)
            {
                byte* pStr0 = null;
                int pStrSize0 = 0;
                if (fmt != null)
                {
                    pStrSize0 = Utils.GetByteCountUTF8(fmt);
                    if (pStrSize0 >= Utils.MaxStackallocSize)
                    {
                        pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
                    }
                    else
                    {
                        byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
                        pStr0 = pStrStack0;
                    }
                    int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
                    pStr0[pStrOffset0] = 0;
                }
                AppendNative((ImPlotAnnotationCollection*)pself, pos, off, bg, fg, clamp ? (byte)1 : (byte)0, pStr0);
                if (pStrSize0 >= Utils.MaxStackallocSize)
                {
                    Utils.Free(pStr0);
                }
            }
        }

        /// <summary>
        /// Raw native call (funcTable[479]): returns a pointer to the text of the annotation at <paramref name="idx"/>.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte* GetTextNative(ImPlotAnnotationCollection* self, int idx)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAnnotationCollection*, int, byte*>)funcTable[479])(self, idx);
#else
            return (byte*)((delegate* unmanaged[Cdecl]<nint, int, nint>)funcTable[479])((nint)self, idx);
#endif
        }

        /// <summary>Returns a raw pointer to the text of the annotation at <paramref name="idx"/>.</summary>
        public static byte* GetText(ImPlotAnnotationCollectionPtr self, int idx)
        {
            byte* ret = GetTextNative(self, idx);
            return ret;
        }

        /// <summary>Returns the text of the annotation at <paramref name="idx"/>, decoded from UTF-8 to a managed string.</summary>
        public static string GetTextS(ImPlotAnnotationCollectionPtr self, int idx)
        {
            string ret = Utils.DecodeStringUTF8(GetTextNative(self, idx));
            return ret;
        }

        /// <summary>Returns a raw pointer to the annotation text; the by-ref collection is pinned for the call.</summary>
        public static byte* GetText(ref ImPlotAnnotationCollection self, int idx)
        {
            fixed (ImPlotAnnotationCollection* pself = &self)
            {
                byte* ret = GetTextNative((ImPlotAnnotationCollection*)pself, idx);
                return ret;
            }
        }

        /// <summary>Returns the annotation text as a managed string; the by-ref collection is pinned for the call.</summary>
        public static string GetTextS(ref ImPlotAnnotationCollection self, int idx)
        {
            fixed (ImPlotAnnotationCollection* pself = &self)
            {
                string ret = Utils.DecodeStringUTF8(GetTextNative((ImPlotAnnotationCollection*)pself, idx));
                return ret;
            }
        }

        /// <summary>Raw native call (funcTable[480]): resets the annotation collection.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void ResetNative(ImPlotAnnotationCollection* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotAnnotationCollection*, void>)funcTable[480])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[480])((nint)self);
#endif
        }

        /// <summary>Resets the annotation collection.</summary>
        public static void Reset(ImPlotAnnotationCollectionPtr self)
        {
            ResetNative(self);
        }

        /// <summary>Resets the annotation collection, pinning the by-ref instance for the call.</summary>
        public static void Reset(ref ImPlotAnnotationCollection self)
        {
            fixed (ImPlotAnnotationCollection* pself = &self)
            {
                ResetNative((ImPlotAnnotationCollection*)pself);
            }
        }
|
|
|
|
        /// <summary>Raw native call (funcTable[481]): allocates a new ImPlotTagCollection via the native constructor.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static ImPlotTagCollection* ImPlotTagCollectionNative()
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotTagCollection*>)funcTable[481])();
#else
            return (ImPlotTagCollection*)((delegate* unmanaged[Cdecl]<nint>)funcTable[481])();
#endif
        }

        /// <summary>Creates a new ImPlotTagCollection via the native constructor.</summary>
        public static ImPlotTagCollectionPtr ImPlotTagCollection()
        {
            ImPlotTagCollectionPtr ret = ImPlotTagCollectionNative();
            return ret;
        }

        /// <summary>Raw native call (funcTable[482]): destroys a natively-allocated ImPlotTagCollection.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void DestroyNative(ImPlotTagCollection* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotTagCollection*, void>)funcTable[482])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[482])((nint)self);
#endif
        }

        /// <summary>Destroys the tag collection on the native side.</summary>
        public static void Destroy(ImPlotTagCollectionPtr self)
        {
            DestroyNative(self);
        }

        /// <summary>Destroys the tag collection, pinning the by-ref instance for the call.</summary>
        public static void Destroy(ref ImPlotTagCollection self)
        {
            fixed (ImPlotTagCollection* pself = &self)
            {
                DestroyNative((ImPlotTagCollection*)pself);
            }
        }

        /// <summary>
        /// Raw native call (funcTable[483]): appends a tag using a format string plus a pointer-sized
        /// varargs handle <paramref name="args"/> (presumably a native va_list — verify against the native export).
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void AppendVNative(ImPlotTagCollection* self, ImAxis axis, double value, uint bg, uint fg, byte* fmt, nuint args)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotTagCollection*, ImAxis, double, uint, uint, byte*, nuint, void>)funcTable[483])(self, axis, value, bg, fg, fmt, args);
#else
            ((delegate* unmanaged[Cdecl]<nint, ImAxis, double, uint, uint, nint, nuint, void>)funcTable[483])((nint)self, axis, value, bg, fg, (nint)fmt, args);
#endif
        }

        /// <summary>Appends a tag with a UTF-8 format string and an opaque varargs handle.</summary>
        public static void AppendV(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, byte* fmt, nuint args)
        {
            AppendVNative(self, axis, value, bg, fg, fmt, args);
        }

        /// <summary>Appends a tag (varargs form), pinning the by-ref collection for the call.</summary>
        public static void AppendV(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, byte* fmt, nuint args)
        {
            fixed (ImPlotTagCollection* pself = &self)
            {
                AppendVNative((ImPlotTagCollection*)pself, axis, value, bg, fg, fmt, args);
            }
        }

        /// <summary>Appends a tag (varargs form); <paramref name="fmt"/> is the first byte of UTF-8 text, pinned for the call.</summary>
        public static void AppendV(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, ref byte fmt, nuint args)
        {
            fixed (byte* pfmt = &fmt)
            {
                AppendVNative(self, axis, value, bg, fg, (byte*)pfmt, args);
            }
        }

        /// <summary>Appends a tag (varargs form) from a UTF-8 byte span, pinned for the call.</summary>
        public static void AppendV(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, ReadOnlySpan<byte> fmt, nuint args)
        {
            fixed (byte* pfmt = fmt)
            {
                AppendVNative(self, axis, value, bg, fg, (byte*)pfmt, args);
            }
        }

        /// <summary>Appends a tag (varargs form) from a managed string, encoded to null-terminated UTF-8.</summary>
        public static void AppendV(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, string fmt, nuint args)
        {
            byte* pStr0 = null;
            int pStrSize0 = 0;
            if (fmt != null)
            {
                pStrSize0 = Utils.GetByteCountUTF8(fmt);
                // Small strings are encoded on the stack; larger ones go through the native heap.
                if (pStrSize0 >= Utils.MaxStackallocSize)
                {
                    pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
                }
                else
                {
                    byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
                    pStr0 = pStrStack0;
                }
                int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
                pStr0[pStrOffset0] = 0; // explicit null terminator
            }
            AppendVNative(self, axis, value, bg, fg, pStr0, args);
            // Free only when the heap path was taken above.
            if (pStrSize0 >= Utils.MaxStackallocSize)
            {
                Utils.Free(pStr0);
            }
        }

        /// <summary>Appends a tag (varargs form); pins both the by-ref collection and the UTF-8 text byte.</summary>
        public static void AppendV(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, ref byte fmt, nuint args)
        {
            fixed (ImPlotTagCollection* pself = &self)
            {
                fixed (byte* pfmt = &fmt)
                {
                    AppendVNative((ImPlotTagCollection*)pself, axis, value, bg, fg, (byte*)pfmt, args);
                }
            }
        }

        /// <summary>Appends a tag (varargs form); pins both the by-ref collection and the UTF-8 span.</summary>
        public static void AppendV(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, ReadOnlySpan<byte> fmt, nuint args)
        {
            fixed (ImPlotTagCollection* pself = &self)
            {
                fixed (byte* pfmt = fmt)
                {
                    AppendVNative((ImPlotTagCollection*)pself, axis, value, bg, fg, (byte*)pfmt, args);
                }
            }
        }

        /// <summary>Appends a tag (varargs form) from a managed string with the collection passed by ref.</summary>
        public static void AppendV(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, string fmt, nuint args)
        {
            fixed (ImPlotTagCollection* pself = &self)
            {
                byte* pStr0 = null;
                int pStrSize0 = 0;
                if (fmt != null)
                {
                    pStrSize0 = Utils.GetByteCountUTF8(fmt);
                    if (pStrSize0 >= Utils.MaxStackallocSize)
                    {
                        pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
                    }
                    else
                    {
                        byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
                        pStr0 = pStrStack0;
                    }
                    int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
                    pStr0[pStrOffset0] = 0;
                }
                AppendVNative((ImPlotTagCollection*)pself, axis, value, bg, fg, pStr0, args);
                if (pStrSize0 >= Utils.MaxStackallocSize)
                {
                    Utils.Free(pStr0);
                }
            }
        }

        /// <summary>Raw native call (funcTable[484]): appends a tag with a plain format string (no varargs).</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void AppendNative(ImPlotTagCollection* self, ImAxis axis, double value, uint bg, uint fg, byte* fmt)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotTagCollection*, ImAxis, double, uint, uint, byte*, void>)funcTable[484])(self, axis, value, bg, fg, fmt);
#else
            ((delegate* unmanaged[Cdecl]<nint, ImAxis, double, uint, uint, nint, void>)funcTable[484])((nint)self, axis, value, bg, fg, (nint)fmt);
#endif
        }

        /// <summary>Appends a tag. <paramref name="fmt"/> is UTF-8 text.</summary>
        public static void Append(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, byte* fmt)
        {
            AppendNative(self, axis, value, bg, fg, fmt);
        }

        /// <summary>Appends a tag, pinning the by-ref collection for the native call.</summary>
        public static void Append(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, byte* fmt)
        {
            fixed (ImPlotTagCollection* pself = &self)
            {
                AppendNative((ImPlotTagCollection*)pself, axis, value, bg, fg, fmt);
            }
        }

        /// <summary>Appends a tag; <paramref name="fmt"/> is the first byte of UTF-8 text, pinned for the call.</summary>
        public static void Append(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, ref byte fmt)
        {
            fixed (byte* pfmt = &fmt)
            {
                AppendNative(self, axis, value, bg, fg, (byte*)pfmt);
            }
        }

        /// <summary>Appends a tag from a UTF-8 byte span, pinned for the call.</summary>
        public static void Append(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, ReadOnlySpan<byte> fmt)
        {
            fixed (byte* pfmt = fmt)
            {
                AppendNative(self, axis, value, bg, fg, (byte*)pfmt);
            }
        }

        /// <summary>Appends a tag from a managed string, encoded to null-terminated UTF-8 for the native call.</summary>
        public static void Append(ImPlotTagCollectionPtr self, ImAxis axis, double value, uint bg, uint fg, string fmt)
        {
            byte* pStr0 = null;
            int pStrSize0 = 0;
            if (fmt != null)
            {
                pStrSize0 = Utils.GetByteCountUTF8(fmt);
                // Small strings are encoded on the stack; larger ones go through the native heap.
                if (pStrSize0 >= Utils.MaxStackallocSize)
                {
                    pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
                }
                else
                {
                    byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
                    pStr0 = pStrStack0;
                }
                int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
                pStr0[pStrOffset0] = 0; // explicit null terminator
            }
            AppendNative(self, axis, value, bg, fg, pStr0);
            // Free only when the heap path was taken above.
            if (pStrSize0 >= Utils.MaxStackallocSize)
            {
                Utils.Free(pStr0);
            }
        }

        /// <summary>Appends a tag; pins both the by-ref collection and the UTF-8 text byte.</summary>
        public static void Append(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, ref byte fmt)
        {
            fixed (ImPlotTagCollection* pself = &self)
            {
                fixed (byte* pfmt = &fmt)
                {
                    AppendNative((ImPlotTagCollection*)pself, axis, value, bg, fg, (byte*)pfmt);
                }
            }
        }

        /// <summary>Appends a tag; pins both the by-ref collection and the UTF-8 span.</summary>
        public static void Append(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, ReadOnlySpan<byte> fmt)
        {
            fixed (ImPlotTagCollection* pself = &self)
            {
                fixed (byte* pfmt = fmt)
                {
                    AppendNative((ImPlotTagCollection*)pself, axis, value, bg, fg, (byte*)pfmt);
                }
            }
        }

        /// <summary>Appends a tag from a managed string with the collection passed by ref.</summary>
        public static void Append(ref ImPlotTagCollection self, ImAxis axis, double value, uint bg, uint fg, string fmt)
        {
            fixed (ImPlotTagCollection* pself = &self)
            {
                byte* pStr0 = null;
                int pStrSize0 = 0;
                if (fmt != null)
                {
                    pStrSize0 = Utils.GetByteCountUTF8(fmt);
                    if (pStrSize0 >= Utils.MaxStackallocSize)
                    {
                        pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
                    }
                    else
                    {
                        byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
                        pStr0 = pStrStack0;
                    }
                    int pStrOffset0 = Utils.EncodeStringUTF8(fmt, pStr0, pStrSize0);
                    pStr0[pStrOffset0] = 0;
                }
                AppendNative((ImPlotTagCollection*)pself, axis, value, bg, fg, pStr0);
                if (pStrSize0 >= Utils.MaxStackallocSize)
                {
                    Utils.Free(pStr0);
                }
            }
        }

        /// <summary>Raw native call (funcTable[485]): returns a pointer to the text of the tag at <paramref name="idx"/>.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte* GetTextNative(ImPlotTagCollection* self, int idx)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotTagCollection*, int, byte*>)funcTable[485])(self, idx);
#else
            return (byte*)((delegate* unmanaged[Cdecl]<nint, int, nint>)funcTable[485])((nint)self, idx);
#endif
        }

        /// <summary>Returns a raw pointer to the text of the tag at <paramref name="idx"/>.</summary>
        public static byte* GetText(ImPlotTagCollectionPtr self, int idx)
        {
            byte* ret = GetTextNative(self, idx);
            return ret;
        }

        /// <summary>Returns the text of the tag at <paramref name="idx"/>, decoded from UTF-8 to a managed string.</summary>
        public static string GetTextS(ImPlotTagCollectionPtr self, int idx)
        {
            string ret = Utils.DecodeStringUTF8(GetTextNative(self, idx));
            return ret;
        }

        /// <summary>Returns a raw pointer to the tag text; the by-ref collection is pinned for the call.</summary>
        public static byte* GetText(ref ImPlotTagCollection self, int idx)
        {
            fixed (ImPlotTagCollection* pself = &self)
            {
                byte* ret = GetTextNative((ImPlotTagCollection*)pself, idx);
                return ret;
            }
        }

        /// <summary>Returns the tag text as a managed string; the by-ref collection is pinned for the call.</summary>
        public static string GetTextS(ref ImPlotTagCollection self, int idx)
        {
            fixed (ImPlotTagCollection* pself = &self)
            {
                string ret = Utils.DecodeStringUTF8(GetTextNative((ImPlotTagCollection*)pself, idx));
                return ret;
            }
        }

        /// <summary>Raw native call (funcTable[486]): resets the tag collection.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void ResetNative(ImPlotTagCollection* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotTagCollection*, void>)funcTable[486])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[486])((nint)self);
#endif
        }

        /// <summary>Resets the tag collection.</summary>
        public static void Reset(ImPlotTagCollectionPtr self)
        {
            ResetNative(self);
        }

        /// <summary>Resets the tag collection, pinning the by-ref instance for the call.</summary>
        public static void Reset(ref ImPlotTagCollection self)
        {
            fixed (ImPlotTagCollection* pself = &self)
            {
                ResetNative((ImPlotTagCollection*)pself);
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call (funcTable[487]): constructs a native ImPlotTick.
        /// <paramref name="major"/> and <paramref name="showLabel"/> are native bools passed as bytes.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static ImPlotTick* ImPlotTickNative(double value, byte major, int level, byte showLabel)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<double, byte, int, byte, ImPlotTick*>)funcTable[487])(value, major, level, showLabel);
#else
            return (ImPlotTick*)((delegate* unmanaged[Cdecl]<double, byte, int, byte, nint>)funcTable[487])(value, major, level, showLabel);
#endif
        }

        /// <summary>Creates a new ImPlotTick via the native constructor (bools marshalled as 1/0 bytes).</summary>
        public static ImPlotTickPtr ImPlotTick(double value, bool major, int level, bool showLabel)
        {
            ImPlotTickPtr ret = ImPlotTickNative(value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0);
            return ret;
        }

        /// <summary>Raw native call (funcTable[488]): destroys a natively-allocated ImPlotTick.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void DestroyNative(ImPlotTick* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotTick*, void>)funcTable[488])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[488])((nint)self);
#endif
        }

        /// <summary>Destroys the tick on the native side.</summary>
        public static void Destroy(ImPlotTickPtr self)
        {
            DestroyNative(self);
        }

        /// <summary>Destroys the tick, pinning the by-ref instance for the call.</summary>
        public static void Destroy(ref ImPlotTick self)
        {
            fixed (ImPlotTick* pself = &self)
            {
                DestroyNative((ImPlotTick*)pself);
            }
        }
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static ImPlotTicker* ImPlotTickerNative()
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotTicker*>)funcTable[489])();
|
|
#else
|
|
return (ImPlotTicker*)((delegate* unmanaged[Cdecl]<nint>)funcTable[489])();
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickerPtr ImPlotTicker()
|
|
{
|
|
ImPlotTickerPtr ret = ImPlotTickerNative();
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static void DestroyNative(ImPlotTicker* self)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
((delegate* unmanaged[Cdecl]<ImPlotTicker*, void>)funcTable[490])(self);
|
|
#else
|
|
((delegate* unmanaged[Cdecl]<nint, void>)funcTable[490])((nint)self);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void Destroy(ImPlotTickerPtr self)
|
|
{
|
|
DestroyNative(self);
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void Destroy(ref ImPlotTicker self)
|
|
{
|
|
fixed (ImPlotTicker* pself = &self)
|
|
{
|
|
DestroyNative((ImPlotTicker*)pself);
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static ImPlotTick* AddTickNative(ImPlotTicker* self, double value, byte major, int level, byte showLabel, byte* label)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotTicker*, double, byte, int, byte, byte*, ImPlotTick*>)funcTable[491])(self, value, major, level, showLabel, label);
|
|
#else
|
|
return (ImPlotTick*)((delegate* unmanaged[Cdecl]<nint, double, byte, int, byte, nint, nint>)funcTable[491])((nint)self, value, major, level, showLabel, (nint)label);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, byte* label)
|
|
{
|
|
ImPlotTickPtr ret = AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, label);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, byte* label)
|
|
{
|
|
fixed (ImPlotTicker* pself = &self)
|
|
{
|
|
ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, label);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, ref byte label)
|
|
{
|
|
fixed (byte* plabel = &label)
|
|
{
|
|
ImPlotTickPtr ret = AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, (byte*)plabel);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, ReadOnlySpan<byte> label)
|
|
{
|
|
fixed (byte* plabel = label)
|
|
{
|
|
ImPlotTickPtr ret = AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, (byte*)plabel);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, string label)
|
|
{
|
|
byte* pStr0 = null;
|
|
int pStrSize0 = 0;
|
|
if (label != null)
|
|
{
|
|
pStrSize0 = Utils.GetByteCountUTF8(label);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
|
|
}
|
|
else
|
|
{
|
|
byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
|
|
pStr0 = pStrStack0;
|
|
}
|
|
int pStrOffset0 = Utils.EncodeStringUTF8(label, pStr0, pStrSize0);
|
|
pStr0[pStrOffset0] = 0;
|
|
}
|
|
ImPlotTickPtr ret = AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, pStr0);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
Utils.Free(pStr0);
|
|
}
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, ref byte label)
|
|
{
|
|
fixed (ImPlotTicker* pself = &self)
|
|
{
|
|
fixed (byte* plabel = &label)
|
|
{
|
|
ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, (byte*)plabel);
|
|
return ret;
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, ReadOnlySpan<byte> label)
|
|
{
|
|
fixed (ImPlotTicker* pself = &self)
|
|
{
|
|
fixed (byte* plabel = label)
|
|
{
|
|
ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, (byte*)plabel);
|
|
return ret;
|
|
}
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, string label)
|
|
{
|
|
fixed (ImPlotTicker* pself = &self)
|
|
{
|
|
byte* pStr0 = null;
|
|
int pStrSize0 = 0;
|
|
if (label != null)
|
|
{
|
|
pStrSize0 = Utils.GetByteCountUTF8(label);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
pStr0 = Utils.Alloc<byte>(pStrSize0 + 1);
|
|
}
|
|
else
|
|
{
|
|
byte* pStrStack0 = stackalloc byte[pStrSize0 + 1];
|
|
pStr0 = pStrStack0;
|
|
}
|
|
int pStrOffset0 = Utils.EncodeStringUTF8(label, pStr0, pStrSize0);
|
|
pStr0[pStrOffset0] = 0;
|
|
}
|
|
ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, pStr0);
|
|
if (pStrSize0 >= Utils.MaxStackallocSize)
|
|
{
|
|
Utils.Free(pStr0);
|
|
}
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static ImPlotTick* AddTickNative(ImPlotTicker* self, double value, byte major, int level, byte showLabel, ImPlotFormatter formatter, void* data)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotTicker*, double, byte, int, byte, delegate*<double, byte*, int, void*, int>, void*, ImPlotTick*>)funcTable[492])(self, value, major, level, showLabel, (delegate*<double, byte*, int, void*, int>)Utils.GetFunctionPointerForDelegate(formatter), data);
|
|
#else
|
|
return (ImPlotTick*)((delegate* unmanaged[Cdecl]<nint, double, byte, int, byte, nint, nint, nint>)funcTable[492])((nint)self, value, major, level, showLabel, (nint)Utils.GetFunctionPointerForDelegate(formatter), (nint)data);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, double value, bool major, int level, bool showLabel, ImPlotFormatter formatter, void* data)
|
|
{
|
|
ImPlotTickPtr ret = AddTickNative(self, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, formatter, data);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickPtr AddTick(ref ImPlotTicker self, double value, bool major, int level, bool showLabel, ImPlotFormatter formatter, void* data)
|
|
{
|
|
fixed (ImPlotTicker* pself = &self)
|
|
{
|
|
ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, value, major ? (byte)1 : (byte)0, level, showLabel ? (byte)1 : (byte)0, formatter, data);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static ImPlotTick* AddTickNative(ImPlotTicker* self, ImPlotTick tick)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotTicker*, ImPlotTick, ImPlotTick*>)funcTable[493])(self, tick);
|
|
#else
|
|
return (ImPlotTick*)((delegate* unmanaged[Cdecl]<nint, ImPlotTick, nint>)funcTable[493])((nint)self, tick);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickPtr AddTick(ImPlotTickerPtr self, ImPlotTick tick)
|
|
{
|
|
ImPlotTickPtr ret = AddTickNative(self, tick);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static ImPlotTickPtr AddTick(ref ImPlotTicker self, ImPlotTick tick)
|
|
{
|
|
fixed (ImPlotTicker* pself = &self)
|
|
{
|
|
ImPlotTickPtr ret = AddTickNative((ImPlotTicker*)pself, tick);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static byte* GetTextNative(ImPlotTicker* self, int idx)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotTicker*, int, byte*>)funcTable[494])(self, idx);
|
|
#else
|
|
return (byte*)((delegate* unmanaged[Cdecl]<nint, int, nint>)funcTable[494])((nint)self, idx);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static byte* GetText(ImPlotTickerPtr self, int idx)
|
|
{
|
|
byte* ret = GetTextNative(self, idx);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static string GetTextS(ImPlotTickerPtr self, int idx)
|
|
{
|
|
string ret = Utils.DecodeStringUTF8(GetTextNative(self, idx));
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static byte* GetText(ref ImPlotTicker self, int idx)
|
|
{
|
|
fixed (ImPlotTicker* pself = &self)
|
|
{
|
|
byte* ret = GetTextNative((ImPlotTicker*)pself, idx);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static string GetTextS(ref ImPlotTicker self, int idx)
|
|
{
|
|
fixed (ImPlotTicker* pself = &self)
|
|
{
|
|
string ret = Utils.DecodeStringUTF8(GetTextNative((ImPlotTicker*)pself, idx));
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static byte* GetTextNative(ImPlotTicker* self, ImPlotTick tick)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotTicker*, ImPlotTick, byte*>)funcTable[495])(self, tick);
|
|
#else
|
|
return (byte*)((delegate* unmanaged[Cdecl]<nint, ImPlotTick, nint>)funcTable[495])((nint)self, tick);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static byte* GetText(ImPlotTickerPtr self, ImPlotTick tick)
|
|
{
|
|
byte* ret = GetTextNative(self, tick);
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static string GetTextS(ImPlotTickerPtr self, ImPlotTick tick)
|
|
{
|
|
string ret = Utils.DecodeStringUTF8(GetTextNative(self, tick));
|
|
return ret;
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static byte* GetText(ref ImPlotTicker self, ImPlotTick tick)
|
|
{
|
|
fixed (ImPlotTicker* pself = &self)
|
|
{
|
|
byte* ret = GetTextNative((ImPlotTicker*)pself, tick);
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static string GetTextS(ref ImPlotTicker self, ImPlotTick tick)
|
|
{
|
|
fixed (ImPlotTicker* pself = &self)
|
|
{
|
|
string ret = Utils.DecodeStringUTF8(GetTextNative((ImPlotTicker*)pself, tick));
|
|
return ret;
|
|
}
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static void OverrideSizeLateNative(ImPlotTicker* self, Vector2 size)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
((delegate* unmanaged[Cdecl]<ImPlotTicker*, Vector2, void>)funcTable[496])(self, size);
|
|
#else
|
|
((delegate* unmanaged[Cdecl]<nint, Vector2, void>)funcTable[496])((nint)self, size);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void OverrideSizeLate(ImPlotTickerPtr self, Vector2 size)
|
|
{
|
|
OverrideSizeLateNative(self, size);
|
|
}
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
public static void OverrideSizeLate(ref ImPlotTicker self, Vector2 size)
|
|
{
|
|
fixed (ImPlotTicker* pself = &self)
|
|
{
|
|
OverrideSizeLateNative((ImPlotTicker*)pself, size);
|
|
}
|
|
}
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[497]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void ResetNative(ImPlotTicker* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotTicker*, void>)funcTable[497])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[497])((nint)self);
#endif
        }

        /// <summary>
        /// Invokes the native Reset binding on the ticker referenced by <paramref name="self"/>.
        /// </summary>
        public static void Reset(ImPlotTickerPtr self)
        {
            ResetNative(self);
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static void Reset(ref ImPlotTicker self)
        {
            fixed (ImPlotTicker* pself = &self)
            {
                ResetNative((ImPlotTicker*)pself);
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[498]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static int TickCountNative(ImPlotTicker* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotTicker*, int>)funcTable[498])(self);
#else
            return (int)((delegate* unmanaged[Cdecl]<nint, int>)funcTable[498])((nint)self);
#endif
        }

        /// <summary>
        /// Returns the native TickCount value for the ticker referenced by <paramref name="self"/>.
        /// </summary>
        public static int TickCount(ImPlotTickerPtr self)
        {
            int ret = TickCountNative(self);
            return ret;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static int TickCount(ref ImPlotTicker self)
        {
            fixed (ImPlotTicker* pself = &self)
            {
                int ret = TickCountNative((ImPlotTicker*)pself);
                return ret;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native constructor call through funcTable[499]; returns a pointer to the
        /// newly created native ImPlotAxis. Pre-.NET 5 the return value is marshalled via nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static ImPlotAxis* ImPlotAxisNative()
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*>)funcTable[499])();
#else
            return (ImPlotAxis*)((delegate* unmanaged[Cdecl]<nint>)funcTable[499])();
#endif
        }

        /// <summary>
        /// Creates a native ImPlotAxis and returns it wrapped in an ImPlotAxisPtr.
        /// NOTE(review): the caller appears to own the native allocation — presumably
        /// released via <see cref="Destroy(ImPlotAxisPtr)"/>; confirm against the
        /// generator's ownership convention.
        /// </summary>
        public static ImPlotAxisPtr ImPlotAxis()
        {
            ImPlotAxisPtr ret = ImPlotAxisNative();
            return ret;
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[500]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void DestroyNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotAxis*, void>)funcTable[500])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[500])((nint)self);
#endif
        }

        /// <summary>
        /// Destroys the native ImPlotAxis referenced by <paramref name="self"/>.
        /// The pointer must not be used afterwards.
        /// </summary>
        public static void Destroy(ImPlotAxisPtr self)
        {
            DestroyNative(self);
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static void Destroy(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                DestroyNative((ImPlotAxis*)pself);
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[501]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void ResetNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotAxis*, void>)funcTable[501])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[501])((nint)self);
#endif
        }

        /// <summary>
        /// Invokes the native Reset binding on the axis referenced by <paramref name="self"/>.
        /// </summary>
        public static void Reset(ImPlotAxisPtr self)
        {
            ResetNative(self);
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static void Reset(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                ResetNative((ImPlotAxis*)pself);
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[502]; the native bool result is returned as a byte.
        /// On runtimes before .NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte SetMinNative(ImPlotAxis* self, double min, byte force)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, double, byte, byte>)funcTable[502])(self, min, force);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, double, byte, byte>)funcTable[502])((nint)self, min, force);
#endif
        }

        /// <summary>
        /// Sets the axis minimum via the native binding; returns true when the native
        /// call returns non-zero.
        /// </summary>
        public static bool SetMin(ImPlotAxisPtr self, double min, bool force)
        {
            byte ret = SetMinNative(self, min, force ? (byte)1 : (byte)0);
            return ret != 0;
        }

        /// <summary>
        /// Convenience overload with <c>force</c> defaulting to false (byte 0).
        /// </summary>
        public static bool SetMin(ImPlotAxisPtr self, double min)
        {
            byte ret = SetMinNative(self, min, (byte)(0));
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool SetMin(ref ImPlotAxis self, double min, bool force)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = SetMinNative((ImPlotAxis*)pself, min, force ? (byte)1 : (byte)0);
                return ret != 0;
            }
        }

        /// <summary>
        /// By-ref overload with <c>force</c> defaulting to false (byte 0).
        /// </summary>
        public static bool SetMin(ref ImPlotAxis self, double min)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = SetMinNative((ImPlotAxis*)pself, min, (byte)(0));
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[503]; the native bool result is returned as a byte.
        /// On runtimes before .NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte SetMaxNative(ImPlotAxis* self, double max, byte force)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, double, byte, byte>)funcTable[503])(self, max, force);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, double, byte, byte>)funcTable[503])((nint)self, max, force);
#endif
        }

        /// <summary>
        /// Sets the axis maximum via the native binding; returns true when the native
        /// call returns non-zero.
        /// </summary>
        public static bool SetMax(ImPlotAxisPtr self, double max, bool force)
        {
            byte ret = SetMaxNative(self, max, force ? (byte)1 : (byte)0);
            return ret != 0;
        }

        /// <summary>
        /// Convenience overload with <c>force</c> defaulting to false (byte 0).
        /// </summary>
        public static bool SetMax(ImPlotAxisPtr self, double max)
        {
            byte ret = SetMaxNative(self, max, (byte)(0));
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool SetMax(ref ImPlotAxis self, double max, bool force)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = SetMaxNative((ImPlotAxis*)pself, max, force ? (byte)1 : (byte)0);
                return ret != 0;
            }
        }

        /// <summary>
        /// By-ref overload with <c>force</c> defaulting to false (byte 0).
        /// </summary>
        public static bool SetMax(ref ImPlotAxis self, double max)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = SetMaxNative((ImPlotAxis*)pself, max, (byte)(0));
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[504] taking the range as two doubles.
        /// On runtimes before .NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void SetRangeNative(ImPlotAxis* self, double v1, double v2)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotAxis*, double, double, void>)funcTable[504])(self, v1, v2);
#else
            ((delegate* unmanaged[Cdecl]<nint, double, double, void>)funcTable[504])((nint)self, v1, v2);
#endif
        }

        /// <summary>
        /// Sets the axis range from two endpoint values via the native binding.
        /// </summary>
        public static void SetRange(ImPlotAxisPtr self, double v1, double v2)
        {
            SetRangeNative(self, v1, v2);
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static void SetRange(ref ImPlotAxis self, double v1, double v2)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                SetRangeNative((ImPlotAxis*)pself, v1, v2);
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[505] taking the range as an ImPlotRange value.
        /// On runtimes before .NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void SetRangeNative(ImPlotAxis* self, ImPlotRange range)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotAxis*, ImPlotRange, void>)funcTable[505])(self, range);
#else
            ((delegate* unmanaged[Cdecl]<nint, ImPlotRange, void>)funcTable[505])((nint)self, range);
#endif
        }

        /// <summary>
        /// Sets the axis range from an <see cref="ImPlotRange"/> via the native binding.
        /// </summary>
        public static void SetRange(ImPlotAxisPtr self, ImPlotRange range)
        {
            SetRangeNative(self, range);
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static void SetRange(ref ImPlotAxis self, ImPlotRange range)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                SetRangeNative((ImPlotAxis*)pself, range);
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[506]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void SetAspectNative(ImPlotAxis* self, double unitPerPix)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotAxis*, double, void>)funcTable[506])(self, unitPerPix);
#else
            ((delegate* unmanaged[Cdecl]<nint, double, void>)funcTable[506])((nint)self, unitPerPix);
#endif
        }

        /// <summary>
        /// Sets the axis aspect (units per pixel, per the parameter name) via the native binding.
        /// </summary>
        public static void SetAspect(ImPlotAxisPtr self, double unitPerPix)
        {
            SetAspectNative(self, unitPerPix);
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static void SetAspect(ref ImPlotAxis self, double unitPerPix)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                SetAspectNative((ImPlotAxis*)pself, unitPerPix);
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[507]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static float PixelSizeNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, float>)funcTable[507])(self);
#else
            return (float)((delegate* unmanaged[Cdecl]<nint, float>)funcTable[507])((nint)self);
#endif
        }

        /// <summary>
        /// Returns the native PixelSize value for the axis referenced by <paramref name="self"/>.
        /// </summary>
        public static float PixelSize(ImPlotAxisPtr self)
        {
            float ret = PixelSizeNative(self);
            return ret;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static float PixelSize(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                float ret = PixelSizeNative((ImPlotAxis*)pself);
                return ret;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[508]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double GetAspectNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, double>)funcTable[508])(self);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, double>)funcTable[508])((nint)self);
#endif
        }

        /// <summary>
        /// Returns the native GetAspect value for the axis referenced by <paramref name="self"/>.
        /// </summary>
        public static double GetAspect(ImPlotAxisPtr self)
        {
            double ret = GetAspectNative(self);
            return ret;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static double GetAspect(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                double ret = GetAspectNative((ImPlotAxis*)pself);
                return ret;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[509]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void ConstrainNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotAxis*, void>)funcTable[509])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[509])((nint)self);
#endif
        }

        /// <summary>
        /// Invokes the native Constrain binding on the axis referenced by <paramref name="self"/>.
        /// </summary>
        public static void Constrain(ImPlotAxisPtr self)
        {
            ConstrainNative(self);
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static void Constrain(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                ConstrainNative((ImPlotAxis*)pself);
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[510]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void UpdateTransformCacheNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotAxis*, void>)funcTable[510])(self);
#else
            ((delegate* unmanaged[Cdecl]<nint, void>)funcTable[510])((nint)self);
#endif
        }

        /// <summary>
        /// Invokes the native UpdateTransformCache binding on the axis referenced by
        /// <paramref name="self"/>.
        /// </summary>
        public static void UpdateTransformCache(ImPlotAxisPtr self)
        {
            UpdateTransformCacheNative(self);
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static void UpdateTransformCache(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                UpdateTransformCacheNative((ImPlotAxis*)pself);
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[511]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static float PlotToPixelsNative(ImPlotAxis* self, double plt)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, double, float>)funcTable[511])(self, plt);
#else
            return (float)((delegate* unmanaged[Cdecl]<nint, double, float>)funcTable[511])((nint)self, plt);
#endif
        }

        /// <summary>
        /// Converts a plot-space value to a pixel coordinate via the native binding.
        /// </summary>
        public static float PlotToPixels(ImPlotAxisPtr self, double plt)
        {
            float ret = PlotToPixelsNative(self, plt);
            return ret;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static float PlotToPixels(ref ImPlotAxis self, double plt)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                float ret = PlotToPixelsNative((ImPlotAxis*)pself, plt);
                return ret;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[512]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static double PixelsToPlotNative(ImPlotAxis* self, float pix)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, float, double>)funcTable[512])(self, pix);
#else
            return (double)((delegate* unmanaged[Cdecl]<nint, float, double>)funcTable[512])((nint)self, pix);
#endif
        }

        /// <summary>
        /// Converts a pixel coordinate to a plot-space value via the native binding
        /// (inverse of <see cref="PlotToPixels(ImPlotAxisPtr, double)"/> per the names).
        /// </summary>
        public static double PixelsToPlot(ImPlotAxisPtr self, float pix)
        {
            double ret = PixelsToPlotNative(self, pix);
            return ret;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static double PixelsToPlot(ref ImPlotAxis self, float pix)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                double ret = PixelsToPlotNative((ImPlotAxis*)pself, pix);
                return ret;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[513]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void ExtendFitNative(ImPlotAxis* self, double v)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotAxis*, double, void>)funcTable[513])(self, v);
#else
            ((delegate* unmanaged[Cdecl]<nint, double, void>)funcTable[513])((nint)self, v);
#endif
        }

        /// <summary>
        /// Invokes the native ExtendFit binding with value <paramref name="v"/>.
        /// </summary>
        public static void ExtendFit(ImPlotAxisPtr self, double v)
        {
            ExtendFitNative(self, v);
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static void ExtendFit(ref ImPlotAxis self, double v)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                ExtendFitNative((ImPlotAxis*)pself, v);
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[514] taking two axis pointers.
        /// On runtimes before .NET 5 both pointers are passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void ExtendFitWithNative(ImPlotAxis* self, ImPlotAxis* alt, double v, double vAlt)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotAxis*, ImPlotAxis*, double, double, void>)funcTable[514])(self, alt, v, vAlt);
#else
            ((delegate* unmanaged[Cdecl]<nint, nint, double, double, void>)funcTable[514])((nint)self, (nint)alt, v, vAlt);
#endif
        }

        /// <summary>
        /// Invokes the native ExtendFitWith binding on <paramref name="self"/> with the
        /// alternate axis <paramref name="alt"/> and the two values.
        /// </summary>
        public static void ExtendFitWith(ImPlotAxisPtr self, ImPlotAxisPtr alt, double v, double vAlt)
        {
            ExtendFitWithNative(self, alt, v, vAlt);
        }

        /// <summary>
        /// Overload pinning <paramref name="self"/> (passed by reference) for the native call.
        /// </summary>
        public static void ExtendFitWith(ref ImPlotAxis self, ImPlotAxisPtr alt, double v, double vAlt)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                ExtendFitWithNative((ImPlotAxis*)pself, alt, v, vAlt);
            }
        }

        /// <summary>
        /// Overload pinning <paramref name="alt"/> (passed by reference) for the native call.
        /// </summary>
        public static void ExtendFitWith(ImPlotAxisPtr self, ref ImPlotAxis alt, double v, double vAlt)
        {
            fixed (ImPlotAxis* palt = &alt)
            {
                ExtendFitWithNative(self, (ImPlotAxis*)palt, v, vAlt);
            }
        }

        /// <summary>
        /// Overload pinning both <paramref name="self"/> and <paramref name="alt"/> for the native call.
        /// </summary>
        public static void ExtendFitWith(ref ImPlotAxis self, ref ImPlotAxis alt, double v, double vAlt)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                fixed (ImPlotAxis* palt = &alt)
                {
                    ExtendFitWithNative((ImPlotAxis*)pself, (ImPlotAxis*)palt, v, vAlt);
                }
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[515]. On runtimes before .NET 5 the pointer
        /// argument is passed as nint instead of a typed pointer.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void ApplyFitNative(ImPlotAxis* self, float padding)
        {
#if NET5_0_OR_GREATER
            ((delegate* unmanaged[Cdecl]<ImPlotAxis*, float, void>)funcTable[515])(self, padding);
#else
            ((delegate* unmanaged[Cdecl]<nint, float, void>)funcTable[515])((nint)self, padding);
#endif
        }

        /// <summary>
        /// Invokes the native ApplyFit binding with the given padding.
        /// </summary>
        public static void ApplyFit(ImPlotAxisPtr self, float padding)
        {
            ApplyFitNative(self, padding);
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static void ApplyFit(ref ImPlotAxis self, float padding)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                ApplyFitNative((ImPlotAxis*)pself, padding);
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[516]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte HasLabelNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[516])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[516])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native HasLabel binding; true when the native call returns non-zero.
        /// </summary>
        public static bool HasLabel(ImPlotAxisPtr self)
        {
            byte ret = HasLabelNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool HasLabel(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = HasLabelNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[517]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte HasGridLinesNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[517])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[517])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native HasGridLines binding; true when the native call returns non-zero.
        /// </summary>
        public static bool HasGridLines(ImPlotAxisPtr self)
        {
            byte ret = HasGridLinesNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool HasGridLines(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = HasGridLinesNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[518]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte HasTickLabelsNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[518])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[518])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native HasTickLabels binding; true when the native call returns non-zero.
        /// </summary>
        public static bool HasTickLabels(ImPlotAxisPtr self)
        {
            byte ret = HasTickLabelsNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool HasTickLabels(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = HasTickLabelsNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[519]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte HasTickMarksNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[519])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[519])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native HasTickMarks binding; true when the native call returns non-zero.
        /// </summary>
        public static bool HasTickMarks(ImPlotAxisPtr self)
        {
            byte ret = HasTickMarksNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool HasTickMarks(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = HasTickMarksNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[520]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte WillRenderNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[520])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[520])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native WillRender binding; true when the native call returns non-zero.
        /// </summary>
        public static bool WillRender(ImPlotAxisPtr self)
        {
            byte ret = WillRenderNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool WillRender(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = WillRenderNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[521]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte IsOppositeNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[521])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[521])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native IsOpposite binding; true when the native call returns non-zero.
        /// </summary>
        public static bool IsOpposite(ImPlotAxisPtr self)
        {
            byte ret = IsOppositeNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool IsOpposite(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = IsOppositeNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[522]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte IsInvertedNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[522])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[522])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native IsInverted binding; true when the native call returns non-zero.
        /// </summary>
        public static bool IsInverted(ImPlotAxisPtr self)
        {
            byte ret = IsInvertedNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool IsInverted(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = IsInvertedNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[523]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte IsForegroundNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[523])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[523])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native IsForeground binding; true when the native call returns non-zero.
        /// </summary>
        public static bool IsForeground(ImPlotAxisPtr self)
        {
            byte ret = IsForegroundNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool IsForeground(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = IsForegroundNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[524]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte IsAutoFittingNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[524])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[524])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native IsAutoFitting binding; true when the native call returns non-zero.
        /// </summary>
        public static bool IsAutoFitting(ImPlotAxisPtr self)
        {
            byte ret = IsAutoFittingNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool IsAutoFitting(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = IsAutoFittingNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[525]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte CanInitFitNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[525])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[525])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native CanInitFit binding; true when the native call returns non-zero.
        /// </summary>
        public static bool CanInitFit(ImPlotAxisPtr self)
        {
            byte ret = CanInitFitNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool CanInitFit(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = CanInitFitNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[526]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte IsRangeLockedNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[526])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[526])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native IsRangeLocked binding; true when the native call returns non-zero.
        /// </summary>
        public static bool IsRangeLocked(ImPlotAxisPtr self)
        {
            byte ret = IsRangeLockedNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool IsRangeLocked(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = IsRangeLockedNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[527]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte IsLockedMinNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[527])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[527])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native IsLockedMin binding; true when the native call returns non-zero.
        /// </summary>
        public static bool IsLockedMin(ImPlotAxisPtr self)
        {
            byte ret = IsLockedMinNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool IsLockedMin(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = IsLockedMinNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[528]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte IsLockedMaxNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[528])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[528])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native IsLockedMax binding; true when the native call returns non-zero.
        /// </summary>
        public static bool IsLockedMax(ImPlotAxisPtr self)
        {
            byte ret = IsLockedMaxNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool IsLockedMax(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = IsLockedMaxNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[529]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte IsLockedNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[529])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[529])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native IsLocked binding; true when the native call returns non-zero.
        /// </summary>
        public static bool IsLocked(ImPlotAxisPtr self)
        {
            byte ret = IsLockedNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool IsLocked(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = IsLockedNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[530]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte IsInputLockedMinNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[530])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[530])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native IsInputLockedMin binding; true when the native call returns non-zero.
        /// </summary>
        public static bool IsInputLockedMin(ImPlotAxisPtr self)
        {
            byte ret = IsInputLockedMinNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool IsInputLockedMin(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = IsInputLockedMinNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
        /// <summary>
        /// Raw native call through funcTable[531]; the native bool result is returned as a byte.
        /// Pre-.NET 5 the pointer argument is passed as nint.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static byte IsInputLockedMaxNative(ImPlotAxis* self)
        {
#if NET5_0_OR_GREATER
            return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[531])(self);
#else
            return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[531])((nint)self);
#endif
        }

        /// <summary>
        /// Queries the native IsInputLockedMax binding; true when the native call returns non-zero.
        /// </summary>
        public static bool IsInputLockedMax(ImPlotAxisPtr self)
        {
            byte ret = IsInputLockedMaxNative(self);
            return ret != 0;
        }

        /// <summary>
        /// By-ref overload: pins <paramref name="self"/> for the duration of the native call.
        /// </summary>
        public static bool IsInputLockedMax(ref ImPlotAxis self)
        {
            fixed (ImPlotAxis* pself = &self)
            {
                byte ret = IsInputLockedMaxNative((ImPlotAxis*)pself);
                return ret != 0;
            }
        }
|
|
|
|
/// <summary>
|
|
/// To be documented.
|
|
/// </summary>
|
|
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
internal static byte IsInputLockedNative(ImPlotAxis* self)
|
|
{
|
|
#if NET5_0_OR_GREATER
|
|
return ((delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[532])(self);
|
|
#else
|
|
return (byte)((delegate* unmanaged[Cdecl]<nint, byte>)funcTable[532])((nint)self);
|
|
#endif
|
|
}
|
|
|
|
/// <summary>
/// Converts the raw byte result of <see cref="IsInputLockedNative"/> into a managed bool.
/// </summary>
public static bool IsInputLocked(ImPlotAxisPtr self) => IsInputLockedNative(self) != 0;
|
|
|
|
/// <summary>
/// Pins the managed <paramref name="self"/> reference and forwards to <see cref="IsInputLockedNative"/>,
/// converting the native byte result to a bool.
/// </summary>
public static bool IsInputLocked(ref ImPlotAxis self)
{
    fixed (ImPlotAxis* pSelf = &self)
    {
        return IsInputLockedNative(pSelf) != 0;
    }
}
|
|
|
|
/// <summary>
/// Invokes the native function stored in slot 533 of <c>funcTable</c> for the given axis.
/// Returns the native boolean as a raw byte (0 = false, non-zero = true).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte HasMenusNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
    var fn = (delegate* unmanaged[Cdecl]<ImPlotAxis*, byte>)funcTable[533];
    return fn(self);
#else
    // Pre-.NET 5 runtimes cannot use typed pointers in the unmanaged signature; marshal via nint.
    var fn = (delegate* unmanaged[Cdecl]<nint, byte>)funcTable[533];
    return fn((nint)self);
#endif
}
|
|
|
|
/// <summary>
/// Converts the raw byte result of <see cref="HasMenusNative"/> into a managed bool.
/// </summary>
public static bool HasMenus(ImPlotAxisPtr self) => HasMenusNative(self) != 0;
|
|
|
|
/// <summary>
/// Pins the managed <paramref name="self"/> reference and forwards to <see cref="HasMenusNative"/>,
/// converting the native byte result to a bool.
/// </summary>
public static bool HasMenus(ref ImPlotAxis self)
{
    fixed (ImPlotAxis* pSelf = &self)
    {
        return HasMenusNative(pSelf) != 0;
    }
}
|
|
|
|
/// <summary>
/// Invokes the native function stored in slot 534 of <c>funcTable</c> for the given axis.
/// <paramref name="increasing"/> is the native-boolean flag (0/1); returns the native boolean as a raw byte.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static byte IsPanLockedNative(ImPlotAxis* self, byte increasing)
{
#if NET5_0_OR_GREATER
    var fn = (delegate* unmanaged[Cdecl]<ImPlotAxis*, byte, byte>)funcTable[534];
    return fn(self, increasing);
#else
    // Pre-.NET 5 runtimes cannot use typed pointers in the unmanaged signature; marshal via nint.
    var fn = (delegate* unmanaged[Cdecl]<nint, byte, byte>)funcTable[534];
    return fn((nint)self, increasing);
#endif
}
|
|
|
|
/// <summary>
/// Maps the managed <paramref name="increasing"/> flag to a native byte, calls
/// <see cref="IsPanLockedNative"/>, and converts the byte result back to a bool.
/// </summary>
public static bool IsPanLocked(ImPlotAxisPtr self, bool increasing)
{
    return IsPanLockedNative(self, (byte)(increasing ? 1 : 0)) != 0;
}
|
|
|
|
/// <summary>
/// Pins the managed <paramref name="self"/> reference, maps <paramref name="increasing"/> to a
/// native byte, and forwards to <see cref="IsPanLockedNative"/>.
/// </summary>
public static bool IsPanLocked(ref ImPlotAxis self, bool increasing)
{
    fixed (ImPlotAxis* pSelf = &self)
    {
        return IsPanLockedNative(pSelf, (byte)(increasing ? 1 : 0)) != 0;
    }
}
|
|
|
|
/// <summary>
/// Invokes the native function stored in slot 535 of <c>funcTable</c> for the given axis.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void PushLinksNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
    var fn = (delegate* unmanaged[Cdecl]<ImPlotAxis*, void>)funcTable[535];
    fn(self);
#else
    // Pre-.NET 5 runtimes cannot use typed pointers in the unmanaged signature; marshal via nint.
    var fn = (delegate* unmanaged[Cdecl]<nint, void>)funcTable[535];
    fn((nint)self);
#endif
}
|
|
|
|
/// <summary>
/// Forwards directly to <see cref="PushLinksNative"/>.
/// </summary>
public static void PushLinks(ImPlotAxisPtr self) => PushLinksNative(self);
|
|
|
|
/// <summary>
/// Pins the managed <paramref name="self"/> reference and forwards to <see cref="PushLinksNative"/>.
/// </summary>
public static void PushLinks(ref ImPlotAxis self)
{
    fixed (ImPlotAxis* pSelf = &self)
    {
        PushLinksNative(pSelf);
    }
}
|
|
|
|
/// <summary>
/// Invokes the native function stored in slot 536 of <c>funcTable</c> for the given axis.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void PullLinksNative(ImPlotAxis* self)
{
#if NET5_0_OR_GREATER
    var fn = (delegate* unmanaged[Cdecl]<ImPlotAxis*, void>)funcTable[536];
    fn(self);
#else
    // Pre-.NET 5 runtimes cannot use typed pointers in the unmanaged signature; marshal via nint.
    var fn = (delegate* unmanaged[Cdecl]<nint, void>)funcTable[536];
    fn((nint)self);
#endif
}
|
|
|
|
/// <summary>
/// Forwards directly to <see cref="PullLinksNative"/>.
/// </summary>
public static void PullLinks(ImPlotAxisPtr self) => PullLinksNative(self);
|
|
|
|
/// <summary>
/// Pins the managed <paramref name="self"/> reference and forwards to <see cref="PullLinksNative"/>.
/// </summary>
public static void PullLinks(ref ImPlotAxis self)
{
    fixed (ImPlotAxis* pSelf = &self)
    {
        PullLinksNative(pSelf);
    }
}
|
|
|
|
/// <summary>
/// Invokes the parameterless native constructor stored in slot 537 of <c>funcTable</c>
/// and returns the raw <see cref="ImPlotAlignmentData"/> pointer it produces.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static ImPlotAlignmentData* ImPlotAlignmentDataNative()
{
#if NET5_0_OR_GREATER
    var fn = (delegate* unmanaged[Cdecl]<ImPlotAlignmentData*>)funcTable[537];
    return fn();
#else
    // Pre-.NET 5 runtimes cannot use typed pointers in the unmanaged signature; marshal via nint.
    var fn = (delegate* unmanaged[Cdecl]<nint>)funcTable[537];
    return (ImPlotAlignmentData*)fn();
#endif
}
|
|
|
|
/// <summary>
/// Wraps the raw pointer from <see cref="ImPlotAlignmentDataNative"/> in an
/// <see cref="ImPlotAlignmentDataPtr"/>.
/// </summary>
public static ImPlotAlignmentDataPtr ImPlotAlignmentData() => ImPlotAlignmentDataNative();
|
|
|
|
/// <summary>
/// Invokes the native destructor stored in slot 538 of <c>funcTable</c> for the given
/// <see cref="ImPlotAlignmentData"/> instance.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void DestroyNative(ImPlotAlignmentData* self)
{
#if NET5_0_OR_GREATER
    var fn = (delegate* unmanaged[Cdecl]<ImPlotAlignmentData*, void>)funcTable[538];
    fn(self);
#else
    // Pre-.NET 5 runtimes cannot use typed pointers in the unmanaged signature; marshal via nint.
    var fn = (delegate* unmanaged[Cdecl]<nint, void>)funcTable[538];
    fn((nint)self);
#endif
}
|
|
|
|
/// <summary>
/// Forwards directly to <see cref="DestroyNative"/>.
/// </summary>
public static void Destroy(ImPlotAlignmentDataPtr self) => DestroyNative(self);
|
|
|
|
/// <summary>
/// Pins the managed <paramref name="self"/> reference and forwards to <see cref="DestroyNative"/>.
/// </summary>
public static void Destroy(ref ImPlotAlignmentData self)
{
    fixed (ImPlotAlignmentData* pSelf = &self)
    {
        DestroyNative(pSelf);
    }
}
|
|
|
|
/// <summary>
/// Invokes the native function stored in slot 539 of <c>funcTable</c> for the given
/// <see cref="ImPlotAlignmentData"/> instance.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void BeginNative(ImPlotAlignmentData* self)
{
#if NET5_0_OR_GREATER
    var fn = (delegate* unmanaged[Cdecl]<ImPlotAlignmentData*, void>)funcTable[539];
    fn(self);
#else
    // Pre-.NET 5 runtimes cannot use typed pointers in the unmanaged signature; marshal via nint.
    var fn = (delegate* unmanaged[Cdecl]<nint, void>)funcTable[539];
    fn((nint)self);
#endif
}
|
|
|
|
/// <summary>
/// Forwards directly to <see cref="BeginNative"/>.
/// </summary>
public static void Begin(ImPlotAlignmentDataPtr self) => BeginNative(self);
|
|
|
|
/// <summary>
/// Pins the managed <paramref name="self"/> reference and forwards to <see cref="BeginNative"/>.
/// </summary>
public static void Begin(ref ImPlotAlignmentData self)
{
    fixed (ImPlotAlignmentData* pSelf = &self)
    {
        BeginNative(pSelf);
    }
}
|
|
|
|
/// <summary>
/// Invokes the native function stored in slot 540 of <c>funcTable</c>, passing the
/// alignment-data instance and the four float out/in pointers straight through.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void UpdateNative(ImPlotAlignmentData* self, float* padA, float* padB, float* deltaA, float* deltaB)
{
#if NET5_0_OR_GREATER
    var fn = (delegate* unmanaged[Cdecl]<ImPlotAlignmentData*, float*, float*, float*, float*, void>)funcTable[540];
    fn(self, padA, padB, deltaA, deltaB);
#else
    // Pre-.NET 5 runtimes cannot use typed pointers in the unmanaged signature; marshal via nint.
    var fn = (delegate* unmanaged[Cdecl]<nint, nint, nint, nint, nint, void>)funcTable[540];
    fn((nint)self, (nint)padA, (nint)padB, (nint)deltaA, (nint)deltaB);
#endif
}
|
|
|
|
/// <summary>
/// All-pointer overload; forwards directly to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, float* padA, float* padB, float* deltaA, float* deltaB)
    => UpdateNative(self, padA, padB, deltaA, deltaB);
|
|
|
|
/// <summary>
/// Pins <paramref name="self"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, float* padA, float* padB, float* deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* pSelf = &self)
    {
        UpdateNative(pSelf, padA, padB, deltaA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="padA"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, ref float padA, float* padB, float* deltaA, float* deltaB)
{
    fixed (float* pA = &padA)
    {
        UpdateNative(self, pA, padB, deltaA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="self"/> and <paramref name="padA"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, ref float padA, float* padB, float* deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* pSelf = &self)
    fixed (float* pA = &padA)
    {
        UpdateNative(pSelf, pA, padB, deltaA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="padB"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, float* padA, ref float padB, float* deltaA, float* deltaB)
{
    fixed (float* pB = &padB)
    {
        UpdateNative(self, padA, pB, deltaA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="self"/> and <paramref name="padB"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, float* padA, ref float padB, float* deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* pSelf = &self)
    fixed (float* pB = &padB)
    {
        UpdateNative(pSelf, padA, pB, deltaA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="padA"/> and <paramref name="padB"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, ref float padA, ref float padB, float* deltaA, float* deltaB)
{
    fixed (float* pA = &padA)
    fixed (float* pB = &padB)
    {
        UpdateNative(self, pA, pB, deltaA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="self"/>, <paramref name="padA"/>, and <paramref name="padB"/> and forwards
/// to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, ref float padA, ref float padB, float* deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* pSelf = &self)
    fixed (float* pA = &padA)
    fixed (float* pB = &padB)
    {
        UpdateNative(pSelf, pA, pB, deltaA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="deltaA"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, float* padA, float* padB, ref float deltaA, float* deltaB)
{
    fixed (float* dA = &deltaA)
    {
        UpdateNative(self, padA, padB, dA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="self"/> and <paramref name="deltaA"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, float* padA, float* padB, ref float deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* pSelf = &self)
    fixed (float* dA = &deltaA)
    {
        UpdateNative(pSelf, padA, padB, dA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="padA"/> and <paramref name="deltaA"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, ref float padA, float* padB, ref float deltaA, float* deltaB)
{
    fixed (float* pA = &padA)
    fixed (float* dA = &deltaA)
    {
        UpdateNative(self, pA, padB, dA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="self"/>, <paramref name="padA"/>, and <paramref name="deltaA"/> and forwards
/// to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, ref float padA, float* padB, ref float deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* pSelf = &self)
    fixed (float* pA = &padA)
    fixed (float* dA = &deltaA)
    {
        UpdateNative(pSelf, pA, padB, dA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="padB"/> and <paramref name="deltaA"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, float* padA, ref float padB, ref float deltaA, float* deltaB)
{
    fixed (float* pB = &padB)
    fixed (float* dA = &deltaA)
    {
        UpdateNative(self, padA, pB, dA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="self"/>, <paramref name="padB"/>, and <paramref name="deltaA"/> and forwards
/// to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, float* padA, ref float padB, ref float deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* pSelf = &self)
    fixed (float* pB = &padB)
    fixed (float* dA = &deltaA)
    {
        UpdateNative(pSelf, padA, pB, dA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="padA"/>, <paramref name="padB"/>, and <paramref name="deltaA"/> and forwards
/// to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, ref float padA, ref float padB, ref float deltaA, float* deltaB)
{
    fixed (float* pA = &padA)
    fixed (float* pB = &padB)
    fixed (float* dA = &deltaA)
    {
        UpdateNative(self, pA, pB, dA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="self"/>, <paramref name="padA"/>, <paramref name="padB"/>, and
/// <paramref name="deltaA"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, ref float padA, ref float padB, ref float deltaA, float* deltaB)
{
    fixed (ImPlotAlignmentData* pSelf = &self)
    fixed (float* pA = &padA)
    fixed (float* pB = &padB)
    fixed (float* dA = &deltaA)
    {
        UpdateNative(pSelf, pA, pB, dA, deltaB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="deltaB"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, float* padA, float* padB, float* deltaA, ref float deltaB)
{
    fixed (float* dB = &deltaB)
    {
        UpdateNative(self, padA, padB, deltaA, dB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="self"/> and <paramref name="deltaB"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, float* padA, float* padB, float* deltaA, ref float deltaB)
{
    fixed (ImPlotAlignmentData* pSelf = &self)
    fixed (float* dB = &deltaB)
    {
        UpdateNative(pSelf, padA, padB, deltaA, dB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="padA"/> and <paramref name="deltaB"/> and forwards to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ImPlotAlignmentDataPtr self, ref float padA, float* padB, float* deltaA, ref float deltaB)
{
    fixed (float* pA = &padA)
    fixed (float* dB = &deltaB)
    {
        UpdateNative(self, pA, padB, deltaA, dB);
    }
}
|
|
|
|
/// <summary>
/// Pins <paramref name="self"/>, <paramref name="padA"/>, and <paramref name="deltaB"/> and forwards
/// to <see cref="UpdateNative"/>.
/// </summary>
public static void Update(ref ImPlotAlignmentData self, ref float padA, float* padB, float* deltaA, ref float deltaB)
{
    fixed (ImPlotAlignmentData* pSelf = &self)
    fixed (float* pA = &padA)
    fixed (float* dB = &deltaB)
    {
        UpdateNative(pSelf, pA, padB, deltaA, dB);
    }
}
|
|
}
|
|
}
|