Skip to content

Commit

Permalink
adding pre-processing for structs
Browse files Browse the repository at this point in the history
  • Loading branch information
jbkyang-nvi committed Jun 10, 2023
1 parent 43f46cb commit 3b4f6b0
Show file tree
Hide file tree
Showing 15 changed files with 35 additions and 111 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ public class tritonserver extends org.bytedeco.tritonserver.presets.tritonserver
public static final int TRITONSERVER_API_VERSION_MAJOR = 1;

///
public static final int TRITONSERVER_API_VERSION_MINOR = 23;
public static final int TRITONSERVER_API_VERSION_MINOR = 22;

/** Get the TRITONBACKEND API version supported by the Triton shared
* library. This value can be compared against the
Expand Down Expand Up @@ -1211,7 +1211,6 @@ public static native TRITONSERVER_Error TRITONSERVER_InferenceRequestSetCorrelat
* @param correlation_id The correlation ID.
* @return a TRITONSERVER_Error indicating success or failure. */

///
///
public static native TRITONSERVER_Error TRITONSERVER_InferenceRequestSetCorrelationIdString(
TRITONSERVER_InferenceRequest inference_request,
Expand All @@ -1220,9 +1219,7 @@ public static native TRITONSERVER_Error TRITONSERVER_InferenceRequestSetCorrelat
TRITONSERVER_InferenceRequest inference_request,
@Cast("const char*") BytePointer correlation_id);

/** Deprecated. See TRITONSERVER_InferenceRequestPriorityUInt64 instead.
*
* Get the priority for a request. The default is 0 indicating that
/** Get the priority for a request. The default is 0 indicating that
* the request does not specify a priority and so will use the
* model's default priority.
*
Expand All @@ -1238,29 +1235,7 @@ public static native TRITONSERVER_Error TRITONSERVER_InferenceRequestPriority(
public static native TRITONSERVER_Error TRITONSERVER_InferenceRequestPriority(
TRITONSERVER_InferenceRequest inference_request, @Cast("uint32_t*") int[] priority);

/** Get the priority for a request. The default is 0 indicating that
* the request does not specify a priority and so will use the
* model's default priority.
*
* @param inference_request The request object.
* @param priority Returns the priority level.
* @return a TRITONSERVER_Error indicating success or failure. */

///
///
public static native TRITONSERVER_Error TRITONSERVER_InferenceRequestPriorityUInt64(
TRITONSERVER_InferenceRequest inference_request,
@Cast("uint64_t*") LongPointer priority);
public static native TRITONSERVER_Error TRITONSERVER_InferenceRequestPriorityUInt64(
TRITONSERVER_InferenceRequest inference_request,
@Cast("uint64_t*") LongBuffer priority);
public static native TRITONSERVER_Error TRITONSERVER_InferenceRequestPriorityUInt64(
TRITONSERVER_InferenceRequest inference_request,
@Cast("uint64_t*") long[] priority);

/** Deprecated. See TRITONSERVER_InferenceRequestSetPriorityUInt64 instead.
*
* Set the priority for a request. The default is 0 indicating that
/** Set the priority for a request. The default is 0 indicating that
* the request does not specify a priority and so will use the
* model's default priority.
*
Expand All @@ -1271,18 +1246,6 @@ public static native TRITONSERVER_Error TRITONSERVER_InferenceRequestPriorityUIn
///
public static native TRITONSERVER_Error TRITONSERVER_InferenceRequestSetPriority(
TRITONSERVER_InferenceRequest inference_request, @Cast("uint32_t") int priority);

/** Set the priority for a request. The default is 0 indicating that
* the request does not specify a priority and so will use the
* model's default priority.
*
* @param inference_request The request object.
* @param priority The priority level.
* @return a TRITONSERVER_Error indicating success or failure. */

///
public static native TRITONSERVER_Error TRITONSERVER_InferenceRequestSetPriorityUInt64(
TRITONSERVER_InferenceRequest inference_request, @Cast("uint64_t") long priority);

/** Get the timeout for a request, in microseconds. The default is 0
* which indicates that the request has no timeout.
Expand Down Expand Up @@ -3218,7 +3181,7 @@ public static native TRITONSERVER_Error TRITONSERVER_GetMetricKind(
public static final int TRITONBACKEND_API_VERSION_MAJOR = 1;

///
public static final int TRITONBACKEND_API_VERSION_MINOR = 13;
public static final int TRITONBACKEND_API_VERSION_MINOR = 12;

/** Get the TRITONBACKEND API version supported by Triton. This value
* can be compared against the TRITONBACKEND_API_VERSION_MAJOR and
Expand Down Expand Up @@ -4641,39 +4604,13 @@ public static native TRITONSERVER_Error TRITONBACKEND_ModelState(
* @param state The user state, or nullptr if no user state.
* @return a TRITONSERVER_Error indicating success or failure. */

///
public static native TRITONSERVER_Error TRITONBACKEND_ModelSetState(
TRITONBACKEND_Model model, Pointer state);

/** Report the memory usage of the model that will be released on
* TRITONBACKEND_ModelFinalize. The backend may call this function within the
* lifecycle of the TRITONBACKEND_Model object (between
* TRITONBACKEND_ModelInitialize and TRITONBACKEND_ModelFinalize) to report the
* latest usage. To report the memory usage of a model instance,
* see TRITONBACKEND_ModelInstanceReportMemoryUsage.
*
* @param model The model.
* @param usage The list of buffer attributes that records the memory usage,
* each entry should record the total memory usage of a given memory type and
* id. For example, if the model itself occupies 64 bytes on each of
* CUDA device 0 and CUDA device 1. Then 'usage' should have first two entries
* set, one has the buffer attributes of "type GPU, id 0, 64 bytes" and the
* other has "type GPU, id 1, 64 bytes". 'usage' is owned by the backend and
* may be released after the function returns.
* @param usage_size The number of entries in 'usage'.
* @return a TRITONSERVER_Error indicating success or failure. */


///
///
///
///
public static native TRITONSERVER_Error TRITONBACKEND_ModelReportMemoryUsage(
TRITONBACKEND_Model model, @Cast("TRITONSERVER_BufferAttributes**") PointerPointer usage,
@Cast("uint32_t") int usage_size);
public static native TRITONSERVER_Error TRITONBACKEND_ModelReportMemoryUsage(
TRITONBACKEND_Model model, @ByPtrPtr TRITONSERVER_BufferAttributes usage,
@Cast("uint32_t") int usage_size);
public static native TRITONSERVER_Error TRITONBACKEND_ModelSetState(
TRITONBACKEND_Model model, Pointer state);

/**
* TRITONBACKEND_ModelInstance
Expand Down Expand Up @@ -4882,39 +4819,13 @@ public static native TRITONSERVER_Error TRITONBACKEND_ModelInstanceState(
* @param state The user state, or nullptr if no user state.
* @return a TRITONSERVER_Error indicating success or failure. */

///
public static native TRITONSERVER_Error TRITONBACKEND_ModelInstanceSetState(
TRITONBACKEND_ModelInstance instance, Pointer state);

/** Report the memory usage of the model instance that will be released on
* TRITONBACKEND_ModelInstanceFinalize. The backend may call this function
* within the lifecycle of the TRITONBACKEND_Model object (between
* TRITONBACKEND_ModelInstanceInitialize and
* TRITONBACKEND_ModelInstanceFinalize) to report the latest usage. To report
* the memory usage of the model, see TRITONBACKEND_ModelReportMemoryUsage.
*
* @param instance The model instance.
* @param usage The list of buffer attributes that records the memory usage,
* each entry should record the total memory usage of a given memory type and
* id. For example, if the instance itself occupies 64 bytes on each of
* CUDA device 0 and CUDA device 1. Then 'usage' should have first two entries
* set, one has the buffer attributes of "type GPU, id 0, 64 bytes" and the
* other has "type GPU, id 1, 64 bytes". 'usage' is owned by the backend and
* may be released after the function returns.
* @param usage_size The number of entries in 'usage'.
* @return a TRITONSERVER_Error indicating success or failure. */

///
///
///
///
///
public static native TRITONSERVER_Error TRITONBACKEND_ModelInstanceReportMemoryUsage(
TRITONBACKEND_ModelInstance instance,
@Cast("TRITONSERVER_BufferAttributes**") PointerPointer usage, @Cast("uint32_t") int usage_size);
public static native TRITONSERVER_Error TRITONBACKEND_ModelInstanceReportMemoryUsage(
TRITONBACKEND_ModelInstance instance,
@ByPtrPtr TRITONSERVER_BufferAttributes usage, @Cast("uint32_t") int usage_size);
public static native TRITONSERVER_Error TRITONBACKEND_ModelInstanceSetState(
TRITONBACKEND_ModelInstance instance, Pointer state);

/** Record statistics for an inference request.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
// #endif
// #endif

@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_BufferAttributes") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_BufferAttributes extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_BufferAttributes() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

import static org.bytedeco.tritonserver.global.tritonserver.*;

@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_Error") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_Error extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_Error() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

import static org.bytedeco.tritonserver.global.tritonserver.*;

@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_InferenceRequest") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_InferenceRequest extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_InferenceRequest() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

import static org.bytedeco.tritonserver.global.tritonserver.*;

@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_InferenceResponse") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_InferenceResponse extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_InferenceResponse() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

import static org.bytedeco.tritonserver.global.tritonserver.*;

@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_InferenceTrace") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_InferenceTrace extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_InferenceTrace() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

import static org.bytedeco.tritonserver.global.tritonserver.*;

@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_Message") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_Message extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_Message() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

import static org.bytedeco.tritonserver.global.tritonserver.*;

@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_Metric") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_Metric extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_Metric() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
///
///
///
@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_MetricFamily") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_MetricFamily extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_MetricFamily() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

import static org.bytedeco.tritonserver.global.tritonserver.*;

@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_Metrics") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_Metrics extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_Metrics() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

import static org.bytedeco.tritonserver.global.tritonserver.*;

@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_Parameter") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_Parameter extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_Parameter() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

import static org.bytedeco.tritonserver.global.tritonserver.*;

@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_ResponseAllocator") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_ResponseAllocator extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_ResponseAllocator() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

import static org.bytedeco.tritonserver.global.tritonserver.*;

@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_Server") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_Server extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_Server() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

import static org.bytedeco.tritonserver.global.tritonserver.*;

@Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
@Name("struct TRITONSERVER_ServerOptions") @Opaque @Properties(inherit = org.bytedeco.tritonserver.presets.tritonserver.class)
public class TRITONSERVER_ServerOptions extends Pointer {
/** Empty constructor. Calls {@code super((Pointer)null)}. */
public TRITONSERVER_ServerOptions() { super((Pointer)null); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,19 @@ public void map(InfoMap infoMap) {
.put(new Info("std::size_t").cast().valueTypes("long").pointerTypes("LongPointer", "LongBuffer", "long[]"))
.put(new Info("TRITONSERVER_EXPORT", "TRITONSERVER_DECLSPEC",
"TRITONBACKEND_DECLSPEC", "TRITONBACKEND_ISPEC",
"TRITONREPOAGENT_DECLSPEC", "TRITONREPOAGENT_ISPEC").cppTypes().annotations());
"TRITONREPOAGENT_DECLSPEC", "TRITONREPOAGENT_ISPEC").cppTypes().annotations())
.put(new Info("struct TRITONSERVER_BufferAttributes").cppText("TRITONSERVER_BufferAttributes"))
.put(new Info("struct TRITONSERVER_Error").cppText("TRITONSERVER_Error"))
.put(new Info("struct TRITONSERVER_InferenceRequest").cppText("TRITONSERVER_InferenceRequest"))
.put(new Info("struct TRITONSERVER_InferenceResponse").cppText("TRITONSERVER_InferenceResponse"))
.put(new Info("struct TRITONSERVER_InferenceTrace").cppText("TRITONSERVER_InferenceTrace"))
.put(new Info("struct TRITONSERVER_Message").cppText("TRITONSERVER_Message"))
.put(new Info("struct TRITONSERVER_Metrics").cppText("TRITONSERVER_Metrics"))
.put(new Info("struct TRITONSERVER_Parameter").cppText("TRITONSERVER_Parameter"))
.put(new Info("struct TRITONSERVER_ResponseAllocator").cppText("TRITONSERVER_ResponseAllocator"))
.put(new Info("struct TRITONSERVER_Server").cppText("TRITONSERVER_Server"))
.put(new Info("struct TRITONSERVER_ServerOptions").cppText("TRITONSERVER_ServerOptions"))
.put(new Info("struct TRITONSERVER_Metric").cppText("TRITONSERVER_Metric"))
.put(new Info("struct TRITONSERVER_MetricFamily").cppText("TRITONSERVER_MetricFamily"));
}
}

0 comments on commit 3b4f6b0

Please sign in to comment.