Compare commits

...

3 Commits

Author SHA1 Message Date
Barzan Hayati
3b9ff27524 Refactor draw imprecise face 2025-08-24 12:42:06 +00:00
Barzan Hayati
65b8f9b9a8 Refactor draw imprecise face 2025-08-24 09:18:13 +00:00
Barzan Hayati
eee2868e7c Draw imprecise face 2025-08-24 08:37:25 +00:00
5 changed files with 268 additions and 186 deletions

View File

@ -1,16 +1 @@
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///opt/nvidia/deepstream/deepstream-7.1/samples/streams/sample_720p.mp4
file:///root/Put.mp4

View File

@ -2,7 +2,7 @@
"MUXER_OUTPUT_HEIGHT": 1080,
"MUXER_OUTPUT_WIDTH": 1920,
"output_video_path": "test.mkv",
"display_output": 3,
"display_output": 2,
"codec_rtsp_out": "H264",
"mount_address": "/rtsp-output",
"udp_buffer_size": 524288,

View File

@ -20,6 +20,7 @@ unsigned int NvInferServerManager::nvds_lib_major_version = NVDS_VERSION_MAJOR;
unsigned int NvInferServerManager::nvds_lib_minor_version = NVDS_VERSION_MINOR;
const gchar pgie_class_str[PGIE_DETECTED_CLASS_NUM][32] = {"Person"};
const gchar imprecise_face_str[PGIE_DETECTED_CLASS_NUM][32] = {"ImpreciseFace"};
/* nvds_lib_major_version and nvds_lib_minor_version is the version number of
* deepstream sdk */
@ -223,6 +224,10 @@ GstPadProbeReturn NvInferServerManager::pgie_pad_buffer_probe(
for (NvDsMetaList *l_frame = batch_meta->frame_meta_list; l_frame != NULL;
l_frame = l_frame->next) {
NvDsFrameMeta *frame_meta = (NvDsFrameMeta *)l_frame->data;
// std::cout << "frame number = " << frame_meta->frame_num
// << " frame id = " << frame_meta->source_id << std::endl;
// to solve track not showing up issue
nvds_acquire_meta_lock(batch_meta);
frame_meta->bInferDone = TRUE;
@ -237,160 +242,244 @@ GstPadProbeReturn NvInferServerManager::pgie_pad_buffer_probe(
(void)stream_height;
(void)stream_width;
// float source_id = (float)frame_meta->source_id;
/* Iterate user metadata in frames to search PGIE's tensor metadata */
for (NvDsMetaList *l_user = frame_meta->frame_user_meta_list;
l_user != NULL; l_user = l_user->next) {
NvDsUserMeta *user_meta = (NvDsUserMeta *)l_user->data;
if (user_meta->base_meta.meta_type != NVDSINFER_TENSOR_OUTPUT_META)
continue;
/* convert to tensor metadata */
NvDsInferTensorMeta *meta =
(NvDsInferTensorMeta *)user_meta->user_meta_data;
for (unsigned int i = 0; i < meta->num_output_layers; i++) {
NvDsInferLayerInfo *info = &meta->output_layers_info[i];
info->buffer = meta->out_buf_ptrs_host[i];
if (use_device_mem && meta->out_buf_ptrs_dev[i]) {
cudaMemcpy(meta->out_buf_ptrs_host[i],
meta->out_buf_ptrs_dev[i],
info->inferDims.numElements * 4,
cudaMemcpyDeviceToHost);
}
}
/* Parse output tensor and fill detection results into objectList.
*/
std::vector<NvDsInferLayerInfo> outputLayersInfo(
meta->output_layers_info,
meta->output_layers_info + meta->num_output_layers);
#if NVDS_VERSION_MAJOR >= 5
if (nvds_lib_major_version >= 5) {
if (meta->network_info.width != networkInfo.width ||
meta->network_info.height != networkInfo.height ||
meta->network_info.channels != networkInfo.channels) {
g_error("failed to check pgie network info\n");
}
}
#endif
// std::cout << "frame number: " << frame_meta->frame_num
// << " frame id: " << frame_meta->source_id << std::endl;
float *outputBuffer = (float *)outputLayersInfo[0].buffer;
(void)outputBuffer;
// NvDsInferDims dims = outputLayersInfo[0].inferDims;
for (size_t jkl = 0; jkl < outputLayersInfo.size(); jkl++) {
const NvDsInferLayerInfo &layer = outputLayersInfo[jkl];
unsigned int numDims = layer.inferDims.numDims;
unsigned int numElements = layer.inferDims.numElements;
(void)numElements;
(void)numDims;
// std::cout << "Layer " << jkl << " (" << layer.layerName <<
// "):\n"; std::cout << " Num Dims: " << numDims << "\n";
// std::cout << " Num Elements: " << numElements << "\n";
// std::cout << " Dims: [";
// for (unsigned int mno = 0; mno < numDims; ++mno) {
// std::cout << layer.inferDims.d[mno];
// // layer.inferDims.d[0] = 100;
// // layer.inferDims.d[1] = 57;
// if (mno < numDims - 1)
// std::cout << ", ";
// }
// std::cout << "]\n";
}
const NvDsInferLayerInfo &layer =
outputLayersInfo[0]; // or loop over all
uint detected_persons = 0;
float *data = static_cast<float *>(layer.buffer);
for (unsigned int jkl = 0; jkl < 100;
jkl ++) { // 100 persons for each frame
if (data[jkl * 57 + 4] > threshold_body_detection) {
detected_persons++;
// std::cout
// << "nvinferserver first for x = " << data[jkl * 57 +
// 0]
// << " y = " << data[jkl * 57 + 1]
// << " w = " << data[jkl * 57 + 2]
// << " h = " << data[jkl * 57 + 3]
// << " score = " << data[jkl * 57 + 4] << std::endl;
for (unsigned int mno = 0; mno < 57; ++mno) {
float value = data[jkl * 57 + mno];
(void)value;
// std::cout << "data[" << jkl << "][" << mno
// << "] = " << value << std::endl;
}
}
}
for (uint index = 0; index < detected_persons; index++) {
NvDsObjectMeta *obj_meta =
nvds_acquire_obj_meta_from_pool(batch_meta);
obj_meta->unique_component_id = meta->unique_id;
obj_meta->confidence = data[index * 57 + 4];
// obj_meta->object_id = UNTRACKED_OBJECT_ID;
obj_meta->class_id = 0;
NvOSD_RectParams &rect_params = obj_meta->rect_params;
NvOSD_TextParams &text_params = obj_meta->text_params;
/* Assign bounding box coordinates. */
rect_params.left = int(data[index * 57 + 0] *
MUXER_OUTPUT_WIDTH / PGIE_NET_WIDTH);
rect_params.top = int(data[index * 57 + 1] *
MUXER_OUTPUT_HEIGHT / PGIE_NET_HEIGHT);
rect_params.width =
int((data[index * 57 + 2] - data[index * 57 + 0]) *
MUXER_OUTPUT_WIDTH / PGIE_NET_WIDTH);
rect_params.height =
int((data[index * 57 + 3] - data[index * 57 + 1]) *
MUXER_OUTPUT_HEIGHT / PGIE_NET_HEIGHT);
// std::cout << "nvinferserver second for x = " <<
// rect_params.left
// << " y = " << rect_params.top
// << " w = " << rect_params.width
// << " h = " << rect_params.height
// << " score = " << obj_meta->confidence <<
// std::endl;
/* Border of width 3. */
rect_params.border_width = 3;
rect_params.has_bg_color = 0;
rect_params.border_color = NvOSD_ColorParams{1, 0, 0, 1};
/* display_text requires heap allocated memory. */
text_params.display_text = g_strdup(pgie_class_str[0]);
/* Display text above the left top corner of the object. */
text_params.x_offset = rect_params.left;
text_params.y_offset = rect_params.top - 10;
/* Set black background for the text. */
text_params.set_bg_clr = 1;
text_params.text_bg_clr = NvOSD_ColorParams{0, 0, 0, 1};
/* Font face, size and color. */
text_params.font_params.font_name = (gchar *)"Serif";
text_params.font_params.font_size = 11;
text_params.font_params.font_color =
NvOSD_ColorParams{1, 1, 1, 1};
// adding landmarks to obj_meta as user_meta
NvDsUserMeta *um1 =
nvds_acquire_user_meta_from_pool(batch_meta);
um1->user_meta_data = set_metadata_ptr(
&(data[index * 57])); // Add landmarks here
um1->base_meta.meta_type =
NVDS_USER_OBJECT_META_LANDMARKS_AND_SOURCE_ID;
um1->base_meta.copy_func = (NvDsMetaCopyFunc)copy_user_meta;
um1->base_meta.release_func =
(NvDsMetaReleaseFunc)release_user_meta;
nvds_add_user_meta_to_obj(obj_meta, um1);
nvds_add_obj_meta_to_frame(frame_meta, obj_meta, NULL);
}
extract_tensor_metadata(user_meta, networkInfo, batch_meta,
frame_meta);
}
}
// use_device_mem = 1 - use_device_mem;
return GST_PAD_PROBE_OK;
}
/* Extract the raw tensor output attached by nvinferserver, sync it to host
 * memory, count above-threshold body detections, and hand the buffer over to
 * update_frame_with_face_body_meta() which attaches the display metadata.
 *
 * @param user_meta  user meta of type NVDSINFER_TENSOR_OUTPUT_META.
 * @param networkInfo  expected network input geometry, used as a sanity check.
 * @param batch_meta / frame_meta  destination for the object metadata. */
void NvInferServerManager::extract_tensor_metadata(
    NvDsUserMeta *user_meta, NvDsInferNetworkInfo networkInfo,
    NvDsBatchMeta *batch_meta, NvDsFrameMeta *frame_meta) {
    /* convert to tensor metadata */
    NvDsInferTensorMeta *meta =
        (NvDsInferTensorMeta *)user_meta->user_meta_data;
    /* Make each layer's host buffer valid: if the tensor output was kept in
     * device memory, copy it back to the host first. */
    for (unsigned int i = 0; i < meta->num_output_layers; i++) {
        NvDsInferLayerInfo *info = &meta->output_layers_info[i];
        info->buffer = meta->out_buf_ptrs_host[i];
        if (use_device_mem && meta->out_buf_ptrs_dev[i]) {
            // sizeof(float) instead of magic "4": the buffers are float
            // tensors (they are consumed as float* below).
            cudaMemcpy(meta->out_buf_ptrs_host[i], meta->out_buf_ptrs_dev[i],
                       info->inferDims.numElements * sizeof(float),
                       cudaMemcpyDeviceToHost);
        }
    }
    std::vector<NvDsInferLayerInfo> outputLayersInfo(
        meta->output_layers_info,
        meta->output_layers_info + meta->num_output_layers);
#if NVDS_VERSION_MAJOR >= 5
    if (nvds_lib_major_version >= 5) {
        /* The tensor must have been produced with the same network
         * resolution we were configured for; otherwise the coordinate
         * rescaling below would be wrong. */
        if (meta->network_info.width != networkInfo.width ||
            meta->network_info.height != networkInfo.height ||
            meta->network_info.channels != networkInfo.channels) {
            g_error("failed to check pgie network info\n");
        }
    }
#endif
    /* Assumed layout of output layer 0: 100 rows of 57 floats per frame,
     * with the detection score at offset 4 of each row — TODO(review):
     * confirm against the model's output specification. */
    const NvDsInferLayerInfo &layer = outputLayersInfo[0];
    uint detected_persons = 0;
    float *data = static_cast<float *>(layer.buffer);
    for (unsigned int jkl = 0; jkl < 100;
         jkl++) { // maximum 100 persons for each frame
        if (data[jkl * 57 + 4] > threshold_body_detection) {
            detected_persons++;
        }
    }
    /* NOTE(review): the callee consumes the FIRST `detected_persons` rows of
     * `data`, not the rows that actually passed the threshold. That is only
     * correct if the model emits detections sorted by score — verify. */
    update_frame_with_face_body_meta(detected_persons, batch_meta, meta, data,
                                     frame_meta);
}
/* For each of the first `detected_persons` rows of `data`, attach TWO
 * objects to the frame metadata: an "imprecise face" box (blue, derived from
 * the body top-left corner and a shoulder keypoint) carrying the landmark
 * user meta, and the full body box (red). Each row is 57 floats; offsets
 * 0..3 are presumably x1,y1,x2,y2 in network coordinates and offset 4 is the
 * detection score — TODO(review): confirm against the model output spec.
 *
 * @param detected_persons  number of rows to consume (see caller's note on
 *                          the count-vs-index assumption).
 * @param meta  tensor meta; only unique_id is read here.
 * @param data  host-side float buffer of output layer 0. */
void NvInferServerManager::update_frame_with_face_body_meta(
    uint detected_persons, NvDsBatchMeta *batch_meta, NvDsInferTensorMeta *meta,
    float *data, NvDsFrameMeta *frame_meta) {
    for (uint index = 0; index < detected_persons; index++) {
        // imprecise_face_obj_meta is the imprecise face
        NvDsObjectMeta *imprecise_face_obj_meta =
            nvds_acquire_obj_meta_from_pool(batch_meta);
        // meta->unique_id is the unique ID of the inference component
        // (PGIE/SGIE) that produced this tensor output; it comes from the
        // unique-id property in that component's config file. Since a
        // pipeline may have several inference elements attaching tensor
        // metadata, unique_component_id lets downstream consumers tell
        // which element these objects originated from.
        imprecise_face_obj_meta->unique_component_id = meta->unique_id;
        imprecise_face_obj_meta->confidence = data[index * 57 + 4];
        // imprecise_face_obj_meta->object_id = UNTRACKED_OBJECT_ID;
        imprecise_face_obj_meta->class_id =
            PGIE_CLASS_ID_PERSON; // 0 for body detection
        NvOSD_RectParams &rect_params_imprecise_face =
            imprecise_face_obj_meta->rect_params;
        NvOSD_TextParams &text_params_imprecise_face =
            imprecise_face_obj_meta->text_params;
        /* Assign bounding box coordinates, rescaled from network
         * resolution to muxer output resolution. */
        rect_params_imprecise_face.left =
            int(data[index * 57 + 0] * MUXER_OUTPUT_WIDTH / PGIE_NET_WIDTH);
        rect_params_imprecise_face.top =
            int(data[index * 57 + 1] * MUXER_OUTPUT_HEIGHT / PGIE_NET_HEIGHT);
        // Offsets 21/22 and 24/25 are presumably the (x, y) of the two
        // shoulder keypoints (COCO keypoints 5 and 6, assuming rows are
        // 6 header floats + 17 keypoints x (x, y, conf)) — TODO confirm.
        // The shoulder with the larger x is taken as the bottom-right
        // bound of the approximate face region.
        float x_shoulder;
        float y_shoulder;
        if (data[index * 57 + 21] > data[index * 57 + 24]) {
            x_shoulder = data[index * 57 + 21];
            y_shoulder = data[index * 57 + 22];
        } else {
            x_shoulder = data[index * 57 + 24];
            y_shoulder = data[index * 57 + 25];
        }
        rect_params_imprecise_face.width =
            int((x_shoulder - data[index * 57 + 0]) * MUXER_OUTPUT_WIDTH /
                PGIE_NET_WIDTH);
        rect_params_imprecise_face.height =
            int((y_shoulder - data[index * 57 + 1]) * MUXER_OUTPUT_HEIGHT /
                PGIE_NET_HEIGHT);
        // std::cout << "nvinferserver imprecise face for x = " <<
        // rect_params_imprecise_face.left
        //           << " y = " << rect_params_imprecise_face.top
        //           << " w = " << rect_params_imprecise_face.width
        //           << " h = " << rect_params_imprecise_face.height
        //           << " score = " <<
        //           imprecise_face_obj_meta->confidence << std::endl;
        /* Border of width 3; blue marks the imprecise face box. */
        rect_params_imprecise_face.border_width = 3;
        rect_params_imprecise_face.has_bg_color = 0;
        rect_params_imprecise_face.border_color = NvOSD_ColorParams{0, 0, 1, 1};
        /* display_text requires heap allocated memory. */
        text_params_imprecise_face.display_text =
            g_strdup(imprecise_face_str[0]); // g_strdup(pgie_class_str[0]);
        /* Display text above the left top corner of the object. */
        text_params_imprecise_face.x_offset = rect_params_imprecise_face.left;
        text_params_imprecise_face.y_offset =
            rect_params_imprecise_face.top - 10;
        /* Set black background for the text. */
        text_params_imprecise_face.set_bg_clr = 1;
        text_params_imprecise_face.text_bg_clr = NvOSD_ColorParams{0, 0, 0, 1};
        /* Font face, size and color. */
        text_params_imprecise_face.font_params.font_name = (gchar *)"Serif";
        text_params_imprecise_face.font_params.font_size = 11;
        text_params_imprecise_face.font_params.font_color =
            NvOSD_ColorParams{1, 1, 1, 1};
        // Attach the row's landmarks (plus source id) to the face object as
        // user meta; copy/release functions manage the duplicated buffer.
        NvDsUserMeta *um1 = nvds_acquire_user_meta_from_pool(batch_meta);
        um1->user_meta_data =
            set_metadata_ptr(&(data[index * 57])); // Add landmarks here
        um1->base_meta.meta_type =
            NVDS_USER_OBJECT_META_LANDMARKS_AND_SOURCE_ID;
        um1->base_meta.copy_func = (NvDsMetaCopyFunc)copy_user_meta;
        um1->base_meta.release_func = (NvDsMetaReleaseFunc)release_user_meta;
        nvds_add_user_meta_to_obj(imprecise_face_obj_meta, um1);
        nvds_add_obj_meta_to_frame(frame_meta, imprecise_face_obj_meta, NULL);
        // Second object: the full body detection box.
        NvDsObjectMeta *body_obj_meta =
            nvds_acquire_obj_meta_from_pool(batch_meta);
        body_obj_meta->unique_component_id = meta->unique_id;
        body_obj_meta->confidence = data[index * 57 + 4];
        // body_obj_meta->object_id = UNTRACKED_OBJECT_ID;
        body_obj_meta->class_id = PGIE_CLASS_ID_PERSON; // 0 for body detection
        NvOSD_RectParams &rect_params_body = body_obj_meta->rect_params;
        NvOSD_TextParams &text_params_body = body_obj_meta->text_params;
        /* Assign bounding box coordinates (x1,y1,x2,y2 -> left/top/w/h),
         * rescaled from network to muxer output resolution. */
        rect_params_body.left =
            int(data[index * 57 + 0] * MUXER_OUTPUT_WIDTH / PGIE_NET_WIDTH);
        rect_params_body.top =
            int(data[index * 57 + 1] * MUXER_OUTPUT_HEIGHT / PGIE_NET_HEIGHT);
        rect_params_body.width =
            int((data[index * 57 + 2] - data[index * 57 + 0]) *
                MUXER_OUTPUT_WIDTH / PGIE_NET_WIDTH);
        rect_params_body.height =
            int((data[index * 57 + 3] - data[index * 57 + 1]) *
                MUXER_OUTPUT_HEIGHT / PGIE_NET_HEIGHT);
        /* Border of width 3; red marks the body box. */
        rect_params_body.border_width = 3;
        rect_params_body.has_bg_color = 0;
        rect_params_body.border_color = NvOSD_ColorParams{1, 0, 0, 1};
        /* display_text requires heap allocated memory. */
        text_params_body.display_text = g_strdup(pgie_class_str[0]);
        /* Display text above the left top corner of the object
         * (-30 instead of -10 so it does not overlap the face label). */
        text_params_body.x_offset = rect_params_body.left;
        text_params_body.y_offset = rect_params_body.top - 30;
        /* Set black background for the text. */
        text_params_body.set_bg_clr = 1;
        text_params_body.text_bg_clr = NvOSD_ColorParams{0, 0, 0, 1};
        /* Font face, size and color. */
        text_params_body.font_params.font_name = (gchar *)"Serif";
        text_params_body.font_params.font_size = 11;
        text_params_body.font_params.font_color = NvOSD_ColorParams{1, 1, 1, 1};
        // Landmarks are deliberately attached only to the face object above;
        // the equivalent attachment for the body object is kept for reference:
        // NvDsUserMeta *um1 =
        //     nvds_acquire_user_meta_from_pool(batch_meta);
        // um1->user_meta_data = set_metadata_ptr(
        //     &(data[index * 57])); // Add landmarks here
        // um1->base_meta.meta_type =
        //     NVDS_USER_OBJECT_META_LANDMARKS_AND_SOURCE_ID;
        // um1->base_meta.copy_func = (NvDsMetaCopyFunc)copy_user_meta;
        // um1->base_meta.release_func =
        //     (NvDsMetaReleaseFunc)release_user_meta;
        // nvds_add_user_meta_to_obj(body_obj_meta, um1);
        nvds_add_obj_meta_to_frame(frame_meta, body_obj_meta, NULL);
    }
}
// add custom information to metadata by: set_metadata_ptr, copy_user_meta,
// release_user_meta
void *NvInferServerManager::set_metadata_ptr(float *arr) {

View File

@ -40,4 +40,9 @@ class NvInferServerManager {
static void *set_metadata_ptr(float *);
static gpointer copy_user_meta(gpointer, gpointer);
static void release_user_meta(gpointer, gpointer);
static void update_frame_with_face_body_meta(uint, NvDsBatchMeta *,
NvDsInferTensorMeta *, float *,
NvDsFrameMeta *);
static void extract_tensor_metadata(NvDsUserMeta *, NvDsInferNetworkInfo,
NvDsBatchMeta *, NvDsFrameMeta *);
};

View File

@ -230,13 +230,14 @@ bool PipelineManager::setup_pipeline() {
// (without a transform_jetson plugin before the sink plugin) custom_plugin
// is dsexample pluging
if (sink_manager->display_output < 3) {
gst_bin_add_many(
GST_BIN(pipeline), nv_infer_server_manager->primary_detector,
nv_tracker_manager->tracker,
face_nv_infer_server_manager->face_detector,
gstds_example_manager->custom_plugin, tiler_manager->tiler,
queue_array[2].queue, nv_video_convert_manager->nvvidconv,
nv_osd_manager->nvosd, sink_manager->sink, NULL);
gst_bin_add_many(GST_BIN(pipeline),
nv_infer_server_manager->primary_detector,
nv_tracker_manager->tracker,
face_nv_infer_server_manager->face_detector,
// gstds_example_manager->custom_plugin,
tiler_manager->tiler, queue_array[2].queue,
nv_video_convert_manager->nvvidconv,
nv_osd_manager->nvosd, sink_manager->sink, NULL);
/* we link the elements together
* nvstreammux -> nvinfer -> nvtiler -> nvvidconv -> nvosd ->
@ -246,37 +247,39 @@ bool PipelineManager::setup_pipeline() {
nv_infer_server_manager->primary_detector,
nv_tracker_manager->tracker,
face_nv_infer_server_manager->face_detector,
gstds_example_manager->custom_plugin,
// gstds_example_manager->custom_plugin,
tiler_manager->tiler, nv_osd_manager->nvosd,
sink_manager->sink, NULL)) {
g_printerr("Elements could not be linked.\n");
return false;
}
} else {
gst_bin_add_many(
GST_BIN(pipeline), nv_infer_server_manager->primary_detector,
nv_tracker_manager->tracker,
face_nv_infer_server_manager->face_detector,
gstds_example_manager->custom_plugin, tiler_manager->tiler,
queue_array[2].queue, nv_video_convert_manager->nvvidconv,
nv_osd_manager->nvosd, sink_manager->nvvidconv_postosd,
sink_manager->caps, sink_manager->encoder, sink_manager->rtppay,
sink_manager->sink, NULL);
gst_bin_add_many(GST_BIN(pipeline),
nv_infer_server_manager->primary_detector,
nv_tracker_manager->tracker,
face_nv_infer_server_manager->face_detector,
// gstds_example_manager->custom_plugin,
tiler_manager->tiler, queue_array[2].queue,
nv_video_convert_manager->nvvidconv,
nv_osd_manager->nvosd, sink_manager->nvvidconv_postosd,
sink_manager->caps, sink_manager->encoder,
sink_manager->rtppay, sink_manager->sink, NULL);
// Link the elements together:
// file-source -> h264-parser -> nvh264-decoder ->
// nvinfer -> nvvidconv -> nvosd -> nvvidconv_postosd ->
// caps -> encoder -> rtppay -> udpsink
if (!gst_element_link_many(
streammux_manager->streammux,
nv_video_convert_manager->nvvidconv,
nv_infer_server_manager->primary_detector,
nv_tracker_manager->tracker,
face_nv_infer_server_manager->face_detector,
gstds_example_manager->custom_plugin, tiler_manager->tiler,
nv_osd_manager->nvosd, sink_manager->nvvidconv_postosd,
sink_manager->caps, sink_manager->encoder, sink_manager->rtppay,
sink_manager->sink, NULL)) {
if (!gst_element_link_many(streammux_manager->streammux,
nv_video_convert_manager->nvvidconv,
nv_infer_server_manager->primary_detector,
nv_tracker_manager->tracker,
face_nv_infer_server_manager->face_detector,
// gstds_example_manager->custom_plugin,
tiler_manager->tiler, nv_osd_manager->nvosd,
sink_manager->nvvidconv_postosd,
sink_manager->caps, sink_manager->encoder,
sink_manager->rtppay, sink_manager->sink,
NULL)) {
g_printerr("Elements could not be linked.\n");
return false;
}