Add nvtracker to pipeline

Barzan Hayati 2025-07-26 07:08:46 +00:00
parent bf0b3d4a79
commit 3c8c1002a1
14 changed files with 518 additions and 14 deletions

View File

@ -68,6 +68,7 @@ include_directories(${PROJECT_SOURCE_DIR}/rtsp_streaming_manager.hpp)
include_directories(${PROJECT_SOURCE_DIR}/metrics_manager.hpp)
include_directories(${PROJECT_SOURCE_DIR}/config_manager.hpp)
include_directories(${PROJECT_SOURCE_DIR}/nv_infer_server_manager.hpp)
include_directories(${PROJECT_SOURCE_DIR}/nv_tracker_manager.hpp)
@ -76,7 +77,7 @@ set(SOURCES src/main.cpp src/camera_manager.cpp src/pipeline_manager.cpp src/st
src/nv_video_convert_manager.cpp src/nv_osd_manager.cpp src/queue_manager.cpp
src/nv_ds_logger_manager.cpp src/sink_manager.cpp src/message_handling.cpp
src/rtsp_streaming_manager.cpp src/metrics_manager.cpp src/config_manager.cpp
src/nv_infer_server_manager.cpp)
src/nv_infer_server_manager.cpp src/nv_tracker_manager.cpp)
# missing initializer for member 'NvDsInferDims::d' [-Werror=missing-field-initializers] NvDsInferDims dims = {0};
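For context on the warning quoted in that comment: under -Werror=missing-field-initializers, brace-initializing only the first member of NvDsInferDims trips the error, while value-initialization does not. A minimal sketch, not part of this commit; the header name is assumed to be the standard DeepStream one:

#include <nvdsinfer.h>  // assumed DeepStream header defining NvDsInferDims

// Errors under -Werror=missing-field-initializers: only numDims gets an
// explicit initializer, the remaining members count as "missing".
// NvDsInferDims warn_dims = {0};

// Value-initialization zeroes every member without naming any of them,
// so the warning does not fire.
NvDsInferDims dims{};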

View File

@ -19,5 +19,7 @@
"pgie_batch_size": 16,
"inferserver_pgie_config_file": "../data/inferserver/primary_detector_config.txt",
"PGIE_NET_WIDTH": 640,
"PGIE_NET_HEIGHT": 640
"PGIE_NET_HEIGHT": 640,
"ll-config-file": "../data/tracker_configs/config_tracker_NvDCF_perf.yml",
"ll-lib-file": "../data/tracker_configs/libnvds_nvmultiobjecttracker.so"
}
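The two new keys point the tracker at a low-level tracker config (the NvDCF perf file, per the path) and at the low-level tracker library; they are consumed by NvTrackerManager later in this commit. A minimal standalone sketch of the same mapping, assuming the parsed config is an nlohmann::json object (as the .get<std::string>() calls below suggest) and using a hypothetical helper make_tracker for illustration:

#include <gst/gst.h>
#include <nlohmann/json.hpp>
#include <string>

// Create an nvtracker element and hand it the two paths from the JSON config.
// "ll-config-file" and "ll-lib-file" are standard Gst-nvtracker properties.
GstElement *make_tracker(const nlohmann::json &config) {
  GstElement *tracker = gst_element_factory_make("nvtracker", "tracker_plugin");
  if (!tracker) return nullptr;
  g_object_set(G_OBJECT(tracker),
               "ll-config-file", config["ll-config-file"].get<std::string>().c_str(),
               "ll-lib-file", config["ll-lib-file"].get<std::string>().c_str(),
               NULL);
  return tracker;
}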

View File

@ -0,0 +1,42 @@
%YAML:1.0
####################################################################################################
# SPDX-FileCopyrightText: Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
#
# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
# property and proprietary rights in and to this material, related
# documentation and any modifications thereto. Any use, reproduction,
# disclosure or distribution of this material and related documentation
# without an express license agreement from NVIDIA CORPORATION or
# its affiliates is strictly prohibited.
####################################################################################################
BaseConfig:
minDetectorConfidence: 0 # If the confidence of a detector bbox is lower than this, then it won't be considered for tracking
TargetManagement:
preserveStreamUpdateOrder: 0 # When assigning new target ids, preserve input streams' order to keep target ids in a deterministic order over multiple runs
maxTargetsPerStream: 150 # Max number of targets to track per stream. Recommended to set >10. Note: this value should account for the targets being tracked in shadow mode as well. Max value depends on the memory capacity
# [Creation & Termination Policy]
minIouDiff4NewTarget: 0.5 # If the IOU between the newly detected object and any of the existing targets is higher than this threshold, this newly detected object will be discarded.
probationAge: 4 # If the target's age exceeds this, the target will be considered to be valid.
maxShadowTrackingAge: 38 # Max length of shadow tracking. If the shadowTrackingAge exceeds this limit, the tracker will be terminated.
earlyTerminationAge: 1 # If the shadowTrackingAge reaches this threshold while in TENTATIVE period, the target will be terminated prematurely.
TrajectoryManagement:
useUniqueID: 0 # Use 64-bit long Unique ID when assigning tracker ID.
DataAssociator:
dataAssociatorType: 0 # the type of data associator among { DEFAULT= 0 }
associationMatcherType: 0 # the type of matching algorithm among { GREEDY=0, CASCADED=1 }
checkClassMatch: 1 # If checked, only the same-class objects are associated with each other. Default: true
# [Association Metric: Thresholds for valid candidates]
minMatchingScore4Overall: 0.0 # Min total score
minMatchingScore4SizeSimilarity: 0.0 # Min bbox size similarity score
minMatchingScore4Iou: 0.0 # Min IOU score
# [Association Metric: Weights]
matchingScoreWeight4SizeSimilarity: 0.4 # Weight for the Size-similarity score
matchingScoreWeight4Iou: 0.6 # Weight for the IOU score

View File

@ -0,0 +1,126 @@
%YAML:1.0
####################################################################################################
# SPDX-FileCopyrightText: Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
#
# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
# property and proprietary rights in and to this material, related
# documentation and any modifications thereto. Any use, reproduction,
# disclosure or distribution of this material and related documentation
# without an express license agreement from NVIDIA CORPORATION or
# its affiliates is strictly prohibited.
####################################################################################################
BaseConfig:
minDetectorConfidence: 0.1894 # If the confidence of a detector bbox is lower than this, then it won't be considered for tracking
TargetManagement:
enableBboxUnClipping: 1 # In case the bbox is likely to be clipped by image border, unclip bbox
preserveStreamUpdateOrder: 0 # When assigning new target ids, preserve input streams' order to keep target ids in a deterministic order over multiple runs
maxTargetsPerStream: 150 # Max number of targets to track per stream. Recommended to set >10. Note: this value should account for the targets being tracked in shadow mode as well. Max value depends on the GPU memory capacity
# [Creation & Termination Policy]
minIouDiff4NewTarget: 0.3686 # If the IOU between the newly detected object and any of the existing targets is higher than this threshold, this newly detected object will be discarded.
minTrackerConfidence: 0.1513 # If the confidence of an object tracker is lower than this on the fly, then it will be tracked in shadow mode. Valid Range: [0.0, 1.0]
probationAge: 2 # If the target's age exceeds this, the target will be considered to be valid.
maxShadowTrackingAge: 42 # Max length of shadow tracking. If the shadowTrackingAge exceeds this limit, the tracker will be terminated.
earlyTerminationAge: 1 # If the shadowTrackingAge reaches this threshold while in TENTATIVE period, the target will be terminated prematurely.
TrajectoryManagement:
useUniqueID: 0 # Use 64-bit long Unique ID when assigning tracker ID. Default is [true]
enableReAssoc: 1 # Enable Re-Assoc
# [Re-Assoc Metric: Thresholds for valid candidates]
minMatchingScore4Overall: 0.6622 # min matching score for overall
minTrackletMatchingScore: 0.2940 # min tracklet similarity score for re-assoc
minMatchingScore4ReidSimilarity: 0.0771 # min reid similarity score for re-assoc
# [Re-Assoc Metric: Weights]
matchingScoreWeight4TrackletSimilarity: 0.7981 # weight for tracklet similarity score
matchingScoreWeight4ReidSimilarity: 0.3848 # weight for reid similarity score
# [Re-Assoc: Motion-based]
minTrajectoryLength4Projection: 34 # min trajectory length required to make projected trajectory
prepLength4TrajectoryProjection: 58 # the length of the trajectory during which the state estimator is updated to make projections
trajectoryProjectionLength: 33 # the length of the projected trajectory
maxAngle4TrackletMatching: 67 # max angle difference for tracklet matching [degree]
minSpeedSimilarity4TrackletMatching: 0.0574 # min speed similarity for tracklet matching
minBboxSizeSimilarity4TrackletMatching: 0.1013 # min bbox size similarity for tracklet matching
maxTrackletMatchingTimeSearchRange: 27 # the search space in time for max tracklet similarity
trajectoryProjectionProcessNoiseScale: 0.0100 # trajectory projector's process noise scale w.r.t. state estimator
trajectoryProjectionMeasurementNoiseScale: 100 # trajectory projector's measurement noise scale w.r.t. state estimator
trackletSpacialSearchRegionScale: 0.0100 # the search region scale for peer tracklet
# [Re-Assoc: Reid based. Reid model params are set in ReID section]
reidExtractionInterval: 8 # frame interval to extract reid features per target
DataAssociator:
dataAssociatorType: 0 # the type of data associator among { DEFAULT= 0 }
associationMatcherType: 1 # the type of matching algorithm among { GREEDY=0, CASCADED=1 }
checkClassMatch: 1 # If checked, only the same-class objects are associated with each other. Default: true
# [Association Metric: Thresholds for valid candidates]
minMatchingScore4Overall: 0.0222 # Min total score
minMatchingScore4SizeSimilarity: 0.3552 # Min bbox size similarity score
minMatchingScore4Iou: 0.0548 # Min IOU score
minMatchingScore4VisualSimilarity: 0.5043 # Min visual similarity score
# [Association Metric: Weights]
matchingScoreWeight4VisualSimilarity: 0.3951 # Weight for the visual similarity (in terms of correlation response ratio)
matchingScoreWeight4SizeSimilarity: 0.6003 # Weight for the Size-similarity score
matchingScoreWeight4Iou: 0.4033 # Weight for the IOU score
# [Association Metric: Tentative detections] only uses iou similarity for tentative detections
tentativeDetectorConfidence: 0.1024 # If a detection's confidence is lower than this but higher than minDetectorConfidence, then it's considered as a tentative detection
minMatchingScore4TentativeIou: 0.2852 # Min iou threshold to match targets and tentative detection
StateEstimator:
stateEstimatorType: 1 # the type of state estimator among { DUMMY=0, SIMPLE=1, REGULAR=2 }
# [Dynamics Modeling]
processNoiseVar4Loc: 6810.8668 # Process noise variance for bbox center
processNoiseVar4Size: 1541.8647 # Process noise variance for bbox size
processNoiseVar4Vel: 1348.4874 # Process noise variance for velocity
measurementNoiseVar4Detector: 100.0000 # Measurement noise variance for detector's detection
measurementNoiseVar4Tracker: 293.3238 # Measurement noise variance for tracker's localization
VisualTracker:
visualTrackerType: 1 # the type of visual tracker among { DUMMY=0, NvDCF=1 }
# [NvDCF: Feature Extraction]
useColorNames: 1 # Use ColorNames feature
useHog: 1 # Use Histogram-of-Oriented-Gradient (HOG) feature
featureImgSizeLevel: 3 # Size of a feature image. Valid range: {1, 2, 3, 4, 5}, from the smallest to the largest
featureFocusOffsetFactor_y: -0.1054 # The offset for the center of hanning window relative to the feature height. The center of hanning window would move by (featureFocusOffsetFactor_y*featureMatSize.height) in vertical direction
# [NvDCF: Correlation Filter]
filterLr: 0.0767 # learning rate for DCF filter in exponential moving average. Valid Range: [0.0, 1.0]
filterChannelWeightsLr: 0.0339 # learning rate for the channel weights among feature channels. Valid Range: [0.0, 1.0]
gaussianSigma: 0.5687 # Standard deviation for Gaussian for desired response when creating DCF filter [pixels]
ReID:
reidType: 2 # The type of reid among { DUMMY=0, NvDEEPSORT=1, Reid based reassoc=2, both NvDEEPSORT and reid based reassoc=3}
# [Reid Network Info]
batchSize: 100 # Batch size of reid network
workspaceSize: 1000 # Workspace size to be used by reid engine, in MB
reidFeatureSize: 256 # Size of reid feature
reidHistorySize: 100 # Max number of reid features kept for one object
inferDims: [3, 256, 128] # Reid network input dimension CHW or HWC based on inputOrder
networkMode: 1 # Reid network inference precision mode among {fp32=0, fp16=1, int8=2 }
# [Input Preprocessing]
inputOrder: 0 # Reid network input order among { NCHW=0, NHWC=1 }. Batch will be converted to the specified order before reid input.
colorFormat: 0 # Reid network input color format among {RGB=0, BGR=1 }. Batch will be converted to the specified color before reid input.
offsets: [123.6750, 116.2800, 103.5300] # Array of values to be subtracted from each input channel, with length equal to number of channels
netScaleFactor: 0.01735207 # Scaling factor for reid network input after subtracting offsets
keepAspc: 1 # Whether to keep aspect ratio when resizing input objects for reid
# [Output Postprocessing]
addFeatureNormalization: 1 # If reid feature is not normalized in network, adding normalization on output so each reid feature has l2 norm equal to 1
minVisibility4GalleryUpdate: 0.6 # Add ReID embedding to the gallery only if the visibility is not lower than this
# [Paths and Names]
tltEncodedModel: "/opt/nvidia/deepstream/deepstream/samples/models/Tracker/resnet50_market1501.etlt" # NVIDIA TAO model path
tltModelKey: "nvidia_tao" # NVIDIA TAO model key
modelEngineFile: "/opt/nvidia/deepstream/deepstream/samples/models/Tracker/resnet50_market1501.etlt_b100_gpu0_fp16.engine" # Engine file path

View File

@ -0,0 +1,70 @@
%YAML:1.0
####################################################################################################
# SPDX-FileCopyrightText: Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
#
# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
# property and proprietary rights in and to this material, related
# documentation and any modifications thereto. Any use, reproduction,
# disclosure or distribution of this material and related documentation
# without an express license agreement from NVIDIA CORPORATION or
# its affiliates is strictly prohibited.
####################################################################################################
BaseConfig:
minDetectorConfidence: 0 # If the confidence of a detector bbox is lower than this, then it won't be considered for tracking
TargetManagement:
enableBboxUnClipping: 0 # In case the bbox is likely to be clipped by image border, unclip bbox
preserveStreamUpdateOrder: 0 # When assigning new target ids, preserve input streams' order to keep target ids in a deterministic order over multiple runs
maxTargetsPerStream: 100 # Max number of targets to track per stream. Recommended to set >10. Note: this value should account for the targets being tracked in shadow mode as well. Max value depends on the GPU memory capacity
# [Creation & Termination Policy]
minIouDiff4NewTarget: 0.5 # If the IOU between the newly detected object and any of the existing targets is higher than this threshold, this newly detected object will be discarded.
minTrackerConfidence: 0.2 # If the confidence of an object tracker is lower than this on the fly, then it will be tracked in shadow mode. Valid Range: [0.0, 1.0]
probationAge: 3 # If the target's age exceeds this, the target will be considered to be valid.
maxShadowTrackingAge: 10 # Max length of shadow tracking. If the shadowTrackingAge exceeds this limit, the tracker will be terminated.
earlyTerminationAge: 1 # If the shadowTrackingAge reaches this threshold while in TENTATIVE period, the target will be terminated prematurely.
TrajectoryManagement:
useUniqueID: 0 # Use 64-bit long Unique ID when assigning tracker ID.
DataAssociator:
dataAssociatorType: 0 # the type of data associator among { DEFAULT= 0 }
associationMatcherType: 0 # the type of matching algorithm among { GREEDY=0, CASCADED=1 }
checkClassMatch: 1 # If checked, only the same-class objects are associated with each other. Default: true
# [Association Metric: Thresholds for valid candidates]
minMatchingScore4Overall: 0.0 # Min total score
minMatchingScore4SizeSimilarity: 0.6 # Min bbox size similarity score
minMatchingScore4Iou: 0.0 # Min IOU score
minMatchingScore4VisualSimilarity: 0.7 # Min visual similarity score
# [Association Metric: Weights]
matchingScoreWeight4VisualSimilarity: 0.6 # Weight for the visual similarity (in terms of correlation response ratio)
matchingScoreWeight4SizeSimilarity: 0.0 # Weight for the Size-similarity score
matchingScoreWeight4Iou: 0.4 # Weight for the IOU score
StateEstimator:
stateEstimatorType: 1 # the type of state estimator among { DUMMY=0, SIMPLE=1, REGULAR=2 }
# [Dynamics Modeling]
processNoiseVar4Loc: 2.0 # Process noise variance for bbox center
processNoiseVar4Size: 1.0 # Process noise variance for bbox size
processNoiseVar4Vel: 0.1 # Process noise variance for velocity
measurementNoiseVar4Detector: 4.0 # Measurement noise variance for detector's detection
measurementNoiseVar4Tracker: 16.0 # Measurement noise variance for tracker's localization
VisualTracker:
visualTrackerType: 1 # the type of visual tracker among { DUMMY=0, NvDCF=1 }
# [NvDCF: Feature Extraction]
useColorNames: 1 # Use ColorNames feature
useHog: 0 # Use Histogram-of-Oriented-Gradient (HOG) feature
featureImgSizeLevel: 1 # Size of a feature image. Valid range: {1, 2, 3, 4, 5}, from the smallest to the largest
featureFocusOffsetFactor_y: -0.2 # The offset for the center of hanning window relative to the feature height. The center of hanning window would move by (featureFocusOffsetFactor_y*featureMatSize.height) in vertical direction
# [NvDCF: Correlation Filter]
filterLr: 0.075 # learning rate for DCF filter in exponential moving average. Valid Range: [0.0, 1.0]
filterChannelWeightsLr: 0.1 # learning rate for the channel weights among feature channels. Valid Range: [0.0, 1.0]
gaussianSigma: 0.75 # Standard deviation for Gaussian for desired response when creating DCF filter [pixels]

View File

@ -0,0 +1,74 @@
%YAML:1.0
####################################################################################################
# SPDX-FileCopyrightText: Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
#
# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
# property and proprietary rights in and to this material, related
# documentation and any modifications thereto. Any use, reproduction,
# disclosure or distribution of this material and related documentation
# without an express license agreement from NVIDIA CORPORATION or
# its affiliates is strictly prohibited.
####################################################################################################
BaseConfig:
minDetectorConfidence: 0.0430 # If the confidence of a detector bbox is lower than this, then it won't be considered for tracking
TargetManagement:
enableBboxUnClipping: 1 # In case the bbox is likely to be clipped by image border, unclip bbox
preserveStreamUpdateOrder: 0 # When assigning new target ids, preserve input streams' order to keep target ids in a deterministic order over multiple runs
maxTargetsPerStream: 150 # Max number of targets to track per stream. Recommended to set >10. Note: this value should account for the targets being tracked in shadow mode as well. Max value depends on the GPU memory capacity
# [Creation & Termination Policy]
minIouDiff4NewTarget: 0.7418 # If the IOU between the newly detected object and any of the existing targets is higher than this threshold, this newly detected object will be discarded.
minTrackerConfidence: 0.4009 # If the confidence of an object tracker is lower than this on the fly, then it will be tracked in shadow mode. Valid Range: [0.0, 1.0]
probationAge: 2 # If the target's age exceeds this, the target will be considered to be valid.
maxShadowTrackingAge: 51 # Max length of shadow tracking. If the shadowTrackingAge exceeds this limit, the tracker will be terminated.
earlyTerminationAge: 1 # If the shadowTrackingAge reaches this threshold while in TENTATIVE period, the target will be terminated prematurely.
TrajectoryManagement:
useUniqueID: 0 # Use 64-bit long Unique ID when assigning tracker ID.
DataAssociator:
dataAssociatorType: 0 # the type of data associator among { DEFAULT= 0 }
associationMatcherType: 1 # the type of matching algorithm among { GREEDY=0, CASCADED=1 }
checkClassMatch: 1 # If checked, only the same-class objects are associated with each other. Default: true
# [Association Metric: Thresholds for valid candidates]
minMatchingScore4Overall: 0.4290 # Min total score
minMatchingScore4SizeSimilarity: 0.3627 # Min bbox size similarity score
minMatchingScore4Iou: 0.2575 # Min IOU score
minMatchingScore4VisualSimilarity: 0.5356 # Min visual similarity score
# [Association Metric: Weights]
matchingScoreWeight4VisualSimilarity: 0.3370 # Weight for the visual similarity (in terms of correlation response ratio)
matchingScoreWeight4SizeSimilarity: 0.4354 # Weight for the Size-similarity score
matchingScoreWeight4Iou: 0.3656 # Weight for the IOU score
# [Association Metric: Tentative detections] only uses iou similarity for tentative detections
tentativeDetectorConfidence: 0.2008 # If a detection's confidence is lower than this but higher than minDetectorConfidence, then it's considered as a tentative detection
minMatchingScore4TentativeIou: 0.5296 # Min iou threshold to match targets and tentative detection
StateEstimator:
stateEstimatorType: 1 # the type of state estimator among { DUMMY=0, SIMPLE=1, REGULAR=2 }
# [Dynamics Modeling]
processNoiseVar4Loc: 1.5110 # Process noise variance for bbox center
processNoiseVar4Size: 1.3159 # Process noise variance for bbox size
processNoiseVar4Vel: 0.0300 # Process noise variance for velocity
measurementNoiseVar4Detector: 3.0283 # Measurement noise variance for detector's detection
measurementNoiseVar4Tracker: 8.1505 # Measurement noise variance for tracker's localization
VisualTracker:
visualTrackerType: 1 # the type of visual tracker among { DUMMY=0, NvDCF=1 }
# [NvDCF: Feature Extraction]
useColorNames: 1 # Use ColorNames feature
useHog: 0 # Use Histogram-of-Oriented-Gradient (HOG) feature
featureImgSizeLevel: 2 # Size of a feature image. Valid range: {1, 2, 3, 4, 5}, from the smallest to the largest
featureFocusOffsetFactor_y: -0.2000 # The offset for the center of hanning window relative to the feature height. The center of hanning window would move by (featureFocusOffsetFactor_y*featureMatSize.height) in vertical direction
# [NvDCF: Correlation Filter]
filterLr: 0.0750 # learning rate for DCF filter in exponential moving average. Valid Range: [0.0, 1.0]
filterChannelWeightsLr: 0.1000 # learning rate for the channel weights among feature channels. Valid Range: [0.0, 1.0]
gaussianSigma: 0.7500 # Standard deviation for Gaussian for desired response when creating DCF filter [pixels]

View File

@ -0,0 +1,86 @@
%YAML:1.0
####################################################################################################
# SPDX-FileCopyrightText: Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
#
# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
# property and proprietary rights in and to this material, related
# documentation and any modifications thereto. Any use, reproduction,
# disclosure or distribution of this material and related documentation
# without an express license agreement from NVIDIA CORPORATION or
# its affiliates is strictly prohibited.
####################################################################################################
BaseConfig:
minDetectorConfidence: 0.0762 # If the confidence of a detector bbox is lower than this, then it won't be considered for tracking
TargetManagement:
preserveStreamUpdateOrder: 0 # When assigning new target ids, preserve input streams' order to keep target ids in a deterministic order over multiple runs
maxTargetsPerStream: 150 # Max number of targets to track per stream. Recommended to set >10. Note: this value should account for the targets being tracked in shadow mode as well. Max value depends on the GPU memory capacity
# [Creation & Termination Policy]
minIouDiff4NewTarget: 0.9847 # If the IOU between the newly detected object and any of the existing targets is higher than this threshold, this newly detected object will be discarded.
minTrackerConfidence: 0.4314 # If the confidence of an object tracker is lower than this on the fly, then it will be tracked in shadow mode. Valid Range: [0.0, 1.0]
probationAge: 2 # If the target's age exceeds this, the target will be considered to be valid.
maxShadowTrackingAge: 68 # Max length of shadow tracking. If the shadowTrackingAge exceeds this limit, the tracker will be terminated.
earlyTerminationAge: 1 # If the shadowTrackingAge reaches this threshold while in TENTATIVE period, the target will be terminated prematurely.
TrajectoryManagement:
useUniqueID: 0 # Use 64-bit long Unique ID when assigning tracker ID.
DataAssociator:
dataAssociatorType: 0 # the type of data associator among { DEFAULT= 0 }
associationMatcherType: 1 # the type of matching algorithm among { GREEDY=0, CASCADED=1 }
checkClassMatch: 1 # If checked, only the same-class objects are associated with each other. Default: true
# [Association Metric: Mahalanobis distance threshold (refer to DeepSORT paper) ]
thresholdMahalanobis: 12.1875 # Threshold of Mahalanobis distance. A detection and a target are not matched if their distance is larger than the threshold.
# [Association Metric: Thresholds for valid candidates]
minMatchingScore4Overall: 0.1794 # Min total score
minMatchingScore4SizeSimilarity: 0.3291 # Min bbox size similarity score
minMatchingScore4Iou: 0.2364 # Min IOU score
minMatchingScore4ReidSimilarity: 0.7505 # Min reid similarity score
# [Association Metric: Weights for valid candidates]
matchingScoreWeight4SizeSimilarity: 0.7178 # Weight for the Size-similarity score
matchingScoreWeight4Iou: 0.4551 # Weight for the IOU score
matchingScoreWeight4ReidSimilarity: 0.3197 # Weight for the reid similarity
# [Association Metric: Tentative detections] only uses iou similarity for tentative detections
tentativeDetectorConfidence: 0.2479 # If a detection's confidence is lower than this but higher than minDetectorConfidence, then it's considered as a tentative detection
minMatchingScore4TentativeIou: 0.2376 # Min iou threshold to match targets and tentative detection
StateEstimator:
stateEstimatorType: 2 # the type of state estimator among { DUMMY=0, SIMPLE=1, REGULAR=2 }
# [Dynamics Modeling]
noiseWeightVar4Loc: 0.0503 # weight of process and measurement noise for bbox center; if set, location noise will be proportional to box height
noiseWeightVar4Vel: 0.0037 # weight of process and measurement noise for velocity; if set, velocity noise will be proportional to box height
useAspectRatio: 1 # use aspect ratio in Kalman filter's observation
ReID:
reidType: 1 # The type of reid among { DUMMY=0, DEEP=1 }
# [Reid Network Info]
batchSize: 100 # Batch size of reid network
workspaceSize: 1000 # Workspace size to be used by reid engine, in MB
reidFeatureSize: 256 # Size of reid feature
reidHistorySize: 100 # Max number of reid features kept for one object
inferDims: [3, 256, 128] # Reid network input dimension CHW or HWC based on inputOrder
networkMode: 1 # Reid network inference precision mode among {fp32=0, fp16=1, int8=2 }
# [Input Preprocessing]
inputOrder: 0 # Reid network input order among { NCHW=0, NHWC=1 }. Batch will be converted to the specified order before reid input.
colorFormat: 0 # Reid network input color format among {RGB=0, BGR=1 }. Batch will be converted to the specified color before reid input.
offsets: [123.6750, 116.2800, 103.5300] # Array of values to be subtracted from each input channel, with length equal to number of channels
netScaleFactor: 0.01735207 # Scaling factor for reid network input after subtracting offsets
keepAspc: 1 # Whether to keep aspect ratio when resizing input objects for reid
# [Output Postprocessing]
addFeatureNormalization: 1 # If reid feature is not normalized in network, adding normalization on output so each reid feature has l2 norm equal to 1
# [Paths and Names]
tltEncodedModel: "/opt/nvidia/deepstream/deepstream/samples/models/Tracker/resnet50_market1501.etlt" # NVIDIA TAO model path
tltModelKey: "nvidia_tao" # NVIDIA TAO model key
modelEngineFile: "/opt/nvidia/deepstream/deepstream/samples/models/Tracker/resnet50_market1501.etlt_b100_gpu0_fp16.engine" # Engine file path

View File

@ -0,0 +1,55 @@
%YAML:1.0
####################################################################################################
# SPDX-FileCopyrightText: Copyright (c) 2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
#
# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
# property and proprietary rights in and to this material, related
# documentation and any modifications thereto. Any use, reproduction,
# disclosure or distribution of this material and related documentation
# without an express license agreement from NVIDIA CORPORATION or
# its affiliates is strictly prohibited.
####################################################################################################
BaseConfig:
minDetectorConfidence: 0.1345 # If the confidence of a detector bbox is lower than this, then it won't be considered for tracking
TargetManagement:
enableBboxUnClipping: 0 # In case the bbox is likely to be clipped by image border, unclip bbox
maxTargetsPerStream: 300 # Max number of targets to track per stream. Recommended to set >10. Note: this value should account for the targets being tracked in shadow mode as well. Max value depends on the GPU memory capacity
# [Creation & Termination Policy]
minIouDiff4NewTarget: 0.5780 # If the IOU between the newly detected object and any of the existing targets is higher than this threshold, this newly detected object will be discarded.
minTrackerConfidence: 0.8216 # If the confidence of an object tracker is lower than this on the fly, then it will be tracked in shadow mode. Valid Range: [0.0, 1.0]
probationAge: 5 # If the target's age exceeds this, the target will be considered to be valid.
maxShadowTrackingAge: 26 # Max length of shadow tracking. If the shadowTrackingAge exceeds this limit, the tracker will be terminated.
earlyTerminationAge: 1 # If the shadowTrackingAge reaches this threshold while in TENTATIVE period, the target will be terminated prematurely.
TrajectoryManagement:
useUniqueID: 0 # Use 64-bit long Unique ID when assigning tracker ID. Default is [true]
DataAssociator:
dataAssociatorType: 0 # the type of data associator among { DEFAULT= 0 }
associationMatcherType: 1 # the type of matching algorithm among { GREEDY=0, CASCADED=1 }
checkClassMatch: 1 # If checked, only the same-class objects are associated with each other. Default: true
# [Association Metric: Thresholds for valid candidates]
minMatchingScore4Overall: 0.2543 # Min total score
minMatchingScore4SizeSimilarity: 0.4019 # Min bbox size similarity score
minMatchingScore4Iou: 0.2159 # Min IOU score
matchingScoreWeight4SizeSimilarity: 0.1365 # Weight for the Size-similarity score
matchingScoreWeight4Iou: 0.3836 # Weight for the IOU score
# [Association Metric: Tentative detections] only uses iou similarity for tentative detections
tentativeDetectorConfidence: 0.2331 # If a detection's confidence is lower than this but higher than minDetectorConfidence, then it's considered as a tentative detection
minMatchingScore4TentativeIou: 0.2867 # Min iou threshold to match targets and tentative detection
usePrediction4Assoc: 1 # use the predicted state info for association instead of the past known states
StateEstimator:
stateEstimatorType: 2 # the type of state estimator among { DUMMY=0, SIMPLE=1, REGULAR=2 }
# [Dynamics Modeling]
noiseWeightVar4Loc: 0.0301 # weight of process and measurement noise for bbox center; if set, location noise will be proportional to box height
noiseWeightVar4Vel: 0.0017 # weight of process and measurement noise for velocity; if set, velocity noise will be proportional to box height
useAspectRatio: 1 # use aspect ratio in Kalman filter's observation

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,27 @@
#include "nv_tracker_manager.hpp"
#define SET_GPU_ID(object, gpu_id) \
g_object_set(G_OBJECT(object), "gpu-id", gpu_id, NULL);
#define GPU_ID 0
NvTrackerManager::NvTrackerManager() {
  // Read the tracker paths added to the JSON config in this commit.
  const auto &config = ConfigManager::get_instance().get_config();
  ll_config_file = config["ll-config-file"].get<std::string>();
  ll_lib_file = config["ll-lib-file"].get<std::string>();
}
bool NvTrackerManager::create_nv_tracker() {
  tracker = gst_element_factory_make("nvtracker", "tracker_plugin");
  // Check creation before touching the element; g_object_set on a NULL
  // element would only emit GLib criticals and hide the real failure.
  if (!tracker) {
    g_printerr("\033[1;31m Unable to create Tracker. Exiting. \033[0m\n");
    return false;
  }
  g_object_set(G_OBJECT(tracker), "ll-config-file", ll_config_file.c_str(),
               NULL);
  g_object_set(G_OBJECT(tracker), "ll-lib-file", ll_lib_file.c_str(), NULL);
  g_object_set(G_OBJECT(tracker), "display-tracking-id", 1, NULL);
  SET_GPU_ID(tracker, GPU_ID);
  // g_object_set(G_OBJECT(tracker), "enable_batch_process", 1, NULL);
  return true;
}

View File

@ -0,0 +1,17 @@
#include <gst/gst.h>
#include <fstream>
#include <iostream>
#include "config_manager.hpp"
class NvTrackerManager {
 private:
 public:
  GstElement *tracker = NULL;
  std::string ll_config_file;
  std::string ll_lib_file;
  NvTrackerManager();
  ~NvTrackerManager();
  bool create_nv_tracker();
};
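Note that ~NvTrackerManager() is declared here but no definition appears in the .cpp hunk above; if one does not exist elsewhere in the tree, the build will fail at link time. A minimal sketch of a definition, under the assumption that the pipeline owns the element after gst_bin_add_many() and nothing needs to be released by the manager itself:

// Hypothetical definition, only needed if one does not already exist elsewhere.
// The pipeline takes ownership of `tracker` once it is added to the bin, so
// there is nothing to free here.
NvTrackerManager::~NvTrackerManager() = default;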

View File

@ -231,10 +231,10 @@ bool PipelineManager::setup_pipeline() {
if (sink_manager->display_output < 3) {
gst_bin_add_many(
GST_BIN(pipeline), nv_infer_server_manager->primary_detector,
// pgie, tracker,
gstds_example_manager->custom_plugin, tiler_manager->tiler,
queue_array[2].queue, nv_video_convert_manager->nvvidconv,
nv_osd_manager->nvosd, sink_manager->sink, NULL);
nv_tracker_manager->tracker, gstds_example_manager->custom_plugin,
tiler_manager->tiler, queue_array[2].queue,
nv_video_convert_manager->nvvidconv, nv_osd_manager->nvosd,
sink_manager->sink, NULL);
/* we link the elements together
* nvstreammux -> nvinfer -> nvtiler -> nvvidconv -> nvosd ->
@ -242,7 +242,7 @@ bool PipelineManager::setup_pipeline() {
if (!gst_element_link_many(streammux_manager->streammux,
nv_video_convert_manager->nvvidconv,
nv_infer_server_manager->primary_detector,
// pgie, tracker,
nv_tracker_manager->tracker,
gstds_example_manager->custom_plugin,
tiler_manager->tiler, nv_osd_manager->nvosd,
sink_manager->sink, NULL)) {
@ -253,12 +253,12 @@ bool PipelineManager::setup_pipeline() {
} else {
gst_bin_add_many(
GST_BIN(pipeline), nv_infer_server_manager->primary_detector,
// pgie, tracker,
gstds_example_manager->custom_plugin, tiler_manager->tiler,
queue_array[2].queue, nv_video_convert_manager->nvvidconv,
nv_osd_manager->nvosd, sink_manager->nvvidconv_postosd,
sink_manager->caps, sink_manager->encoder, sink_manager->rtppay,
sink_manager->sink, NULL);
nv_tracker_manager->tracker, gstds_example_manager->custom_plugin,
tiler_manager->tiler, queue_array[2].queue,
nv_video_convert_manager->nvvidconv, nv_osd_manager->nvosd,
sink_manager->nvvidconv_postosd, sink_manager->caps,
sink_manager->encoder, sink_manager->rtppay, sink_manager->sink,
NULL);
// Link the elements together:
// file-source -> h264-parser -> nvh264-decoder ->
@ -268,7 +268,7 @@ bool PipelineManager::setup_pipeline() {
streammux_manager->streammux,
nv_video_convert_manager->nvvidconv,
nv_infer_server_manager->primary_detector,
// pgie, tracker,
nv_tracker_manager->tracker,
gstds_example_manager->custom_plugin, tiler_manager->tiler,
nv_osd_manager->nvosd, sink_manager->nvvidconv_postosd,
sink_manager->caps, sink_manager->encoder, sink_manager->rtppay,
@ -357,6 +357,8 @@ bool PipelineManager::create_pipeline_elements(int num_sources,
// tracker
// gst_object_unref(nvinfer);
nv_tracker_manager->create_nv_tracker();
message_handling->create_message_handler(pipeline, g_run_forever, loop);
setup_pipeline();

View File

@ -11,6 +11,7 @@
#include "nv_ds_logger_manager.hpp"
#include "nv_infer_server_manager.hpp"
#include "nv_osd_manager.hpp"
#include "nv_tracker_manager.hpp"
#include "nv_video_convert_manager.hpp"
#include "queue_manager.hpp"
#include "rtsp_streaming_manager.hpp"
@ -36,6 +37,7 @@ class PipelineManager {
MessageHandling *message_handling = new MessageHandling();
RtspStreamingManager *rtsp_streaming_manager = new RtspStreamingManager();
NvInferServerManager *nv_infer_server_manager = new NvInferServerManager();
NvTrackerManager *nv_tracker_manager = new NvTrackerManager();
static double fps_buffer_probe;
static double fps_probe;
static double fps_osd;