Merge pull request #24224 from AsyaPronina:asyadev/port_vas_ot_to_opencv

This commit is contained in:
Alexander Alekhin 2023-12-08 11:41:25 +00:00
commit 850ebec135
29 changed files with 4012 additions and 3 deletions

View File

@ -88,6 +88,7 @@ set(gapi_srcs
src/api/kernels_imgproc.cpp
src/api/kernels_video.cpp
src/api/kernels_nnparsers.cpp
src/api/kernels_ot.cpp
src/api/kernels_streaming.cpp
src/api/kernels_stereo.cpp
src/api/render.cpp
@ -130,6 +131,7 @@ set(gapi_srcs
src/backends/cpu/gcpustereo.cpp
src/backends/cpu/gcpuvideo.cpp
src/backends/cpu/gcpucore.cpp
src/backends/cpu/gcpuot.cpp
src/backends/cpu/gnnparsers.cpp
# Fluid Backend (also built-in, FIXME:move away)
@ -240,18 +242,25 @@ set(gapi_srcs
src/utils/itt.cpp
)
file(GLOB_RECURSE gapi_3rdparty_srcs
"${CMAKE_CURRENT_LIST_DIR}/src/3rdparty/vasot/src/*.cpp"
)
ocv_add_dispatched_file(backends/fluid/gfluidimgproc_func SSE4_1 AVX2)
ocv_add_dispatched_file(backends/fluid/gfluidcore_func SSE4_1 AVX2)
ocv_list_add_prefix(gapi_srcs "${CMAKE_CURRENT_LIST_DIR}/")
# For IDE users
ocv_source_group("Src" FILES ${gapi_srcs})
ocv_source_group("Src" FILES ${gapi_srcs} ${gapi_3rdparty_srcs})
ocv_source_group("Include" FILES ${gapi_ext_hdrs})
ocv_set_module_sources(HEADERS ${gapi_ext_hdrs} SOURCES ${gapi_srcs})
ocv_set_module_sources(HEADERS ${gapi_ext_hdrs} SOURCES ${gapi_srcs} ${gapi_3rdparty_srcs})
ocv_module_include_directories("${CMAKE_CURRENT_LIST_DIR}/src")
# VAS Object Tracking includes
ocv_module_include_directories(${CMAKE_CURRENT_LIST_DIR}/src/3rdparty/vasot/include)
ocv_create_module()
ocv_target_link_libraries(${the_module} PRIVATE ade)
@ -383,10 +392,11 @@ if(HAVE_ONNX)
endif()
endif()
ocv_install_3rdparty_licenses(vasot "${CMAKE_CURRENT_SOURCE_DIR}/src/3rdparty/vasot/LICENSE.txt")
ocv_add_perf_tests()
ocv_add_samples()
# Required for sample with inference on host
if(TARGET example_gapi_onevpl_infer_with_advanced_device_selection)
if(TARGET ocv.3rdparty.openvino AND OPENCV_GAPI_WITH_OPENVINO)

View File

@ -0,0 +1,29 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2018 Intel Corporation
#ifndef OPENCV_GAPI_CPU_OT_API_HPP
#define OPENCV_GAPI_CPU_OT_API_HPP
#include <opencv2/core/cvdef.h> // GAPI_EXPORTS
#include <opencv2/gapi/gkernel.hpp> // GKernelPackage
namespace cv {
namespace gapi {
/**
 * @brief This namespace contains G-API Operation Types for
 * VAS Object Tracking module functionality.
 */
namespace ot {
namespace cpu {
/**
 * @brief Returns the kernel package with the CPU (reference) implementations
 * of the VAS Object Tracking operations.
 */
GAPI_EXPORTS GKernelPackage kernels();
} // namespace cpu
} // namespace ot
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_CPU_OT_API_HPP

View File

@ -0,0 +1,190 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2023 Intel Corporation
#ifndef OPENCV_GAPI_OT_HPP
#define OPENCV_GAPI_OT_HPP
#include <opencv2/gapi.hpp>
#include <opencv2/gapi/s11n.hpp>
#include <opencv2/gapi/gkernel.hpp>
namespace cv {
namespace gapi {
/**
* @brief This namespace contains G-API Operation Types for
* VAS Object Tracking module functionality.
*/
namespace ot {
/**
 * @enum TrackingStatus
 *
 * Tracking status twin for vas::ot::TrackingStatus.
 * Mirrors the tracker's per-object lifecycle state in the public G-API interface.
 */
enum class TrackingStatus: int32_t
{
    NEW = 0, /**< The object is newly added. */
    TRACKED, /**< The object is being tracked. */
    LOST /**< The object gets lost now. The object can be tracked again
            by specifying detected object manually. */
};
/**
 * @brief Configuration parameters for the VAS Object Tracker.
 *
 * Passed to G-API as a compile argument (see CompileArgTag specialization).
 */
struct ObjectTrackerParams
{
    /**
     * Maximum number of trackable objects in a frame.
     * Valid range: 1 <= max_num_objects, or -1 for no limitation in X86.
     * KMB/TBH has limitation up to 1024; there -1 means 200.
     * Default value is -1 which means there is no limitation in X86.
     */
    int32_t max_num_objects = -1;

    /**
     * Input color format. Supports 0(BGR), 1(NV12), 2(BGRX) and 4(I420).
     */
    int32_t input_image_format = 0;

    /**
     * Specifies whether the tracker uses the detection class for keeping the id of an object.
     * If true, a new detection is associated with previous tracking only when both share the
     * same class, and an object's class id stays fixed across video frames.
     * If false, detections may be associated across different-class objects, so an object's
     * class id may change over time depending on the tracker input.
     * Turning this off is recommended when the detector is likely to confuse object classes
     * (e.g. bicycle vs. motorbike): the tracker then ignores the detector's class label,
     * which increases tracking reliability.
     * @n
     * Default value is true.
     */
    bool tracking_per_class = true;

    /// Field-wise equality over all three parameters.
    bool operator==(const ObjectTrackerParams& other) const
    {
        const bool same_limit  = (max_num_objects    == other.max_num_objects);
        const bool same_format = (input_image_format == other.input_image_format);
        const bool same_mode   = (tracking_per_class == other.tracking_per_class);
        return same_limit && same_format && same_mode;
    }
};
// Output tuple of the tracking operations:
// (tracked rects, class labels, tracking IDs, tracking statuses).
using GTrackedInfo = std::tuple<cv::GArray<cv::Rect>, cv::GArray<int32_t>, cv::GArray<uint64_t>, cv::GArray<TrackingStatus>>;

// Tracking operation over a cv::GMat input frame.
G_API_OP(GTrackFromMat, <GTrackedInfo(cv::GMat, cv::GArray<cv::Rect>, cv::GArray<int32_t>, float)>, "com.intel.track_from_mat")
{
    // All four outputs are dynamically-sized arrays, so no static meta is available.
    static std::tuple<cv::GArrayDesc, cv::GArrayDesc,
                      cv::GArrayDesc, cv::GArrayDesc> outMeta(cv::GMatDesc, cv::GArrayDesc, cv::GArrayDesc, float)
    {
        return std::make_tuple(cv::empty_array_desc(), cv::empty_array_desc(),
                               cv::empty_array_desc(), cv::empty_array_desc());
    }
};

// Same operation for a cv::GFrame (media frame) input; outputs match GTrackFromMat.
G_API_OP(GTrackFromFrame, <GTrackedInfo(cv::GFrame, cv::GArray<cv::Rect>, cv::GArray<int32_t>, float)>, "com.intel.track_from_frame")
{
    static std::tuple<cv::GArrayDesc, cv::GArrayDesc,
                      cv::GArrayDesc, cv::GArrayDesc> outMeta(cv::GFrameDesc, cv::GArrayDesc, cv::GArrayDesc, float)
    {
        return std::make_tuple(cv::empty_array_desc(), cv::empty_array_desc(),
                               cv::empty_array_desc(), cv::empty_array_desc());
    }
};
/**
 * @brief Tracks objects with video frames.
 * If a detected object is overlapped enough with one of tracked object, the tracked object's
 * information is updated with the input detected object.
 * On the other hand, if a detected object is overlapped with none of tracked objects,
 * the detected object is newly added and ObjectTracker starts to track the object.
 * In zero term tracking type, ObjectTracker clears tracked objects in case that empty
 * list of detected objects is passed in.
 *
 * @param mat Input frame.
 * @param detected_rects Detected objects rectangles in the input frame.
 * @param detected_class_labels Detected objects class labels in the input frame.
 * @param delta Delta time between two consecutive tracking in seconds.
 *              The valid range is [0.005 ~ 0.5].
 * @return Tracking results of target objects.
 *         cv::GArray<cv::Rect>       Array of rectangles for tracked objects.
 *         cv::GArray<int32_t>        Array of detected objects labels.
 *         cv::GArray<uint64_t>       Array of tracking IDs for objects.
 *                                    Numbering sequence starts from 1.
 *                                    The value 0 means the tracking ID of this object has
 *                                    not been assigned.
 *         cv::GArray<TrackingStatus> Array of tracking statuses for objects.
 */
GAPI_EXPORTS std::tuple<cv::GArray<cv::Rect>,
                        cv::GArray<int32_t>,
                        cv::GArray<uint64_t>,
                        cv::GArray<TrackingStatus>> track(const cv::GMat& mat,
                                                          const cv::GArray<cv::Rect>& detected_rects,
                                                          const cv::GArray<int>& detected_class_labels,
                                                          float delta);
/**
 * @brief Tracks objects with video frames. Overload of track(...) for frame as GFrame.
 *
 * @param frame Input frame.
 * @param detected_rects Detected objects rectangles in the input frame.
 * @param detected_class_labels Detected objects class labels in the input frame.
 * @param delta Delta time between two consecutive tracking in seconds.
 *              The valid range is [0.005 ~ 0.5].
 * @return Tracking results of target objects.
 *         cv::GArray<cv::Rect>       Array of rectangles for tracked objects.
 *         cv::GArray<int32_t>        Array of detected objects labels.
 *         cv::GArray<uint64_t>       Array of tracking IDs for objects.
 *                                    Numbering sequence starts from 1.
 *                                    The value 0 means the tracking ID of this object has
 *                                    not been assigned.
 *         cv::GArray<TrackingStatus> Array of tracking statuses for objects.
 */
GAPI_EXPORTS std::tuple<cv::GArray<cv::Rect>,
                        cv::GArray<int32_t>,
                        cv::GArray<uint64_t>,
                        cv::GArray<TrackingStatus>> track(const cv::GFrame& frame,
                                                          const cv::GArray<cv::Rect>& detected_rects,
                                                          const cv::GArray<int>& detected_class_labels,
                                                          float delta);
} // namespace ot
} // namespace gapi
} // namespace cv
// FIXME: move to a separate file?
namespace cv
{
namespace detail
{
// Enables passing ObjectTrackerParams to a G-API graph as a compile argument.
template<> struct CompileArgTag<cv::gapi::ot::ObjectTrackerParams>
{
    // Unique tag string G-API uses to look this compile argument up.
    static const char* tag()
    {
        return "cv.gapi.ot.object_tracker_params";
    }
};
} // namespace detail
namespace gapi
{
namespace s11n
{
namespace detail
{
// (De)serialization support for ObjectTrackerParams in G-API's s11n machinery.
template<> struct S11N<cv::gapi::ot::ObjectTrackerParams> {
    // Writes all three parameters in declaration order.
    static void serialize(IOStream &os, const cv::gapi::ot::ObjectTrackerParams &p) {
        os << p.max_num_objects << p.input_image_format << p.tracking_per_class;
    }
    // Reads the parameters back in the exact order they were written.
    static cv::gapi::ot::ObjectTrackerParams deserialize(IIStream &is) {
        cv::gapi::ot::ObjectTrackerParams p;
        is >> p.max_num_objects >> p.input_image_format >> p.tracking_per_class;
        return p;
    }
};
} // namespace detail
} // namespace s11n
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_OT_HPP

View File

@ -0,0 +1,21 @@
The MIT License
Copyright (c) 2018-2019 Intel Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -0,0 +1,83 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_COMMON_HPP
#define VAS_COMMON_HPP
#include <cstdint>
#define OT_VERSION_MAJOR 1
#define OT_VERSION_MINOR 0
#define OT_VERSION_PATCH 0
#define VAS_EXPORT //__attribute__((visibility("default")))
namespace vas {
/**
 * @class Version
 *
 * Immutable version triple (major.minor.patch).
 */
class Version {
  public:
    /**
     * Constructor.
     *
     * @param[in] major Major version.
     * @param[in] minor Minor version.
     * @param[in] patch Patch version.
     */
    explicit Version(uint32_t major, uint32_t minor, uint32_t patch)
        : ver_major_(major), ver_minor_(minor), ver_patch_(patch) {
    }

    /// Returns major version.
    uint32_t GetMajor() const noexcept {
        return ver_major_;
    }

    /// Returns minor version.
    uint32_t GetMinor() const noexcept {
        return ver_minor_;
    }

    /// Returns patch version.
    uint32_t GetPatch() const noexcept {
        return ver_patch_;
    }

  private:
    uint32_t ver_major_;
    uint32_t ver_minor_;
    uint32_t ver_patch_;
};
/**
 * @enum BackendType
 *
 * Represents HW backend types. Used by ObjectTracker::Builder::backend_type.
 */
enum class BackendType {
    CPU, /**< CPU */
    GPU /**< GPU */
};
/**
 * @enum ColorFormat
 *
 * Represents Color formats. Used by ObjectTracker::Builder::input_image_format.
 */
enum class ColorFormat { BGR, NV12, BGRX, GRAY, I420 };
}; // namespace vas
#endif // VAS_COMMON_HPP

View File

@ -0,0 +1,440 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_OT_HPP
#define VAS_OT_HPP
#include <vas/common.hpp>
#include <opencv2/core.hpp>
#include <iostream>
#include <map>
#include <memory>
#include <vector>
namespace vas {
/**
* @namespace vas::ot
* @brief %vas::ot namespace.
*
* The ot namespace has classes, functions, and definitions for object tracker.
* It is a general tracker, and an object is represented as a rectangular box.
* Thus, you can use any kind of detector if it generates a rectangular box as output.
* Once an object is added to object tracker, the object is started to be tracked.
*/
namespace ot {
/**
 * Returns current version.
 */
VAS_EXPORT vas::Version GetVersion() noexcept;
/**
 * @enum TrackingType
 *
 * Tracking type selected at build time via ObjectTracker::Builder::Build().
 */
enum class TrackingType {
    LONG_TERM, // deprecated (see the ObjectTracker class documentation below)
    SHORT_TERM,
    ZERO_TERM,
    SHORT_TERM_KCFVAR,
    SHORT_TERM_IMAGELESS,
    ZERO_TERM_IMAGELESS,
    ZERO_TERM_COLOR_HISTOGRAM
};
/**
 * @enum TrackingStatus
 *
 * Tracking status. Reported per tracked object via Object::status.
 */
enum class TrackingStatus {
    NEW = 0, /**< The object is newly added. */
    TRACKED, /**< The object is being tracked. */
    LOST /**< The object gets lost now. The object can be tracked again automatically(long term tracking) or by
            specifying detected object manually(short term and zero term tracking). */
};
/**
 * @class DetectedObject
 * @brief Represents an input object.
 *
 * In order to track an object, a detected object should be added one or more times to
 * ObjectTracker. When an object is required to be added to ObjectTracker, create an
 * instance of this class and fill in its values.
 */
class DetectedObject {
  public:
    /// Default constructor: empty rectangle and class label 0.
    DetectedObject() = default;

    /**
     * Constructor with specific values.
     *
     * @param[in] input_rect Rectangle of input object.
     * @param[in] input_class_label Class label of input object.
     */
    DetectedObject(const cv::Rect &input_rect, int32_t input_class_label)
        : rect(input_rect), class_label(input_class_label) {
    }

  public:
    /// Object rectangle.
    cv::Rect rect{};

    /// Input class label.
    /// An arbitrary user-specified value, useful for categorizing input objects.
    /// The same value is assigned to class_label in the Object class.
    int32_t class_label = 0;
};
/**
 * @class Object
 * @brief Represents tracking result of a target object.
 *
 * It contains tracking information of a target object.
 * ObjectTracker generates an instance of this class per tracked object when Track method is called.
 */
class Object {
  public:
    /**
     * Object rectangle.
     */
    cv::Rect rect;
    /**
     * Tracking ID.
     * Numbering sequence starts from 1.
     * The value 0 means the tracking ID of this object has not been assigned.
     */
    uint64_t tracking_id;
    /**
     * Class label.
     * This is specified by DetectedObject.
     */
    int32_t class_label;
    /**
     * Tracking status (NEW / TRACKED / LOST).
     */
    TrackingStatus status;
    /**
     * Index in the DetectedObject vector.
     * If the Object was not in detection input at this frame, then it will be -1.
     */
    int32_t association_idx;
};
// Stream-output helpers for logging/debugging tracking results.
VAS_EXPORT std::ostream &operator<<(std::ostream &os, TrackingStatus ts);
VAS_EXPORT std::ostream &operator<<(std::ostream &os, const Object &object);
/**
* @class ObjectTracker
* @brief Tracks objects from video frames.
*
* This class tracks objects from the input frames.
* In order to create an instance of this class, you need to use ObjectTracker::Builder class.
* @n
* ObjectTracker can run in three different ways as TrackingType defines.
* @n
* In short term tracking, an object is added at the beginning, and the object is tracked with consecutive input frames.
* It is recommended to update the tracked object's information for every 10-20 frames.
* @n
* Zero term tracking can be thought as association between a detected object and tracked object.
* Detected objects should always be added when Track method is invoked.
* For each frame, detected objects are mapped to tracked objects with this tracking type, which enables ID tracking of
detected objects.
* @n
* Long term tracking is deprecated.
* In long term tracking, an object is added at the beginning, and the object is tracked with consecutive input frames.
* User doesn't need to update manually the object's information.
* Long term tracking takes relatively long time to track objects.
* @n
* You can specify tracking type by setting attributes of Builder class when you create instances of this class.
* It is not possible to run ObjectTracker with two or more different tracking types in one instance.
* You can also limit the number of tracked objects by setting attributes of Builder class.
* @n
* Currently, ObjectTracker does not support HW offloading.
* It is possible to run ObjectTracker only on CPU.
* @n@n
* Following sample code shows how to use short term tracking type.
 * Objects are added to ObjectTracker at the beginning of tracking and in the middle of tracking periodically as well.
* @code
cv::VideoCapture video("/path/to/video/source");
cv::Mat frame;
cv::Mat first_frame;
video >> first_frame;
vas::ot::ObjectTracker::Builder ot_builder;
auto ot = ot_builder.Build(vas::ot::TrackingType::SHORT_TERM);
vas::pvd::PersonVehicleDetector::Builder pvd_builder;
auto pvd = pvd_builder.Build("/path/to/directory/of/fd/model/files");
std::vector<vas::pvd::PersonVehicle> person_vehicles;
std::vector<vas::ot::DetectedObject> detected_objects;
// Assume that there're objects in the first frame
person_vehicles = pvd->Detect(first_frame);
for (const auto& pv : person_vehicles)
detected_objects.emplace_back(pv.rect, static_cast<int32_t>(pv.type));
ot->Track(first_frame, detected_objects);
// Assume that now pvd is running in another thread
StartThread(pvd);
while (video.read(frame))
{
detected_objects.clear();
// Assume that frames are forwarded to the thread on which pvd is running
EnqueueFrame(frame);
// Assumes that pvd is adding its result into a queue in another thread.
// Assumes also that latency from the last pvd frame to current frame is ignorable.
person_vehicles = DequeuePersonVehicles();
if (!person_vehicles.empty())
{
detected_objects.clear();
for (const auto& pv : person_vehicles)
detected_objects.emplace_back(pv.rect, static_cast<int32_t>(pv.type));
}
auto objects = ot->Track(frame, detected_objects);
for (const auto& object : objects)
{
// Handle tracked object
}
}
* @endcode
* @n
* Following sample code shows how to use zero term tracking type.
* In this sample, pvd runs for each input frame.
* After pvd generates results, ot runs with the results and object IDs are preserved.
* @code
cv::VideoCapture video("/path/to/video/source");
cv::Mat frame;
vas::ot::ObjectTracker::Builder ot_builder;
auto ot = ot_builder.Build(vas::ot::TrackingType::ZERO_TERM);
vas::pvd::PersonVehicleDetector::Builder pvd_builder;
auto pvd = pvd_builder.Build("/path/to/directory/of/fd/model/files");
std::vector<vas::ot::DetectedObject> detected_objects;
ot->SetFrameDeltaTime(0.033f);
while (video.read(frame))
{
detected_objects.clear();
auto person_vehicles = pvd->Detect(first_frame);
for (const auto& pv : person_vehicles)
detected_objects.emplace_back(pv.rect, static_cast<int32_t>(pv.type));
auto objects = ot->Track(frame, detected_objects);
for (const auto& object : objects)
{
// Handle tracked object
}
}
* @endcode
*/
class ObjectTracker {
  public:
    // Factory/configuration class; the only way to create ObjectTracker instances.
    class Builder;

  public:
    // Not default-constructible, copyable, or movable: instances are created by
    // Builder::Build() and owned via std::unique_ptr.
    ObjectTracker() = delete;
    ObjectTracker(const ObjectTracker &) = delete;
    ObjectTracker(ObjectTracker &&) = delete;
    /**
     * Destructor.
     */
    VAS_EXPORT ~ObjectTracker();

  public:
    ObjectTracker &operator=(const ObjectTracker &) = delete;
    ObjectTracker &operator=(ObjectTracker &&) = delete;

  public:
    /**
     * Tracks objects with video frames.
     * Also, this method is used to add detected objects.
     * If a detected object is overlapped enough with one of tracked object, the tracked object's information is updated
     * with the input detected object. On the other hand, if a detected object is overlapped with none of tracked
     * objects, the detected object is newly added and ObjectTracker starts to track the object. In long term and short
     * term tracking type, ObjectTracker continues to track objects in case that empty list of detected objects is
     * passed in. In zero term tracking type, however, ObjectTracker clears tracked objects in case that empty list of
     * detected objects is passed in.
     * @n
     * The supported color formats are BGR, NV12, BGRx and I420.
     *
     * @param[in] frame Input frame.
     * @param[in] detected_objects Detected objects in the input frame. Default value is an empty vector.
     * @return Information of tracked objects.
     * @exception std::invalid_argument Input frame is invalid.
     */
    VAS_EXPORT std::vector<Object>
    Track(const cv::Mat &frame, const std::vector<DetectedObject> &detected_objects = std::vector<DetectedObject>());
    /**
     * This function is to set a parameter indicating 'delta time' between now and last call to Track() in seconds.
     * The default value of the delta time is 0.033f which is tuned for 30 fps video frame rate.
     * It is to achieve improved tracking quality for other frame rates or inconstant frame rate by frame drops.
     * If input frames come from a video stream of constant frame rate, then a user needs to set this value as 1.0/fps
     * just after video open. For example, 60 fps video stream should set 0.0167f. If input frames have inconstant frame
     * rate, then a user needs to call this function before the Track() function.
     *
     * @param[in] frame_delta_t Delta time between two consecutive tracking in seconds. The valid range is [0.005 ~
     * 0.5].
     */
    VAS_EXPORT void SetFrameDeltaTime(float frame_delta_t);
    /**
     * Returns the tracking type of current instance.
     */
    VAS_EXPORT TrackingType GetTrackingType() const noexcept;
    /**
     * Returns the currently set maximum number of trackable objects.
     */
    VAS_EXPORT int32_t GetMaxNumObjects() const noexcept;
    /**
     * Returns the currently set frame delta time.
     */
    VAS_EXPORT float GetFrameDeltaTime() const noexcept;
    /**
     * Returns the currently set color format.
     */
    VAS_EXPORT vas::ColorFormat GetInputColorFormat() const noexcept;
    /**
     * Returns the backend type of current instance.
     */
    VAS_EXPORT vas::BackendType GetBackendType() const noexcept;
    /**
     * Returns the currently set tracking-per-class flag.
     */
    VAS_EXPORT bool GetTrackingPerClass() const noexcept;

  private:
    // Pimpl: the implementation is hidden behind an opaque Impl class.
    class Impl;

  private:
    // Private: construction happens only through Builder (declared friend below).
    explicit ObjectTracker(Impl *impl);

  private:
    std::unique_ptr<Impl> impl_;
    friend class Builder;
};
/**
 * @class ObjectTracker::Builder
 * @brief Creates ObjectTracker instances.
 *
 * This class is used to build ObjectTracker instances.
 * All the attributes of this class affect how ObjectTracker is initialized.
 */
class ObjectTracker::Builder {
  public:
    /**
     * Default constructor.
     */
    VAS_EXPORT Builder();
    /**
     * Destructor.
     */
    VAS_EXPORT ~Builder();

  public:
    /**
     * Creates an instance of ObjectTracker based on tracking type and attributes you set.
     * In case that you set valid values for all attributes, an instance of ObjectTracker is created successfully.
     *
     * @param[in] tracking_type Tracking type for newly created ObjectTracker instance.
     * @exception std::invalid_argument One or more attributes you set are invalid.
     * @return ObjectTracker instance.
     */
    VAS_EXPORT std::unique_ptr<ObjectTracker> Build(TrackingType tracking_type) const;

  public:
    /**
     * Specifies HW backend on which object tracker runs.
     * @n
     * Default value is vas::BackendType::CPU.
     */
    vas::BackendType backend_type;
    /**
     * Maximum number of trackable objects in a frame.
     * @n
     * Valid range: 1 <= max_num_objects. Or it can be -1 if there is no limitation of maximum number in X86.
     * @n
     * Default value is -1 which means there is no limitation in X86.
     */
    int32_t max_num_objects;
    /**
     * Input color format vas::ColorFormat. Supports BGR, BGRX, NV12 and I420.
     * @n
     * Default value is BGR.
     */
    vas::ColorFormat input_image_format;
    /**
     * Specifies whether tracker to use detection class for keeping id of an object.
     * If it is true, new detection will be associated from previous tracking only when those two have same class.
     * class id of an object is fixed across video frames.
     * If it is false, new detection can be associated across different-class objects.
     * In this case, the class id of an object may change across video frames depending on the tracker input.
     * It is recommended to turn this option off when it is likely that detector confuses the class of object.
     * For example, when detector confuses bicycle and motorbike. Turning this option off will increase the tracking
     * reliability as tracker will ignore the class label of detector.
     * @n
     * Default value is true.
     */
    bool tracking_per_class;
    /**
     * Platform configuration.
     * You can set various configurations for each platform using predefined configurations.
     * @n
     * For Parallelization in KCFVAR mode, use key "max_num_threads" to set the maximum number of threads. Consult the
     * following format
     * @code platform_config["max_num_threads"] = "2"; // set maximum number of threads(concurrency level) to 2 @endcode
     * @n
     * Default value is 1
     * if value >=1, set value as the number of threads to process OT in parallel mode
     * if value >= Number of available cores OR value is -1, limit concurrency level to maximum available logical cores
     * otherwise: @exception Invalid input
     */
    std::map<std::string, std::string> platform_config;
};
}; // namespace ot
}; // namespace vas
#endif // VAS_OT_HPP

View File

@ -0,0 +1,24 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_COMMON_EXCEPTION_HPP
#define VAS_COMMON_EXCEPTION_HPP
#include <vas/common.hpp>
#include <exception>
#include <stdexcept>
// ETHROW(condition, exception_class, message, ...):
// throws std::<exception_class>(message) when `condition` evaluates to false.
// NOTE(review): `exception_class` is expanded after a `std::` prefix, so it must name
// a standard exception type (e.g. invalid_argument). The variadic arguments are
// currently ignored — presumably reserved for message formatting; confirm with callers.
#define ETHROW(condition, exception_class, message, ...) \
    { \
        if (!(condition)) { \
            throw std::exception_class(message); \
        } \
    }
// Tracing is compiled out in this port: TRACE expands to nothing.
#define TRACE(fmt, ...)
#endif // VAS_COMMON_EXCEPTION_HPP

View File

@ -0,0 +1,144 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_COMMON_PROF_HPP
#define VAS_COMMON_PROF_HPP
#include <cstddef>
#include <cstdint>
#include <list>
#include <map>
#include <memory>
#include <ostream>
#include <stack>
#include <string>
#include <vector>
#define PROF_COMP_NAME(comp) vas::Prof::Component::comp
#ifdef BUILD_OPTION_PROFILING
#define PROF_INIT(component) vas::Prof::Init(PROF_COMP_NAME(component))
#define PROF_START(tag) vas::Prof::Start(tag, __FUNCTION__, __LINE__)
#define PROF_END(tag) vas::Prof::End(tag)
#define PROF_EXTRA(tag, value) vas::Prof::SetExtra(tag, value)
#define PROF_FLUSH(component) vas::Prof::GetInstance(PROF_COMP_NAME(component)).Flush()
#else
#define PROF_INIT(tag)
#define PROF_START(tag)
#define PROF_END(tag)
#define PROF_EXTRA(tag, value)
#define PROF_FLUSH(component)
#endif
#define PROF_TAG_GENERATE(component, group_id, description) \
{ PROF_COMP_NAME(component), group_id, description }
namespace vas {
/**
 * @class Prof
 *
 * Global Prof instance accumulates all ProfData in a Tree structure.
 * Parallel codes within a single vas component (ex. STKCF TBB) create wrong profile results.
 */
class Prof {
  public:
    // Identifies the vas component being profiled; kCount is the number of components.
    enum class Component : int32_t { FD, FR, PVD, CD, FAC, OT, PAC, HD, REID, BASE, KN, kCount };
    typedef uint64_t GroupId;
    typedef uint64_t UniqueId;
    /**
     * @class Prof::ProfData
     *
     * Data node within the Prof class.
     * Accumulates elapsed times between PROF_START / PROF_END.
     */
    class ProfData {
      public:
        ProfData(UniqueId id, GroupId group_id, size_t depth, const char *function_name, const int64_t line,
                 const char *description);
        ProfData(const ProfData &other);
        ~ProfData() = default;
        ProfData &operator=(const ProfData &) = delete;
        bool operator==(const ProfData &) const;
        ProfData *clone();
        std::vector<int64_t> accum_time; // elapsed-time samples accumulated per start/end pair
        std::list<ProfData *> children;  // nested profile scopes (tree structure)
        const UniqueId id;
        const GroupId group_id;
        const size_t depth;              // nesting depth within the profile tree
        const char *function_name;
        const int64_t line;
        const char *description;
        int64_t start_time;              // timestamp recorded when the scope is opened
    };
    // Tag passed by the PROF_* macros to identify a profiling site.
    typedef struct _ProfTag {
        vas::Prof::Component component;
        vas::Prof::GroupId group_id;
        const char *description;
    } ProfTag;

  public:
    Prof();
    ~Prof() = default;
    // Static entry points invoked by the PROF_* macros.
    static void Init(Component comp);
    static void Start(const ProfTag &tag, const char *function_name, int64_t line);
    static void End(const ProfTag &tag);
    static Prof &GetInstance(Component comp);
    static void SetExtra(const ProfTag &tag, int32_t value);
    void StartProfile(GroupId group_id, const char *function_name, int64_t line, const char *description);
    void EndProfile();
    void SetExtraData(const std::string &key, int32_t value);
    void Flush();
    void MergeToMainInstance(Prof *in);

  private:
    const char *GetComponentName(Component comp);
    void Clear();
    // Print detailed prof data.
    void PrintSummary1(std::ostream *out);
    // Print prof data merged in same stack.
    void PrintSummary2(std::ostream *out);
    // Print prof data merged with the same group-id.
    void PrintSummary3(std::ostream *out);
    void PrintSummary1ToCSV(std::ostream *out);
    void PrintExtra(std::ostream *out);
    void PrintAllData(std::ostream *out);
    void Traverse(const ProfData *root, const std::list<ProfData *> &data_list,
                  void (*print_function)(const ProfData *, const ProfData &, std::ostream *), std::ostream *out);
    void TraverseMergeSameStackGroups(const std::list<ProfData *> &in_data_list, std::list<ProfData *> *out_data_list);
    void TraverseMergeAllGroups(const std::list<ProfData *> &in_data_list, std::list<ProfData *> *out_data_list);
    void MergeProfDataList(std::list<Prof::ProfData *> *mergeList, const std::list<Prof::ProfData *> &addList);

  private:
    std::string outdir_;
    std::string out_prof_file_;
    Component component_;
    std::list<ProfData *> root_data_list_;                   // roots of the profile tree
    std::stack<ProfData *> current_data_;                    // stack of currently-open scopes
    std::map<std::string, std::vector<int32_t>> extra_data_; // extra key/value samples
};
} // namespace vas
#endif // VAS_COMMON_PROF_HPP

View File

@ -0,0 +1,292 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#include "kalman_filter_no_opencv.hpp"
#include "../../../common/exception.hpp"
#include <cstring>
#define KALMAN_FILTER_NSHIFT 4
#define KF_USE_PARTIAL_64F
namespace vas {
// R
const int32_t kNoiseCovarFactor = 8;
const float kDefaultDeltaT = 0.033f;
const int32_t kDefaultErrCovFactor = 1;
// lower noise = slowly changed
// Initializes four independent 1-D Kalman filters — center x/y (kfX, kfY) and
// half-width/half-height (kfRX, kfRY) — all in Q4 fixed point
// (scale factor 1 << KALMAN_FILTER_NSHIFT).
KalmanFilterNoOpencv::KalmanFilterNoOpencv(const cv::Rect2f &initial_rect) : delta_t_(kDefaultDeltaT) {
    int32_t left = static_cast<int32_t>(initial_rect.x);
    int32_t right = static_cast<int32_t>(initial_rect.x + initial_rect.width);
    int32_t top = static_cast<int32_t>(initial_rect.y);
    int32_t bottom = static_cast<int32_t>(initial_rect.y + initial_rect.height);
    // (l + r) << (NSHIFT - 1) == ((l + r) / 2) << NSHIFT: box center in fixed point.
    int32_t cX = (left + right) << (KALMAN_FILTER_NSHIFT - 1);
    int32_t cY = (top + bottom) << (KALMAN_FILTER_NSHIFT - 1);
    // Half-extents of the box in the same fixed-point scale.
    int32_t cRX = (right - left) << (KALMAN_FILTER_NSHIFT - 1);
    int32_t cRY = (bottom - top) << (KALMAN_FILTER_NSHIFT - 1);
    kalmanfilter1d32i_init(&kfX, &cX, 0);
    kalmanfilter1d32i_init(&kfY, &cY, 0);
    kalmanfilter1d32i_init(&kfRX, &cRX, 0);
    kalmanfilter1d32i_init(&kfRY, &cRY, 0);
    // Object size proxy (product of fixed-point half-extents), clamped below at 64
    // so covariances never collapse for tiny boxes.
    int32_t object_size = std::max(64, (cRX * cRY));
    // kMeasurementNoise* are tuning constants declared elsewhere (class scope) — TODO confirm.
    noise_ratio_coordinates_ = kMeasurementNoiseCoordinate;
    noise_ratio_rect_size_ = kMeasurementNoiseRectSize;
    // Set default Q (process noise covariance), scaled by object size.
    int32_t cood_cov = static_cast<int32_t>(object_size * noise_ratio_coordinates_);
    int32_t size_cov = static_cast<int32_t>(object_size * noise_ratio_rect_size_);
    kfX.Q[0][0] = cood_cov;
    kfX.Q[1][1] = cood_cov;
    kfY.Q[0][0] = cood_cov;
    kfY.Q[1][1] = cood_cov;
    kfRX.Q[0][0] = size_cov;
    kfRY.Q[0][0] = size_cov;
}
// Runs the predict phase of all four filters and returns the predicted box.
// `delta_tf` is the elapsed time since the last tracking step, in seconds.
cv::Rect2f KalmanFilterNoOpencv::Predict(float delta_tf) {
    delta_t_ = delta_tf;
    // Position filters advance by the elapsed time; size filters are predicted with
    // zero delta (size is not extrapolated over time here).
    kalmanfilter1d32i_predict_phase(&kfX, delta_tf);
    kalmanfilter1d32i_predict_phase(&kfY, delta_tf);
    kalmanfilter1d32i_predict_phase(&kfRX, 0);
    kalmanfilter1d32i_predict_phase(&kfRY, 0);
    // Convert the predicted states (Xk[0]) back from Q4 fixed point to plain integers:
    // center point and half-extents.
    int32_t cp_x = kfX.Xk[0] >> KALMAN_FILTER_NSHIFT;
    int32_t cp_y = kfY.Xk[0] >> KALMAN_FILTER_NSHIFT;
    int32_t rx = kfRX.Xk[0] >> KALMAN_FILTER_NSHIFT;
    int32_t ry = kfRY.Xk[0] >> KALMAN_FILTER_NSHIFT;
    // Reconstruct the top-left corner and full width/height from center/half-extents.
    int32_t pre_x = cp_x - rx;
    int32_t pre_y = cp_y - ry;
    auto width = 2 * rx;
    auto height = 2 * ry;
    // printf(" - In Predict: result (%d, %d %dx%d)\n", pre_x, pre_y, width, height);
    return cv::Rect2f(float(pre_x), float(pre_y),
                      float(width), float(height));
}
// Measurement-update (correction) step: fuses the detected box into all four
// 1-D filters and returns the corrected box in pixel units.
//
// Fixed-point layout (KALMAN_FILTER_NSHIFT fractional bits):
//   pX, pY   : box center  * 2^NSHIFT (sum of two opposite edges << (NSHIFT-1))
//   pRX, pRY : half-extent * 2^NSHIFT (width/height << (NSHIFT-1))
cv::Rect2f KalmanFilterNoOpencv::Correct(const cv::Rect2f &measured_region) {
    int32_t pX = static_cast<int32_t>(measured_region.x + (measured_region.x + measured_region.width))
                 << (KALMAN_FILTER_NSHIFT - 1);
    int32_t pY = static_cast<int32_t>(measured_region.y + (measured_region.y + measured_region.height))
                 << (KALMAN_FILTER_NSHIFT - 1);
    int32_t pRX = static_cast<int32_t>(measured_region.width) << (KALMAN_FILTER_NSHIFT - 1);
    int32_t pRY = static_cast<int32_t>(measured_region.height) << (KALMAN_FILTER_NSHIFT - 1);
    int32_t cX = 0;
    int32_t cY = 0;
    int32_t cRX = 0;
    int32_t cRY = 0;
    // Quantize elapsed time to an integer factor (31.3 ~= 1/0.032s), floored
    // so the covariances below never collapse to zero.
    int32_t delta_t = static_cast<int32_t>(delta_t_ * 31.3f);
    if (delta_t < kDefaultErrCovFactor)
        delta_t = kDefaultErrCovFactor;
    // Set rect-size-adaptive process/observation noise covariance
    int32_t object_size = std::max(64, (pRX * pRY));
    // Q: process noise grows with object size and elapsed time
    int32_t cood_cov = static_cast<int32_t>(object_size * noise_ratio_coordinates_ * delta_t);
    int32_t size_cov = static_cast<int32_t>(object_size * noise_ratio_rect_size_ * delta_t);
    kfX.Q[0][0] = cood_cov;
    kfX.Q[1][1] = cood_cov;
    kfY.Q[0][0] = cood_cov;
    kfY.Q[1][1] = cood_cov;
    kfRX.Q[0][0] = size_cov;
    kfRY.Q[0][0] = size_cov;
    // If no predict phase has run yet (state still all-zero), run one now so
    // the update below has a valid prior.
    if (kfX.Xk[0] == 0 && kfY.Xk[0] == 0) {
        kalmanfilter1d32i_predict_phase(&kfX, delta_t_);
        kalmanfilter1d32i_predict_phase(&kfY, delta_t_);
        kalmanfilter1d32i_predict_phase(&kfRX, 0);
        kalmanfilter1d32i_predict_phase(&kfRY, 0);
    }
    // R: measurement noise shrinks as elapsed time grows (larger shift), i.e.
    // the measurement is trusted more after long gaps.
    int32_t noise_covariance = object_size >> (kNoiseCovarFactor + delta_t);
    kfX.R = noise_covariance;
    kfY.R = noise_covariance;
    kfRX.R = noise_covariance;
    kfRY.R = noise_covariance;
    kalmanfilter1d32i_update_phase(&kfX, pX, &cX);
    kalmanfilter1d32i_update_phase(&kfY, pY, &cY);
    kalmanfilter1d32i_update_phase(&kfRX, pRX, &cRX);
    kalmanfilter1d32i_update_phase(&kfRY, pRY, &cRY);
    // Back to pixels: top-left = center - half-extent. Width/height use the
    // (NSHIFT-1) shift because cRX/cRY hold half-extents (2 * half = full).
    auto x = (cX - cRX) >> KALMAN_FILTER_NSHIFT;
    auto y = (cY - cRY) >> KALMAN_FILTER_NSHIFT;
    auto width = (cRX >> (KALMAN_FILTER_NSHIFT - 1));
    auto height = (cRY >> (KALMAN_FILTER_NSHIFT - 1));
    // printf(" - In Correct: result (%d, %d %dx%d)\n", x, y, width, height);
    return cv::Rect2f(float(x), float(y),
                      float(width), float(height));
}
// Zero-initializes a 1-D filter, optionally seeding the position state from
// *z and the position variance from var.
void KalmanFilterNoOpencv::kalmanfilter1d32i_init(kalmanfilter1d32i *kf, int32_t *z, int32_t var) {
    std::memset(kf, 0, sizeof(kalmanfilter1d32i));
    if (z != nullptr)
        kf->X[0] = *z;
    // Position variance is caller-supplied; velocity variance starts at zero.
    kf->P[0][0] = var;
    kf->P[1][1] = 0;
}
// Ab = A * b for a 2x2 float matrix and a 2-vector of int32, truncating the
// float products back to int32. Used to apply the state-transition matrix.
static void mul_matvec_32f(int32_t Ab[2], float A[2][2], int32_t b[2]) {
    const float row0 = A[0][0] * b[0] + A[0][1] * b[1]; // position + dt * velocity
    const float row1 = A[1][0] * b[0] + A[1][1] * b[1]; // velocity (A[1][0] == 0)
    Ab[0] = static_cast<int32_t>(row0);
    Ab[1] = static_cast<int32_t>(row1);
}
// AB = trans_mat * B (2x2), truncating float products to int32. The second
// row deliberately drops the trans_mat[1][0] terms: the state-transition
// matrix used by the caller always has a zero there.
static void mul_matmat_32f(int32_t AB[2][2], float trans_mat[2][2], int32_t B[2][2]) {
    const float t00 = trans_mat[0][0];
    const float t01 = trans_mat[0][1];
    const float t11 = trans_mat[1][1];
    AB[0][0] = static_cast<int32_t>(t00 * B[0][0] + t01 * B[1][0]);
    AB[0][1] = static_cast<int32_t>(t00 * B[0][1] + t01 * B[1][1]);
    AB[1][0] = static_cast<int32_t>(t11 * B[1][0]);
    AB[1][1] = static_cast<int32_t>(t11 * B[1][1]);
}
#ifndef KF_USE_PARTIAL_64F
// AB = A * B for 2x2 int32 matrices (pure integer fallback path).
static void mul_matmat_32i(int32_t AB[2][2], int32_t A[2][2], int32_t B[2][2]) {
    for (int r = 0; r < 2; ++r)
        for (int c = 0; c < 2; ++c)
            AB[r][c] = A[r][0] * B[0][c] + A[r][1] * B[1][c];
}
// ABt = A * transpose(B) for 2x2 int32 matrices (pure integer fallback path).
static void mul_matmatT_32i(int32_t ABt[2][2], int32_t A[2][2], int32_t B[2][2]) {
    for (int r = 0; r < 2; ++r)
        for (int c = 0; c < 2; ++c)
            ABt[r][c] = A[r][0] * B[c][0] + A[r][1] * B[c][1];
}
#endif
// ABt = A * transpose(B) for an int32 A and a float B, truncating each float
// product back to int32.
static void mul_matmatT_32f(int32_t ABt[2][2], int32_t A[2][2], float B[2][2]) {
    for (int r = 0; r < 2; ++r)
        for (int c = 0; c < 2; ++c)
            ABt[r][c] = static_cast<int32_t>(A[r][0] * B[c][0] + A[r][1] * B[c][1]);
}
// A_B = A + B, element-wise, for 2x2 int32 matrices.
static void add_matmat_32i(int32_t A_B[2][2], int32_t A[2][2], int32_t B[2][2]) {
    for (int r = 0; r < 2; ++r)
        for (int c = 0; c < 2; ++c)
            A_B[r][c] = A[r][c] + B[r][c];
}
// 1-D Kalman time-update (prediction) for the constant-velocity model
// x = [position, velocity]. Writes the prior into kf->Xk / kf->Pk and then
// copies it back into kf->X / kf->P.
void KalmanFilterNoOpencv::kalmanfilter1d32i_predict_phase(kalmanfilter1d32i *kf, float dt) {
    float F[2][2] = {{1.f, 1.f}, {0.f, 1.f}};
    float A[2][2] = {{1.f, 1.f}, {0.f, 1.f}};
    int32_t AP[2][2];
    int32_t APAt[2][2];
    float weight = 8.f; // 2^(KALMAN_FILTER_NSHIFT - 1)
    float delta_t = dt * weight;
    F[0][1] = delta_t;
    // NOTE(review): the state is advanced with F (dt-scaled) while the
    // covariance is propagated with A, which keeps a fixed unit time-step.
    // Looks deliberate in this fixed-point port, but worth confirming.
    // Predict state
    // - [x(k) = F x(k-1)]
    mul_matvec_32f(kf->Xk, F, kf->X);
    // Predict error estimate covariance matrix (Predicted estimate covariance) : P(k)
    // - [P(k) = F P(k-1) Ft + Q]
    mul_matmat_32f(AP, A, kf->P);
    mul_matmatT_32f(APAt, AP, A);
    add_matmat_32i(kf->Pk, APAt, kf->Q);
    // Update kf->x from x(k-1) to x(k)
    kf->X[0] = kf->Xk[0];
    kf->X[1] = kf->Xk[1];
    // Update kf->P from P(k-1) to P(k)
    kf->P[0][0] = kf->Pk[0][0];
    kf->P[0][1] = kf->Pk[0][1];
    kf->P[1][0] = kf->Pk[1][0];
    kf->P[1][1] = kf->Pk[1][1];
}
// 1-D Kalman measurement update: fuses observation z into *kf and writes the
// corrected position estimate to *x. The measurement matrix is H = [1 0]
// (only position is observed), which is why S and K reduce to scalars built
// from the first column of Pk.
void KalmanFilterNoOpencv::kalmanfilter1d32i_update_phase(kalmanfilter1d32i *kf, int32_t z, int32_t *x) {
    int32_t y;
    int32_t S;
    int32_t K[2];
    int32_t I_KH[2][2];
    // Uninitialized/empty prior: pass the measurement through unchanged.
    if (kf->Xk[0] == 0 && kf->Pk[0][0] == 0) {
        (*x) = z;
        return;
    }
    // Compute measurement pre-fit residual : Y
    // H : measurement matrix
    // z(k) : actual reading(observed) result of k
    // - [ Y(k) = z(k) - H * X(k) ]
    y = z - kf->Xk[0];
    // Compute residual covariance : S
    // - [ S = H*P(k)*Ht + R]
    S = kf->Pk[0][0] + kf->R;
    // Degenerate covariance: avoid division by zero, trust the measurement.
    if (S == 0) {
        (*x) = z;
        return;
    }
    // Compute optimal kalman gain : K(k)
    // - [ K(k) = P(k)*Ht*inv(S)]
    // K[0] = kf->P[0][0]/S;
    // K[1] = kf->P[1][0]/S;
    // The division by S is deferred (K kept as numerators) to preserve
    // integer precision; it is applied in the expressions below.
    K[0] = kf->Pk[0][0];
    K[1] = kf->Pk[1][0];
    // Get updated state
    // - [ x'(k) = x(k) + K'*Y )]
    kf->X[0] = kf->Xk[0] + K[0] * y / S;
    kf->X[1] = kf->Xk[1] + K[1] * y / S;
    // 7. Get updated estimate covariance : P'(k)
    // - [ P'(k) = (I - K(k) * H) * P(k)]
    // I_KH is (I - K*H) scaled by S, matching the deferred division above.
    I_KH[0][0] = S - K[0];
    I_KH[0][1] = 0;
    I_KH[1][0] = -K[1];
    I_KH[1][1] = S;
    // modified by chan - 20110329 - start
    // Here, INTEGER is 32bit.
    // To avoid overflow in the below matrix multiplecation, this code is modified.
#ifdef KF_USE_PARTIAL_64F
    {
        // 64-bit (double) intermediates avoid int32 overflow in I_KH * Pk.
        kf->P[0][0] = static_cast<int32_t>(
            (I_KH[0][0] * static_cast<double>(kf->Pk[0][0]) + I_KH[0][1] * static_cast<double>(kf->Pk[1][0])) / S);
        kf->P[0][1] = static_cast<int32_t>(
            (I_KH[0][0] * static_cast<double>(kf->Pk[0][1]) + I_KH[0][1] * static_cast<double>(kf->Pk[1][1])) / S);
        kf->P[1][0] = static_cast<int32_t>(
            (I_KH[1][0] * static_cast<double>(kf->Pk[0][0]) + I_KH[1][1] * static_cast<double>(kf->Pk[1][0])) / S);
        kf->P[1][1] = static_cast<int32_t>(
            (I_KH[1][0] * static_cast<double>(kf->Pk[0][1]) + I_KH[1][1] * static_cast<double>(kf->Pk[1][1])) / S);
    }
#else // KF_USE_PARTIAL_64F
    {
        // Pure-integer path: may overflow for large covariances.
        mul_matmat_32i(kf->P, I_KH, kf->Pk);
        kf->P[0][0] /= S;
        kf->P[0][1] /= S;
        kf->P[1][0] /= S;
        kf->P[1][1] /= S;
    }
#endif // KF_USE_PARTIAL_64F
    // 9. return result
    (*x) = kf->X[0];
}
}; // namespace vas

View File

@ -0,0 +1,91 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_OT_KALMAN_FILTER_NO_OPENCV_HPP
#define VAS_OT_KALMAN_FILTER_NO_OPENCV_HPP
#include <vas/common.hpp>
#include <opencv2/core.hpp>
const float kMeasurementNoiseCoordinate = 0.001f;
const float kMeasurementNoiseRectSize = 0.002f;
namespace vas {
/*
* This class implements a kernel of a standard kalman filter without using of OpenCV.
* It supplies simple and common APIs to be use by all components.
*
*/
/*
 * Fixed-point bank of four 1-D Kalman filters (center-x, center-y,
 * half-width, half-height) used to smooth and predict object bounding boxes
 * without depending on cv::KalmanFilter.
 */
class KalmanFilterNoOpencv {
  public:
    /** @brief Create & initialize KalmanFilterNoOpencv
     * This function initializes the Kalman filter with a specific value of the ratio for measurement noise covariance
     * matrix. If you consider the detection method reliable enough, it is recommended to use a lower ratio value than
     * the default value.
     * @code
     * cv::Rect2f input_rect(50.f, 50.f, 100.f, 100.f);
     * cv::Rect2f predicted, corrected;
     * vas::KalmanFilterNoOpencv *kalman_filter = new vas::KalmanFilterNoOpencv(input_rect);
     * predicted = kalman_filter->Predict();
     * corrected = kalman_filter->Correct(cv::Rect(52, 52, 105, 105));
     * delete kalman_filter;
     * @endcode
     * @param
     *  initial_rect Initial rectangular coordinates
     */
    explicit KalmanFilterNoOpencv(const cv::Rect2f &initial_rect);
    KalmanFilterNoOpencv() = delete;
    KalmanFilterNoOpencv(const KalmanFilterNoOpencv &) = delete;
    KalmanFilterNoOpencv &operator=(const KalmanFilterNoOpencv &) = delete;
    /* @brief Destroy Kalman filter kernel
     */
    ~KalmanFilterNoOpencv() = default;
    /*
     * Computes and returns the predicted state.
     * 'delta_t' scales the position state-transition and is cached for the
     * next Correct() call (the size filters always use dt == 0).
     */
    cv::Rect2f Predict(float delta_t = 0.033f);
    /*
     * Updates the predicted state from the measurement and returns the
     * corrected bounding box.
     */
    cv::Rect2f Correct(const cv::Rect2f &detect_rect);

  private:
    // State of one 1-D constant-velocity filter, in fixed point
    // (KALMAN_FILTER_NSHIFT fractional bits; see the .cpp).
    struct kalmanfilter1d32i {
        int32_t X[2];     // state [position, velocity]
        int32_t P[2][2];  // estimate covariance
        int32_t Q[2][2];  // process noise covariance
        int32_t R;        // measurement noise covariance
        int32_t Pk[2][2]; // buffer to copy from Pk-1 to Pk
        int32_t Xk[2];    // buffer to copy form Xk-1 to Xk
    };
    void kalmanfilter1d32i_init(kalmanfilter1d32i *kf, int32_t *z, int32_t var);
    void kalmanfilter1d32i_predict_phase(kalmanfilter1d32i *kf, float dt);
    void kalmanfilter1d32i_update_phase(kalmanfilter1d32i *kf, int32_t z, int32_t *x);
    // NOTE(review): declared but no definition visible in this port -- confirm
    // before calling.
    void kalmanfilter1d32i_filter(kalmanfilter1d32i *kf, int32_t *z, int32_t dt, int32_t *x);
    kalmanfilter1d32i kfX;
    kalmanfilter1d32i kfY;
    kalmanfilter1d32i kfRX;
    kalmanfilter1d32i kfRY;
    float noise_ratio_coordinates_;
    float noise_ratio_rect_size_;
    float delta_t_;
};
}; // namespace vas
#endif // VAS_OT_KALMAN_FILTER_NO_OPENCV_HPP

View File

@ -0,0 +1,325 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#include "hungarian_wrap.hpp"
#include "../../../common/exception.hpp"
#include <vas/common.hpp>
#include <stdint.h>
#include <stdio.h>
const float kHungarianValueScale = 1024.0f;
namespace vas {
namespace ot {
// Converts the float cost map into a scaled int32 matrix (kHungarianValueScale)
// and caches raw row pointers for the solver.
HungarianAlgo::HungarianAlgo(const cv::Mat_<float> &cost_map)
    : size_width_(cost_map.cols), size_height_(cost_map.rows), int_cost_map_rows_(), int_cost_map_(), problem_() {
    int_cost_map_.create(size_height_, size_width_);
    int_cost_map_rows_.resize(size_height_, nullptr);
    for (int32_t row = 0; row < size_height_; ++row) {
        int32_t *row_ptr = reinterpret_cast<int32_t *>(int_cost_map_.ptr(row));
        int_cost_map_rows_[row] = row_ptr;
        for (int32_t col = 0; col < size_width_; ++col)
            row_ptr[col] = static_cast<int32_t>(cost_map(row, col) * kHungarianValueScale);
    }
}
// Destructor: releases solver state. FreeHungarian is currently a no-op (the
// std::vector members clean up themselves); kept for symmetry with Solve().
HungarianAlgo::~HungarianAlgo() {
    FreeHungarian();
}
// Solves the assignment problem for the cost map given at construction.
// Returns a square 0/1 matrix where a 1 at (r, c) marks an assigned pair;
// the matrix side equals max(rows, cols) of the original cost map.
cv::Mat_<uint8_t> HungarianAlgo::Solve() {
    ETHROW(size_height_ > 0 && size_width_ > 0, invalid_argument, "Initialized with invalid cost_map size in Solve");
    // Build the (square, padded) Hungarian problem from the cost matrix.
    const int32_t matrix_size = InitHungarian(kHungarianModeMinimizeCost);
    // Run the solver.
    SolveHungarian();
    // Export the assignment into a cv matrix.
    cv::Mat_<uint8_t> assignment_map(matrix_size, matrix_size);
    for (int32_t row = 0; row < matrix_size; ++row) {
        for (int32_t col = 0; col < matrix_size; ++col) {
            assignment_map(row, col) = static_cast<uint8_t>(problem_.assignment[row][col]);
        }
    }
    // Release per-solve state.
    FreeHungarian();
    return assignment_map;
}
// Returns the row size of the assignment matrix
// Builds the square Hungarian problem from the scaled cost matrix, padding
// the smaller dimension with zero-cost rows/columns. Returns the side length
// of the (square) assignment matrix.
int32_t HungarianAlgo::InitHungarian(int32_t mode) {
    ETHROW(size_height_ > 0 && size_width_ > 0, invalid_argument,
           "Initialized with invalid cost_map size in InitHungarian");
    int32_t **cost_matrix = &int_cost_map_rows_[0];
    const int32_t n = (size_width_ < size_height_) ? size_height_ : size_width_;
    problem_.num_rows = n;
    problem_.num_cols = n;
    problem_.cost.assign(n, std::vector<int32_t>(n, 0));
    problem_.assignment.assign(n, std::vector<int32_t>(n, 0));
    // Copy costs (padding cells stay 0) and track the maximum for the
    // utility-maximization conversion below.
    int32_t max_cost = 0;
    for (int32_t i = 0; i < n; ++i) {
        for (int32_t j = 0; j < n; ++j) {
            const int32_t c = (i < size_height_ && j < size_width_) ? cost_matrix[i][j] : 0;
            problem_.cost[i][j] = c;
            if (max_cost < c)
                max_cost = c;
        }
    }
    if (mode == kHungarianModeMaximizeUtil) {
        // Turn a utility-maximization problem into cost minimization.
        for (int32_t i = 0; i < n; ++i) {
            for (int32_t j = 0; j < n; ++j) {
                problem_.cost[i][j] = max_cost - problem_.cost[i][j];
            }
        }
    } else if (mode == kHungarianModeMinimizeCost) {
        // Costs are already minimization-ready.
    } else {
        TRACE(" Unknown mode. Mode was set to HUNGARIAN_MODE_MINIMIZE_COST");
    }
    return problem_.num_rows;
}
//
//
// Frees memory allocated by InitHungarian. Kept as an explicit hook mirroring
// the original C implementation; problem_'s std::vector members release their
// storage automatically, so there is nothing to do here.
void HungarianAlgo::FreeHungarian() {
}
// Core Hungarian (Kuhn-Munkres) solver over problem_.cost; fills
// problem_.assignment with kHungarianAssigned / kHungarianNotAssigned.
// The implementation follows the classic O(n^3) alternating-path scheme
// (column reduction, initial greedy matching, then breakthrough/augmentation
// stages driven by row potentials row_dec and column potentials col_inc).
// Control flow deliberately uses gotos, matching the original C code.
void HungarianAlgo::SolveHungarian() {
    int32_t k = 0;
    int32_t l = 0;
    int32_t unmatched = 0;
    ETHROW(problem_.cost.size() != 0 && problem_.assignment.size() != 0, logic_error, "Unexpected solve");
    // vert_col[r]  : column matched to row r (-1 if unmatched)
    // vert_row[c]  : row matched to column c (-1 if unmatched)
    // row_unselected : queue of unmatched rows forming the alternating forest
    std::unique_ptr<int32_t[]> vert_col(new int32_t[problem_.num_rows]);
    std::unique_ptr<int32_t[]> row_unselected(new int32_t[problem_.num_rows]);
    std::unique_ptr<int32_t[]> row_dec(new int32_t[problem_.num_rows]);
    std::unique_ptr<int32_t[]> row_slack(new int32_t[problem_.num_rows]);
    std::unique_ptr<int32_t[]> vert_row(new int32_t[problem_.num_cols]);
    std::unique_ptr<int32_t[]> parent_row(new int32_t[problem_.num_cols]);
    std::unique_ptr<int32_t[]> col_inc(new int32_t[problem_.num_cols]);
    std::unique_ptr<int32_t[]> slack(new int32_t[problem_.num_cols]);
    for (int32_t i = 0; i < problem_.num_rows; ++i) {
        vert_col[i] = 0;
        row_unselected[i] = 0;
        row_dec[i] = 0;
        row_slack[i] = 0;
    }
    for (int32_t i = 0; i < problem_.num_cols; ++i) {
        vert_row[i] = 0;
        parent_row[i] = 0;
        col_inc[i] = 0;
        slack[i] = 0;
    }
    for (int32_t i = 0; i < problem_.num_rows; ++i)
        for (int32_t j = 0; j < problem_.num_cols; ++j)
            problem_.assignment[i][j] = kHungarianNotAssigned;
    // Begin subtract column minima in order to start with lots of zeroes 12
    TRACE(" Using heuristic");
    for (int32_t i = 0; i < problem_.num_cols; ++i) {
        int32_t s = problem_.cost[0][i];
        for (int32_t j = 1; j < problem_.num_rows; ++j) {
            if (problem_.cost[j][i] < s)
                s = problem_.cost[j][i];
        }
        if (s != 0) {
            for (int32_t j = 0; j < problem_.num_rows; ++j)
                problem_.cost[j][i] -= s;
        }
    }
    // End subtract column minima in order to start with lots of zeroes 12
    // Begin initial state 16
    // Greedy initial matching: each row takes the first zero-cost column not
    // already claimed; leftover rows are queued in row_unselected.
    int32_t t = 0;
    for (int32_t i = 0; i < problem_.num_cols; ++i) {
        vert_row[i] = -1;
        parent_row[i] = -1;
        col_inc[i] = 0;
        slack[i] = kIntMax;
    }
    for (k = 0; k < problem_.num_rows; ++k) {
        bool is_row_done = false;
        int32_t s = problem_.cost[k][0];
        for (l = 1; l < problem_.num_cols; ++l) {
            if (problem_.cost[k][l] < s)
                s = problem_.cost[k][l];
        }
        row_dec[k] = s;
        for (l = 0; l < problem_.num_cols; ++l) {
            if (s == problem_.cost[k][l] && vert_row[l] < 0) {
                vert_col[k] = l;
                vert_row[l] = k;
                TRACE(" Matching col (%d)==row (%d)", l, k);
                is_row_done = true;
                break;
            }
        }
        if (is_row_done == true) {
            continue;
        } else {
            vert_col[k] = -1;
            TRACE(" Node %d: unmatched row %d", t, k);
            row_unselected[t++] = k;
        }
    }
    // End initial state 16
    // Begin Hungarian algorithm 18
    if (t == 0)
        goto done;
    unmatched = t;
    while (1) {
        TRACE("Matched %d rows.", problem_.num_rows - t);
        int32_t q = 0;
        while (1) {
            while (q < t) {
                // Begin explore node q of the forest 19
                k = row_unselected[q];
                int32_t s = row_dec[k];
                for (l = 0; l < problem_.num_cols; ++l) {
                    if (slack[l] == 0)
                        continue;
                    int32_t del = problem_.cost[k][l] - s + col_inc[l];
                    if (del >= slack[l])
                        continue;
                    if (del == 0) {
                        if (vert_row[l] < 0)
                            goto leave_break_thru;
                        slack[l] = 0;
                        parent_row[l] = k;
                        TRACE("node %d: row %d==col %d--row %d", t, vert_row[l], l, k);
                        row_unselected[t++] = vert_row[l];
                    } else {
                        slack[l] = del;
                        row_slack[l] = k;
                    }
                }
                // End explore node q of the forest 19
                q++;
            }
            // Begin introduce a new zero into the matrix 21
            // No breakthrough: lower the reduced costs by the minimum slack to
            // create at least one new admissible (zero) edge.
            int32_t s = kIntMax;
            for (int32_t i = 0; i < problem_.num_cols; ++i) {
                if (slack[i] && slack[i] < s)
                    s = slack[i];
            }
            for (q = 0; q < t; ++q) {
                row_dec[row_unselected[q]] += s;
            }
            for (l = 0; l < problem_.num_cols; ++l) {
                if (slack[l]) {
                    slack[l] -= s;
                    if (slack[l] == 0) {
                        // Begin look at a new zero 22
                        k = row_slack[l];
                        TRACE("Decreasing uncovered elements by %d produces zero at [%d,%d]", s, k, l);
                        if (vert_row[l] < 0) {
                            for (int32_t j = l + 1; j < problem_.num_cols; ++j) {
                                if (slack[j] == 0)
                                    col_inc[j] += s;
                            }
                            goto leave_break_thru;
                        } else {
                            parent_row[l] = k;
                            TRACE("node %d: row %d==col %d--row %d", t, vert_row[l], l, k);
                            row_unselected[t++] = vert_row[l];
                        }
                        // End look at a new zero 22
                    }
                } else {
                    col_inc[l] += s;
                }
            }
            // End introduce a new zero into the matrix 21
        } // while (1)
    leave_break_thru:
        // Augment along the alternating path back to the forest root,
        // flipping matched/unmatched edges.
        TRACE("Breakthrough at node %d of %d!", q, t);
        while (1) {
            int32_t j = vert_col[k];
            vert_col[k] = l;
            vert_row[l] = k;
            TRACE("rematching col %d==row %d", l, k);
            if (j < 0)
                break;
            k = parent_row[j];
            l = j;
        }
        // End update the matching 20
        if (--unmatched == 0)
            goto done;
        // Begin get ready for another stage 17
        t = 0;
        for (int32_t i = 0; i < problem_.num_cols; ++i) {
            parent_row[i] = -1;
            slack[i] = kIntMax;
        }
        for (int32_t i = 0; i < problem_.num_rows; ++i) {
            if (vert_col[i] < 0) {
                TRACE(" Node %d: unmatched row %d", t, i);
                row_unselected[t++] = i;
            }
        }
        // End get ready for another stage 17
    }
done:
    // Export the matching and restore the cost matrix from the potentials.
    for (int32_t i = 0; i < problem_.num_rows; ++i) {
        problem_.assignment[i][vert_col[i]] = kHungarianAssigned;
    }
    for (int32_t i = 0; i < problem_.num_rows; ++i) {
        for (int32_t j = 0; j < problem_.num_cols; ++j) {
            problem_.cost[i][j] = problem_.cost[i][j] - row_dec[i] + col_inc[j];
        }
    }
}
}; // namespace ot
}; // namespace vas

View File

@ -0,0 +1,71 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_OT_HUNGARIAN_WRAP_HPP
#define VAS_OT_HUNGARIAN_WRAP_HPP
#include <opencv2/core.hpp>
#include <cstdint>
#include <vector>
namespace vas {
namespace ot {
const int32_t kHungarianModeMinimizeCost = 0;
const int32_t kHungarianModeMaximizeUtil = 1;
typedef struct {
int32_t num_rows;
int32_t num_cols;
std::vector<std::vector<int32_t>> cost;
std::vector<std::vector<int32_t>> assignment;
} hungarian_problem_t;
// Hungarian (Kuhn-Munkres) assignment solver over a float cost map.
// Costs are scaled to int32 internally; Solve() returns a square 0/1
// assignment matrix of side max(rows, cols).
class HungarianAlgo {
  public:
    explicit HungarianAlgo(const cv::Mat_<float> &cost_map);
    ~HungarianAlgo();

    // Runs the solver; each 1 in the returned matrix marks an assigned pair.
    cv::Mat_<uint8_t> Solve();

    HungarianAlgo() = delete;
    HungarianAlgo(const HungarianAlgo &) = delete;
    HungarianAlgo(HungarianAlgo &&) = delete;
    HungarianAlgo &operator=(const HungarianAlgo &) = delete;
    HungarianAlgo &operator=(HungarianAlgo &&) = delete;

  protected:
    /* This method initializes the hungarian_problem structure and the cost matrices (missing lines or columns are
     *filled with 0). It returns the size of the quadratic(!) assignment matrix.
     **/
    int32_t InitHungarian(int32_t mode);
    // Computes the optimal assignment
    void SolveHungarian();
    // Free the memory allocated by Init
    void FreeHungarian();
    int32_t size_width_;
    int32_t size_height_;

  private:
    const int32_t kHungarianNotAssigned = 0;
    const int32_t kHungarianAssigned = 1;
    // INT32_MAX is provided by <cstdint> (included above); the previous
    // INT_MAX required <climits>, which this header does not include.
    const int32_t kIntMax = INT32_MAX;
    std::vector<int32_t *> int_cost_map_rows_;
    cv::Mat_<int32_t> int_cost_map_;
    hungarian_problem_t problem_;
};
}; // namespace ot
}; // namespace vas
#endif // VAS_OT_HUNGARIAN_WRAP_HPP

View File

@ -0,0 +1,183 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#include "objects_associator.hpp"
#include "hungarian_wrap.hpp"
#include "rgb_histogram.hpp"
#include "../prof_def.hpp"
#include "../../../common/exception.hpp"
namespace vas {
namespace ot {
const float kAssociationCostThreshold = 1.0f;
const float kRgbHistDistScale = 0.25f;
const float kNormCenterDistScale = 0.5f;
const float kNormShapeDistScale = 0.75f;
// tracking_per_class: when true, detections are only associated with
// tracklets that carry the same class label.
ObjectsAssociator::ObjectsAssociator(bool tracking_per_class) : tracking_per_class_(tracking_per_class) {
}
ObjectsAssociator::~ObjectsAssociator() {
}
// Associates detections with tracklets by building a combined cost table
// (position + shape (+ optional RGB-appearance) distances, each with its own
// adaptive scale) and solving it with the Hungarian algorithm. Extra columns
// at a fixed kAssociationCostThreshold let a detection stay unassigned when
// every tracklet is too expensive.
// Returns:
//   .first[d]  - true if detection d was matched to some tracklet
//   .second[t] - index of the detection matched to tracklet t, or -1
std::pair<std::vector<bool>, std::vector<int32_t>>
ObjectsAssociator::Associate(const std::vector<Detection> &detections,
                             const std::vector<std::shared_ptr<Tracklet>> &tracklets,
                             const std::vector<cv::Mat> *detection_rgb_features) {
    PROF_START(PROF_COMPONENTS_OT_ASSOCIATE_COMPUTE_DIST_TABLE);
    std::vector<std::vector<float>> d2t_rgb_dist_table;
    if (detection_rgb_features != nullptr) {
        d2t_rgb_dist_table = ComputeRgbDistance(detections, tracklets, detection_rgb_features);
    }
    auto n_detections = detections.size();
    auto n_tracklets = tracklets.size();
    std::vector<bool> d_is_associated(n_detections, false);
    std::vector<int32_t> t_associated_d_index(n_tracklets, -1);
    // Compute detection-tracklet normalized position distance table
    // (1000.0f marks pairs that are never compared, e.g. class mismatch).
    std::vector<std::vector<float>> d2t_pos_dist_table(n_detections, std::vector<float>(n_tracklets, 1000.0f));
    for (std::size_t d = 0; d < n_detections; ++d) {
        TRACE("input detect(%.0f,%.0f %.0fx%.0f)", detections[d].rect.x, detections[d].rect.y, detections[d].rect.width,
              detections[d].rect.height);
        for (std::size_t t = 0; t < n_tracklets; ++t) {
            if (tracking_per_class_ && (detections[d].class_label != tracklets[t]->label))
                continue;
            d2t_pos_dist_table[d][t] = NormalizedCenterDistance(detections[d].rect, tracklets[t]->trajectory.back());
        }
    }
    // Compute detection-tracklet normalized shape distance table
    std::vector<std::vector<float>> d2t_shape_dist_table(n_detections, std::vector<float>(n_tracklets, 1000.0f));
    for (std::size_t d = 0; d < n_detections; ++d) {
        for (std::size_t t = 0; t < n_tracklets; ++t) {
            if (tracking_per_class_ && (detections[d].class_label != tracklets[t]->label))
                continue;
            d2t_shape_dist_table[d][t] = NormalizedShapeDistance(detections[d].rect, tracklets[t]->trajectory.back());
        }
    }
    PROF_END(PROF_COMPONENTS_OT_ASSOCIATE_COMPUTE_DIST_TABLE);
    PROF_START(PROF_COMPONENTS_OT_ASSOCIATE_COMPUTE_COST_TABLE);
    // Compute detection-tracklet association cost table
    // (n_detections extra columns encode the "leave unassigned" option).
    cv::Mat_<float> d2t_cost_table;
    d2t_cost_table.create(static_cast<int32_t>(detections.size()),
                          static_cast<int32_t>(tracklets.size() + detections.size()));
    d2t_cost_table = kAssociationCostThreshold + 1.0f;
    for (std::size_t t = 0; t < n_tracklets; ++t) {
        const auto &tracklet = tracklets[t];
        float rgb_hist_dist_scale = kRgbHistDistScale;
        float const_ratio = 0.95f;
        float norm_center_dist_scale =
            (1.0f - const_ratio) * kNormCenterDistScale * tracklet->association_delta_t / 0.033f +
            const_ratio * kNormCenterDistScale; // adaptive to delta_t
        float norm_shape_dist_scale =
            (1.0f - const_ratio) * kNormShapeDistScale * tracklet->association_delta_t / 0.033f +
            const_ratio * kNormShapeDistScale; // adaptive to delta_t
        // Normalization term of the (log-domain) cost so the scales act as
        // per-component variances.
        float log_term = logf(rgb_hist_dist_scale * norm_center_dist_scale * norm_shape_dist_scale);
        for (std::size_t d = 0; d < n_detections; ++d) {
            if (tracking_per_class_ && (detections[d].class_label != tracklets[t]->label))
                continue;
            d2t_cost_table(static_cast<int32_t>(d), static_cast<int32_t>(t)) =
                log_term + d2t_pos_dist_table[d][t] / norm_center_dist_scale +
                d2t_shape_dist_table[d][t] / norm_shape_dist_scale;
            if (d2t_rgb_dist_table.empty() == false) {
                d2t_cost_table(static_cast<int32_t>(d), static_cast<int32_t>(t)) +=
                    d2t_rgb_dist_table[d][t] / kRgbHistDistScale;
            }
        }
    }
    for (std::size_t d = 0; d < n_detections; ++d) {
        d2t_cost_table(static_cast<int32_t>(d), static_cast<int32_t>(d + n_tracklets)) =
            kAssociationCostThreshold;
    }
    PROF_END(PROF_COMPONENTS_OT_ASSOCIATE_COMPUTE_COST_TABLE);
    // Solve detection-tracking association using Hungarian algorithm
    PROF_START(PROF_COMPONENTS_OT_ASSOCIATE_WITH_HUNGARIAN);
    HungarianAlgo hungarian(d2t_cost_table);
    cv::Mat_<uint8_t> d2t_assign_table = hungarian.Solve();
    PROF_END(PROF_COMPONENTS_OT_ASSOCIATE_WITH_HUNGARIAN);
    // Read back the assignment; a 1 in a tracklet column is a real match
    // (1s in the padding columns mean "unassigned" and are ignored here).
    for (std::size_t d = 0; d < n_detections; ++d) {
        for (std::size_t t = 0; t < n_tracklets; ++t) {
            if (d2t_assign_table(static_cast<int32_t>(d), static_cast<int32_t>(t))) {
                d_is_associated[d] = true;
                t_associated_d_index[t] = static_cast<int32_t>(d);
                break;
            }
        }
    }
    return std::make_pair(d_is_associated, t_associated_d_index);
}
std::vector<std::vector<float>>
ObjectsAssociator::ComputeRgbDistance(const std::vector<Detection> &detections,
const std::vector<std::shared_ptr<Tracklet>> &tracklets,
const std::vector<cv::Mat> *detection_rgb_features) {
auto n_detections = detections.size();
auto n_tracklets = tracklets.size();
// Compute detection-tracklet RGB feature distance table
std::vector<std::vector<float>> d2t_rgb_dist_table(n_detections, std::vector<float>(n_tracklets, 1000.0f));
for (std::size_t d = 0; d < n_detections; ++d) {
const auto &d_rgb_feature = (*detection_rgb_features)[d];
for (std::size_t t = 0; t < n_tracklets; ++t) {
if (tracking_per_class_ && (detections[d].class_label != tracklets[t]->label))
continue;
// Find best match in rgb feature history
float min_dist = 1000.0f;
for (const auto &t_rgb_feature : *(tracklets[t]->GetRgbFeatures())) {
min_dist = std::min(min_dist, 1.0f - RgbHistogram::ComputeSimilarity(d_rgb_feature, t_rgb_feature));
}
d2t_rgb_dist_table[d][t] = min_dist;
}
}
return d2t_rgb_dist_table;
}
// Euclidean distance between box centers, normalized by the half-perimeter
// (mean of width and height) of the smaller box, so the result is roughly
// scale-invariant.
float ObjectsAssociator::NormalizedCenterDistance(const cv::Rect2f &r1, const cv::Rect2f &r2) {
    float normalizer = std::min(0.5f * (r1.width + r1.height), 0.5f * (r2.width + r2.height));
    float r1x = r1.x + 0.5f * r1.width;
    float r1y = r1.y + 0.5f * r1.height;
    float r2x = r2.x + 0.5f * r2.width;
    float r2y = r2.y + 0.5f * r2.height;
    const float dx = (r2x - r1x) / normalizer;
    const float dy = (r2y - r1y) / normalizer;
    return std::sqrt(dx * dx + dy * dy);
}
// Relative difference in width and height between two boxes, normalized by
// the (integer-truncated) dimensions of the smaller box.
float ObjectsAssociator::NormalizedShapeDistance(const cv::Rect2f &r1, const cv::Rect2f &r2) {
    int32_t normalize_w = int32_t(r1.width);
    int32_t normalize_h = int32_t(r1.height);
    if (r2.width + r2.height < r1.width + r1.height) {
        normalize_w = int32_t(r2.width);
        normalize_h = int32_t(r2.height);
    }
    // Guard against degenerate boxes narrower/shorter than 1px: the truncated
    // normalizer would be 0 and the divisions below would yield inf/NaN.
    if (normalize_w < 1)
        normalize_w = 1;
    if (normalize_h < 1)
        normalize_h = 1;
    float dw = (r2.width - r1.width) / normalize_w;
    float dh = (r2.height - r1.height) / normalize_h;
    return std::sqrt(dw * dw + dh * dh);
}
}; // namespace ot
}; // namespace vas

View File

@ -0,0 +1,41 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_OT_OBJECTS_ASSOCIATOR_HPP
#define VAS_OT_OBJECTS_ASSOCIATOR_HPP
#include "../tracklet.hpp"
namespace vas {
namespace ot {
// Associates per-frame detections with existing tracklets using position,
// shape and (optionally) RGB-histogram appearance distances, solved as an
// assignment problem (see Associate in the .cpp).
class ObjectsAssociator {
  public:
    // tracking_per_class: restrict association to pairs with equal class labels.
    explicit ObjectsAssociator(bool tracking_per_class);
    virtual ~ObjectsAssociator();
    ObjectsAssociator() = delete;

  public:
    // Returns {matched flag per detection, matched detection index (or -1) per tracklet}.
    std::pair<std::vector<bool>, std::vector<int32_t>>
    Associate(const std::vector<Detection> &detections, const std::vector<std::shared_ptr<Tracklet>> &tracklets,
              const std::vector<cv::Mat> *detection_rgb_features = nullptr);

  private:
    // Detection-to-tracklet appearance distance table (1 - histogram similarity).
    std::vector<std::vector<float>> ComputeRgbDistance(const std::vector<Detection> &detections,
                                                       const std::vector<std::shared_ptr<Tracklet>> &tracklets,
                                                       const std::vector<cv::Mat> *detection_rgb_features);
    static float NormalizedCenterDistance(const cv::Rect2f &r1, const cv::Rect2f &r2);
    static float NormalizedShapeDistance(const cv::Rect2f &r1, const cv::Rect2f &r2);

  private:
    bool tracking_per_class_;
};
}; // namespace ot
}; // namespace vas
#endif // VAS_OT_OBJECTS_ASSOCIATOR_HPP

View File

@ -0,0 +1,126 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#include "rgb_histogram.hpp"
namespace vas {
namespace ot {
// rgb_bin_size: width of each quantization bin per channel (256 must be
// divisible by it for exact coverage). The feature is one float bin per
// (b, g, r) triple, i.e. rgb_num_bins_^3 floats.
// Integer cube replaces static_cast<int32_t>(pow(...)): exact, avoids a
// float round-trip and the dependence on a <cmath> include.
RgbHistogram::RgbHistogram(int32_t rgb_bin_size)
    : rgb_bin_size_(rgb_bin_size), rgb_num_bins_(256 / rgb_bin_size),
      rgb_hist_size_(rgb_num_bins_ * rgb_num_bins_ * rgb_num_bins_) {
}
RgbHistogram::~RgbHistogram(void) {
}
// Computes the quantized RGB histogram of a 3-channel image into *hist
// (allocated/reset here as a 1 x rgb_hist_size_ CV_32FC1 row).
void RgbHistogram::Compute(const cv::Mat &image, cv::Mat *hist) {
    hist->create(1, rgb_hist_size_, CV_32FC1);
    (*hist) = cv::Scalar(0);
    AccumulateRgbHistogram(image, hist->ptr<float>());
}
// Same as Compute(), but for a 4-channel BGRA input (alpha is ignored by the
// accumulator).
void RgbHistogram::ComputeFromBgra32(const cv::Mat &image, cv::Mat *hist) {
    hist->create(1, rgb_hist_size_, CV_32FC1);
    (*hist) = cv::Scalar(0);
    AccumulateRgbHistogramFromBgra32(image, hist->ptr<float>());
}
// Length of the histogram feature vector in floats (rgb_num_bins_ cubed).
int32_t RgbHistogram::FeatureSize(void) const {
    return rgb_hist_size_;
}
// Bhattacharyya coefficient between two (unnormalized) histograms:
// sum(sqrt(h1*h2)) / sqrt(sum(h1)*sum(h2)), in [0, 1]. Histograms with
// (near-)zero total mass are treated as completely dissimilar.
float RgbHistogram::ComputeSimilarity(const cv::Mat &hist1, const cv::Mat &hist2) {
    const float eps = 0.0001f;
    const int32_t hist_size = hist1.cols;
    const float *data1 = hist1.ptr<float>();
    const float *data2 = hist2.ptr<float>();
    float corr = 0.0f;
    float sum1 = 0.0f;
    float sum2 = 0.0f;
    for (int32_t i = 0; i < hist_size; ++i) {
        const float v1 = data1[i];
        const float v2 = data2[i];
        corr += sqrtf(v1 * v2);
        sum1 += v1;
        sum2 += v2;
    }
    return (sum1 > eps && sum2 > eps) ? corr / sqrtf(sum1 * sum2) : 0.0f;
}
// Adds one vote per pixel of a 3-channel patch into the quantized
// (channel0, channel1, channel2) bin of rgb_hist.
void RgbHistogram::AccumulateRgbHistogram(const cv::Mat &patch, float *rgb_hist) const {
    for (int32_t row = 0; row < patch.rows; ++row) {
        const cv::Vec3b *px = patch.ptr<cv::Vec3b>(row);
        for (int32_t col = 0; col < patch.cols; ++col) {
            const int32_t bin0 = px[col][0] / rgb_bin_size_;
            const int32_t bin1 = px[col][1] / rgb_bin_size_;
            const int32_t bin2 = px[col][2] / rgb_bin_size_;
            rgb_hist[rgb_num_bins_ * (rgb_num_bins_ * bin0 + bin1) + bin2] += 1.0f;
        }
    }
}
// Weighted variant: each pixel contributes its per-pixel weight (CV_32F map,
// same geometry as the patch) instead of a unit vote.
void RgbHistogram::AccumulateRgbHistogram(const cv::Mat &patch, const cv::Mat &weight, float *rgb_hist) const {
    for (int32_t row = 0; row < patch.rows; ++row) {
        const cv::Vec3b *px = patch.ptr<cv::Vec3b>(row);
        const float *w = weight.ptr<float>(row);
        for (int32_t col = 0; col < patch.cols; ++col) {
            const int32_t bin0 = px[col][0] / rgb_bin_size_;
            const int32_t bin1 = px[col][1] / rgb_bin_size_;
            const int32_t bin2 = px[col][2] / rgb_bin_size_;
            rgb_hist[rgb_num_bins_ * (rgb_num_bins_ * bin0 + bin1) + bin2] += w[col];
        }
    }
}
// BGRA variant of the unit-vote accumulator: reads 4-byte pixels but bins
// only the first three channels (alpha ignored).
void RgbHistogram::AccumulateRgbHistogramFromBgra32(const cv::Mat &patch, float *rgb_hist) const {
    for (int32_t row = 0; row < patch.rows; ++row) {
        const cv::Vec4b *px = patch.ptr<cv::Vec4b>(row);
        for (int32_t col = 0; col < patch.cols; ++col) {
            const int32_t bin0 = px[col][0] / rgb_bin_size_;
            const int32_t bin1 = px[col][1] / rgb_bin_size_;
            const int32_t bin2 = px[col][2] / rgb_bin_size_;
            rgb_hist[rgb_num_bins_ * (rgb_num_bins_ * bin0 + bin1) + bin2] += 1.0f;
        }
    }
}
// Weighted BGRA variant: per-pixel weights (CV_32F map) replace unit votes;
// alpha is ignored.
void RgbHistogram::AccumulateRgbHistogramFromBgra32(const cv::Mat &patch, const cv::Mat &weight,
                                                    float *rgb_hist) const {
    for (int32_t row = 0; row < patch.rows; ++row) {
        const cv::Vec4b *px = patch.ptr<cv::Vec4b>(row);
        const float *w = weight.ptr<float>(row);
        for (int32_t col = 0; col < patch.cols; ++col) {
            const int32_t bin0 = px[col][0] / rgb_bin_size_;
            const int32_t bin1 = px[col][1] / rgb_bin_size_;
            const int32_t bin2 = px[col][2] / rgb_bin_size_;
            rgb_hist[rgb_num_bins_ * (rgb_num_bins_ * bin0 + bin1) + bin2] += w[col];
        }
    }
}
}; // namespace ot
}; // namespace vas

View File

@ -0,0 +1,42 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_OT_RGB_HISTOGRAM_HPP
#define VAS_OT_RGB_HISTOGRAM_HPP
#include <opencv2/core.hpp>
#include <cstdint>
namespace vas {
namespace ot {
// Color-histogram appearance feature used by the zero-term trackers: pixels are
// quantized into rgb_num_bins_ bins per channel and accumulated into a flat
// num_bins^3 float histogram.
class RgbHistogram {
  public:
    // rgb_bin_size: width (in intensity levels) of each per-channel bin.
    explicit RgbHistogram(int32_t rgb_bin_size);
    virtual ~RgbHistogram(void);

    // Compute the histogram of a 3-channel image into *hist.
    virtual void Compute(const cv::Mat &image, cv::Mat *hist);
    // Compute the histogram of a 4-channel (BGRA) image; alpha is ignored.
    virtual void ComputeFromBgra32(const cv::Mat &image, cv::Mat *hist);
    // Number of float elements in one feature vector.
    virtual int32_t FeatureSize(void) const; // currently 512 * float32

    // Similarity score between two histograms produced by Compute*.
    static float ComputeSimilarity(const cv::Mat &hist1, const cv::Mat &hist2);

  protected:
    int32_t rgb_bin_size_; // intensity levels per bin
    int32_t rgb_num_bins_; // bins per channel
    int32_t rgb_hist_size_; // total histogram length (num_bins^3)

    // Accumulators: unweighted / per-pixel-weighted, 3-channel / BGRA input.
    void AccumulateRgbHistogram(const cv::Mat &patch, float *rgb_hist) const;
    void AccumulateRgbHistogram(const cv::Mat &patch, const cv::Mat &weight, float *rgb_hist) const;
    void AccumulateRgbHistogramFromBgra32(const cv::Mat &patch, float *rgb_hist) const;
    void AccumulateRgbHistogramFromBgra32(const cv::Mat &patch, const cv::Mat &weight, float *rgb_hist) const;
};
}; // namespace ot
}; // namespace vas
#endif // VAS_OT_RGB_HISTOGRAM_HPP

View File

@ -0,0 +1,363 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#include "prof_def.hpp"
#include "tracker.hpp"
#include "../../common/exception.hpp"
#include <vas/ot.hpp>
#include <vas/common.hpp>
#include <memory>
namespace vas {
namespace ot {
const float kDefaultDeltaTime = 0.033f; // default inter-frame interval (~30 FPS)
const int kDefaultNumThreads = 1;
const char kNameMaxNumThreads[] = "max_num_threads"; // platform_config key
vas::Version GetVersion() noexcept {
    // Version triple comes from the OT_VERSION_* build-time definitions.
    return vas::Version(OT_VERSION_MAJOR, OT_VERSION_MINOR, OT_VERSION_PATCH);
}
std::ostream &operator<<(std::ostream &os, TrackingStatus ts) {
    // Pretty-print a tracking status; any value other than NEW/TRACKED
    // (i.e. LOST) falls through to the default label.
    switch (ts) {
    case TrackingStatus::NEW:
        os << "NEW";
        break;
    case TrackingStatus::TRACKED:
        os << "TRACKED";
        break;
    default:
        os << "LOST";
        break;
    }
    return os;
}
// Pretty-print one tracked object (rect, id, label, status).
// Uses '\n' instead of std::endl: output bytes are identical but the stream is
// not force-flushed on every line (clang-tidy performance-avoid-endl).
std::ostream &operator<<(std::ostream &os, const Object &object) {
    os << "Object:" << '\n';
    os << "    rect -> " << object.rect << '\n';
    os << "    tracking id -> " << object.tracking_id << '\n';
    os << "    class label -> " << object.class_label << '\n';
    os << "    tracking status -> " << object.status;
    return os;
}
// Internal implementation: includes OT component
class ObjectTracker::Impl {
public:
class InitParameters : public vas::ot::Tracker::InitParameters {
public:
TrackingType tracking_type;
vas::BackendType backend_type;
};
public:
explicit Impl(const InitParameters &param);
Impl() = delete;
~Impl();
Impl(const Impl &) = delete;
Impl(Impl &&) = delete;
Impl &operator=(const Impl &) = delete;
Impl &operator=(Impl &&) = delete;
public:
int32_t GetMaxNumObjects() const noexcept;
TrackingType GetTrackingType() const noexcept;
vas::ColorFormat GetInputColorFormat() const noexcept;
float GetDeltaTime() const noexcept;
vas::BackendType GetBackendType() const noexcept;
bool GetTrackingPerClass() const noexcept;
void SetDeltaTime(float delta_t);
std::vector<Object> Track(const cv::Mat &frame, const std::vector<DetectedObject> &objects);
private:
std::unique_ptr<vas::ot::Tracker> tracker_;
std::vector<std::shared_ptr<Tracklet>> produced_tracklets_;
int32_t max_num_objects_;
float delta_t_;
TrackingType tracking_type_;
vas::BackendType backend_type_;
vas::ColorFormat input_color_format_;
bool tracking_per_class_;
#ifdef DUMP_OTAV
Otav otav_;
#endif
friend class ObjectTracker::Builder;
};
namespace {
// Registered with atexit() below; currently a no-op placeholder for
// process-exit cleanup.
void vas_exit() {
}
} // anonymous namespace

// Takes ownership of the Impl created by Builder::Build.
ObjectTracker::ObjectTracker(ObjectTracker::Impl *impl) : impl_(impl) {
    atexit(vas_exit);
}

ObjectTracker::~ObjectTracker() = default;
// The public ObjectTracker API is a thin PImpl facade: every call below simply
// forwards to the corresponding ObjectTracker::Impl member.

int32_t ObjectTracker::GetMaxNumObjects() const noexcept {
    return impl_->GetMaxNumObjects();
}

TrackingType ObjectTracker::GetTrackingType() const noexcept {
    return impl_->GetTrackingType();
}

vas::ColorFormat ObjectTracker::GetInputColorFormat() const noexcept {
    return impl_->GetInputColorFormat();
}

float ObjectTracker::GetFrameDeltaTime() const noexcept {
    return impl_->GetDeltaTime();
}

vas::BackendType ObjectTracker::GetBackendType() const noexcept {
    return impl_->GetBackendType();
}

bool ObjectTracker::GetTrackingPerClass() const noexcept {
    return impl_->GetTrackingPerClass();
}

void ObjectTracker::SetFrameDeltaTime(float frame_delta_t) {
    impl_->SetDeltaTime(frame_delta_t);
}

std::vector<Object> ObjectTracker::Track(const cv::Mat &frame, const std::vector<DetectedObject> &objects) {
    return impl_->Track(frame, objects);
}
// Validates parameters and instantiates the concrete tracker.
// Only the CPU backend is supported; any other backend type throws.
ObjectTracker::Impl::Impl(const InitParameters &param)
    : max_num_objects_(param.max_num_objects), delta_t_(kDefaultDeltaTime), tracking_type_(param.tracking_type),
      backend_type_(param.backend_type), input_color_format_(param.format),
      tracking_per_class_(param.tracking_per_class) {
    PROF_INIT(OT);
    TRACE("BEGIN");

    // -1 means "unlimited"; every other non-positive value is rejected.
    if ((param.max_num_objects) != -1 && (param.max_num_objects <= 0)) {
        std::cout << "Error: Invalid maximum number of objects: " << param.max_num_objects << std::endl;
        ETHROW(false, invalid_argument, "Invalid maximum number of objects");
    }

    TRACE("tracking_type: %d, backend_type: %d, color_format: %d, max_num_object: %d, tracking_per_class: %d",
          static_cast<int32_t>(tracking_type_), static_cast<int32_t>(backend_type_),
          static_cast<int32_t>(input_color_format_), max_num_objects_, tracking_per_class_);

    if (param.backend_type == vas::BackendType::CPU) {
        tracker_.reset(vas::ot::Tracker::CreateInstance(param));
    } else {
        std::cout << "Error: Unexpected backend type" << std::endl;
        ETHROW(false, invalid_argument, "Unexpected backend type");
    }

    produced_tracklets_.clear();
    TRACE("END");
}
// Flushes accumulated profiling data on destruction.
ObjectTracker::Impl::~Impl() {
    PROF_FLUSH(OT);
}
// Sets the inter-frame interval used by the motion model.
// Only intervals in [0.005, 0.5] seconds (i.e. 2..200 FPS) are accepted.
void ObjectTracker::Impl::SetDeltaTime(float delta_t) {
    if (delta_t < 0.005f || delta_t > 0.5f) {
        std::cout << "Error: Invalid argument for SetFrameDeltaTime " << delta_t << std::endl;
        ETHROW(false, invalid_argument, "Invalid argument for SetFrameDeltaTime");
    }
    delta_t_ = delta_t;
}
// Trivial accessors for the configuration captured at construction time.

int32_t ObjectTracker::Impl::GetMaxNumObjects() const noexcept {
    return max_num_objects_;
}

TrackingType ObjectTracker::Impl::GetTrackingType() const noexcept {
    return tracking_type_;
}

vas::ColorFormat ObjectTracker::Impl::GetInputColorFormat() const noexcept {
    return input_color_format_;
}

float ObjectTracker::Impl::GetDeltaTime() const noexcept {
    return delta_t_;
}

vas::BackendType ObjectTracker::Impl::GetBackendType() const noexcept {
    return backend_type_;
}

bool ObjectTracker::Impl::GetTrackingPerClass() const noexcept {
    return tracking_per_class_;
}
// Runs one tracking step: converts the detections, delegates to the concrete
// tracker, and converts the resulting tracklets back to public Objects.
// Tracklets whose filtered box falls completely outside the frame are dropped.
// Fixes: local 'frmae_h' renamed to 'frame_h'; the filtered-trajectory rect is
// converted once and reused instead of being cast twice.
std::vector<Object> ObjectTracker::Impl::Track(const cv::Mat &frame,
                                               const std::vector<DetectedObject> &detected_objects) {
    if (frame.cols <= 0 || frame.rows <= 0) {
        std::cout << "Error: Invalid frame size(" << frame.cols << "x" << frame.rows << ") empty("
                  << frame.empty() << ")" << std::endl;
        ETHROW(false, invalid_argument, "Invalid frame size(%dx%d) empty(%d)\n", frame.cols, frame.rows, frame.empty());
    }

    // For NV12 the buffer holds Y plus interleaved UV planes, so the visible
    // image height is 2/3 of the cv::Mat height.
    int32_t frame_w = frame.cols;
    int32_t frame_h = (input_color_format_ == vas::ColorFormat::NV12) ? frame.rows * 2 / 3 : frame.rows;
    cv::Rect frame_rect(0, 0, frame_w, frame_h);

    TRACE("START");
    PROF_START(PROF_COMPONENTS_OT_RUN_TRACK);

    // Convert the public detection structs to the tracker's internal type,
    // remembering each detection's input index for association reporting.
    std::vector<vas::ot::Detection> detections;
    TRACE("+ Number: Detected objects (%d)", static_cast<int32_t>(detected_objects.size()));
    int32_t index = 0;
    for (const auto &object : detected_objects) {
        vas::ot::Detection detection;

        detection.class_label = object.class_label;
        detection.rect = static_cast<cv::Rect2f>(object.rect);
        detection.index = index;

        detections.emplace_back(detection);
        index++;
    }

    std::vector<Object> objects;
    if (backend_type_ == vas::BackendType::CPU) {
        tracker_->TrackObjects(frame, detections, &produced_tracklets_, delta_t_);
        TRACE("+ Number: Tracking objects (%d)", static_cast<int32_t>(produced_tracklets_.size()));

        for (const auto &tracklet : produced_tracklets_) // result 'Tracklet'
        {
            cv::Rect rect = static_cast<cv::Rect>(tracklet->trajectory_filtered.back());
            if ((rect & frame_rect).area() > 0) {
                Object object;
                // TRACE("     - ID(%d) Status(%d)", tracklet.id, tracklet.status);
                object.rect = rect; // reuse the already-converted rectangle
                object.tracking_id = tracklet->id;
                object.class_label = tracklet->label;
                object.association_idx = tracklet->association_idx;

                // Map the internal status enum to the public one; anything
                // unexpected is reported as LOST.
                object.status = vas::ot::TrackingStatus::LOST;
                switch (tracklet->status) {
                case ST_NEW:
                    object.status = vas::ot::TrackingStatus::NEW;
                    break;
                case ST_TRACKED:
                    object.status = vas::ot::TrackingStatus::TRACKED;
                    break;
                case ST_LOST:
                default:
                    object.status = vas::ot::TrackingStatus::LOST;
                }

                objects.emplace_back(object);
            } else {
                TRACE("[ %d, %d, %d, %d ] is out of the image bound! -> Filtered out.", rect.x, rect.y, rect.width,
                      rect.height);
            }
        }
    } else {
        ETHROW(false, invalid_argument, "Unexpected input backend type for VAS-OT.")
    }

    TRACE("+ Number: Result objects (%d)", static_cast<int32_t>(objects.size()));
    PROF_END(PROF_COMPONENTS_OT_RUN_TRACK);

#ifdef DUMP_OTAV
    otav_.Dump(frame, detections, produced_tracklets_, tracker_->GetFrameCount() - 1);
#endif
    TRACE("END");
    return objects;
}
// Builder defaults: CPU backend, unlimited objects, BGR input,
// per-class association enabled.
ObjectTracker::Builder::Builder()
    : backend_type(vas::BackendType::CPU), max_num_objects(kDefaultMaxNumObjects),
      input_image_format(vas::ColorFormat::BGR), tracking_per_class(true) {
}

ObjectTracker::Builder::~Builder() {
}
// Validates the builder configuration, maps the public TrackingType to the
// internal tracker profile, parses platform_config options, and constructs
// the ObjectTracker with its Impl. Throws invalid_argument on bad input.
std::unique_ptr<ObjectTracker> ObjectTracker::Builder::Build(TrackingType tracking_type) const {
    TRACE("BEGIN");

    ObjectTracker::Impl *ot_impl = nullptr;
    ObjectTracker::Impl::InitParameters param;

    param.max_num_objects = max_num_objects;
    param.format = input_image_format;
    param.backend_type = backend_type;
    param.tracking_type = tracking_type;
    param.tracking_per_class = tracking_per_class;

    // Accept only color formats inside the [BGR, I420] enum range.
    if (static_cast<int32_t>(vas::ColorFormat::BGR) > static_cast<int32_t>(input_image_format) ||
        static_cast<int32_t>(vas::ColorFormat::I420) < static_cast<int32_t>(input_image_format)) {
        ETHROW(false, invalid_argument, "Invalid color format(%d)", static_cast<int32_t>(input_image_format));
    }

    // Public tracking type -> internal tracker profile.
    switch (tracking_type) {
    case vas::ot::TrackingType::LONG_TERM:
        param.profile = vas::ot::Tracker::PROFILE_LONG_TERM;
        break;
    case vas::ot::TrackingType::SHORT_TERM:
        param.profile = vas::ot::Tracker::PROFILE_SHORT_TERM;
        break;
    case vas::ot::TrackingType::SHORT_TERM_KCFVAR:
        param.profile = vas::ot::Tracker::PROFILE_SHORT_TERM_KCFVAR;
        break;
    case vas::ot::TrackingType::SHORT_TERM_IMAGELESS:
        param.profile = vas::ot::Tracker::PROFILE_SHORT_TERM_IMAGELESS;
        break;
    case vas::ot::TrackingType::ZERO_TERM:
        param.profile = vas::ot::Tracker::PROFILE_ZERO_TERM;
        break;
    case vas::ot::TrackingType::ZERO_TERM_COLOR_HISTOGRAM:
        param.profile = vas::ot::Tracker::PROFILE_ZERO_TERM_COLOR_HISTOGRAM;
        break;
    case vas::ot::TrackingType::ZERO_TERM_IMAGELESS:
        param.profile = vas::ot::Tracker::PROFILE_ZERO_TERM_IMAGELESS;
        break;
    default:
        std::cout << "Error: Invalid tracker type vas::ot::Tracker" << std::endl;
        ETHROW(false, invalid_argument, "Invalid tracker type vas::ot::Tracker");
        return nullptr; // unreachable: ETHROW throws above
    }

    // Not exposed to external parameter
    param.min_region_ratio_in_boundary =
        kMinRegionRatioInImageBoundary; // Ratio threshold of size: used by zttchist, zttimgless, sttkcfvar, sttimgless

    for (const auto &item : platform_config) {
        (void)item; // resolves ununsed warning when LOG_TRACE is OFF
        TRACE("platform_config[%s] = %s", item.first.c_str(), item.second.c_str());
    }

    // Optional "max_num_threads" override: must parse as an integer and be
    // either -1 (auto) or a positive count.
    int max_num_threads = kDefaultNumThreads;
    auto max_num_threads_iter = platform_config.find(kNameMaxNumThreads);
    if (max_num_threads_iter != platform_config.end()) {
        try {
            max_num_threads = std::stoi(max_num_threads_iter->second);
        } catch (const std::exception &) {
            ETHROW(false, invalid_argument, "max_num_threads should be integer");
        }
        if (max_num_threads == 0 || max_num_threads < -1)
            ETHROW(false, invalid_argument, "max_num_threads cannot be 0 or smaller than -1");
    }
    param.max_num_threads = max_num_threads;

    ot_impl = new ObjectTracker::Impl(param);
    std::unique_ptr<ObjectTracker> ot(new ObjectTracker(ot_impl));
    TRACE("END");

    return ot;
}
}; // namespace ot
}; // namespace vas

View File

@ -0,0 +1,30 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_OT_PROF_DEF_HPP
#define VAS_OT_PROF_DEF_HPP

#include "../../common/prof.hpp"

// Profiling tags for the OT module, consumed by the PROF_START/PROF_END macros.
// 0 ~ 999   : Reserved group id for UnitTests.
// 1000 ~    : User Defined id range under modules.
#define PROF_COMPONENTS_OT_RUN_TRACK PROF_TAG_GENERATE(OT, 1000, "    ObjectTracker::Track")

#define PROF_COMPONENTS_OT_ZEROTERM_RUN_TRACKER PROF_TAG_GENERATE(OT, 1400, "  ZeroTermTracker::TrackObjects")
#define PROF_COMPONENTS_OT_ZEROTERM_KALMAN_PREDICTION PROF_TAG_GENERATE(OT, 1411, "  ZeroTermTracker::KalmanPrediction")
#define PROF_COMPONENTS_OT_ZEROTERM_RUN_ASSOCIATION PROF_TAG_GENERATE(OT, 1421, "  ZeroTermTracker::Association")
#define PROF_COMPONENTS_OT_ZEROTERM_UPDATE_STATUS PROF_TAG_GENERATE(OT, 1441, "  ZeroTermTracker::UpdateTrackedStatus")
#define PROF_COMPONENTS_OT_ZEROTERM_COMPUTE_OCCLUSION PROF_TAG_GENERATE(OT, 1461, "  ZeroTermTracker::ComputeOcclusion")
#define PROF_COMPONENTS_OT_ZEROTERM_UPDATE_MODEL PROF_TAG_GENERATE(OT, 1481, "  ZeroTermTracker::UpdateModel")
#define PROF_COMPONENTS_OT_ZEROTERM_REGISTER_OBJECT PROF_TAG_GENERATE(OT, 1491, "  ZeroTermTracker::RegisterObject")

#define PROF_COMPONENTS_OT_ASSOCIATE_COMPUTE_DIST_TABLE \
    PROF_TAG_GENERATE(OT, 1600, "    Association::ComputeDistanceTable")
#define PROF_COMPONENTS_OT_ASSOCIATE_COMPUTE_COST_TABLE PROF_TAG_GENERATE(OT, 1610, "    Association::ComputeCostTable")
#define PROF_COMPONENTS_OT_ASSOCIATE_WITH_HUNGARIAN PROF_TAG_GENERATE(OT, 1620, "    Association::AssociateWithHungarian")

#endif // VAS_OT_PROF_DEF_HPP

View File

@ -0,0 +1,256 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#include "short_term_imageless_tracker.hpp"
#include "prof_def.hpp"
#include "../../common/exception.hpp"
#include <memory>
namespace vas {
namespace ot {
// State-machine thresholds for tracklet lifecycle transitions.
const int32_t kMaxAssociationLostCount = 2;     // ST_TRACKED -> ST_LOST
const int32_t kMaxAssociationFailCount = 20;    // ST_LOST -> ST_DEAD
const int32_t kMaxOutdatedCountInTracked = 30;  // ST_TRACKED -> ST_LOST
const int32_t kMaxOutdatedCountInLost = 20;     // ST_LOST -> ST_DEAD
const int32_t kMaxTrajectorySize = 30;          // history length kept per tracklet

/**
 *
 * ShortTermImagelessTracker
 *
 **/
// Kalman-filter-only short-term tracker: no appearance model is used, so
// tracks are maintained purely from motion prediction and detection
// association. image_sz starts at (0, 0) and is latched on the first frame.
ShortTermImagelessTracker::ShortTermImagelessTracker(vas::ot::Tracker::InitParameters init_param)
    : Tracker(init_param.max_num_objects, init_param.min_region_ratio_in_boundary, init_param.format,
              init_param.tracking_per_class),
      image_sz(0, 0) {
    TRACE(" - Created tracker = ShortTermImagelessTracker");
}

ShortTermImagelessTracker::~ShortTermImagelessTracker() {
}
// One tracking step:
//  1) Kalman-predict every tracklet.
//  2) Kill all tracklets if the frame size changed.
//  3) Associate detections with tracklets.
//  4) Advance each tracklet's NEW/TRACKED/LOST/DEAD state machine.
//  5) Register unassociated detections as new tracklets, prune, and trim.
// Returns 0 and writes the surviving tracklets to *tracklets.
int32_t ShortTermImagelessTracker::TrackObjects(const cv::Mat &mat, const std::vector<Detection> &detections,
                                                std::vector<std::shared_ptr<Tracklet>> *tracklets, float delta_t) {
    PROF_START(PROF_COMPONENTS_OT_SHORTTERM_RUN_TRACKER);
    int32_t input_img_width = mat.cols;
    int32_t input_img_height = mat.rows;
    // Planar YUV inputs carry chroma planes below the image: visible height is 2/3.
    if (input_image_format_ == vas::ColorFormat::NV12 || input_image_format_ == vas::ColorFormat::I420) {
        input_img_height = mat.rows / 3 * 2;
    }
    const cv::Rect2f image_boundary(0.0f, 0.0f, static_cast<float>(input_img_width),
                                    static_cast<float>(input_img_height));

    TRACE("Start TrackObjects frame_count_: %d, detection: %d, tracklet: %d ----------------", frame_count_,
          detections.size(), tracklets_.size());

    // A resolution change invalidates all motion state: mark everything dead
    // (skipped for the very first frame, when image_sz is still (0, 0)).
    bool is_dead = false;
    if (image_sz.width != input_img_width || image_sz.height != input_img_height) {
        if (image_sz.width != 0 || image_sz.height != 0) {
            is_dead = true;
        }
        image_sz.width = input_img_width;
        image_sz.height = input_img_height;
    }

    PROF_START(PROF_COMPONENTS_OT_SHORTTERM_KALMAN_PREDICTION);
    // Predict tracklets state
    for (auto &tracklet : tracklets_) {
        auto sttimgless_tracklet = std::dynamic_pointer_cast<ShortTermImagelessTracklet>(tracklet);
        cv::Rect2f predicted_rect = sttimgless_tracklet->kalman_filter->Predict(delta_t);
        sttimgless_tracklet->predicted = predicted_rect;
        sttimgless_tracklet->trajectory.push_back(predicted_rect);
        sttimgless_tracklet->trajectory_filtered.push_back(predicted_rect);
        sttimgless_tracklet->association_delta_t += delta_t;

        // Reset association index every frame for new detection input
        sttimgless_tracklet->association_idx = kNoMatchDetection;
    }
    PROF_END(PROF_COMPONENTS_OT_SHORTTERM_KALMAN_PREDICTION);

    PROF_START(PROF_COMPONENTS_OT_SHORTTERM_UPDATE_STATUS);
    // Conduct tracking of SOT for each tracklet
    TRACE("  Update status");
    for (auto &tracklet : tracklets_) {
        if (is_dead) {
            tracklet->status = ST_DEAD;
            continue;
        }
        // tracklet->association_delta_t = 0.0f;  // meaning updated by SOT
    }
    if (is_dead) {
        RemoveDeadTracklets();
    }
    PROF_END(PROF_COMPONENTS_OT_SHORTTERM_UPDATE_STATUS);

    PROF_START(PROF_COMPONENTS_OT_SHORTTERM_RUN_ASSOCIATION);
    // Tracklet-detection association
    int32_t n_detections = static_cast<int32_t>(detections.size());
    int32_t n_tracklets = static_cast<int32_t>(tracklets_.size());
    std::vector<bool> d_is_associated(n_detections, false);
    std::vector<int32_t> t_associated_d_index(n_tracklets, -1);
    if (n_detections > 0) {
        auto result = associator_.Associate(detections, tracklets_);
        d_is_associated = result.first;
        t_associated_d_index = result.second;
    }
    PROF_END(PROF_COMPONENTS_OT_SHORTTERM_RUN_ASSOCIATION);

    PROF_START(PROF_COMPONENTS_OT_SHORTTERM_UPDATE_STATUS);
    // Update tracklets' state
    if (n_detections > 0) {
        for (int32_t t = 0; t < n_tracklets; ++t) {
            auto &tracklet = tracklets_[t];
            if (t_associated_d_index[t] >= 0) {
                // Association succeeded: refresh the tracklet from the detection.
                tracklet->association_delta_t = 0.0f;
                int32_t associated_d_index = t_associated_d_index[t];
                const cv::Rect2f &d_bounding_box = detections[associated_d_index].rect & image_boundary;

                // Apply associated detection to tracklet
                tracklet->association_idx = detections[associated_d_index].index;
                tracklet->association_fail_count = 0;
                tracklet->age = 0;
                tracklet->label = detections[associated_d_index].class_label;

                auto sttimgless_tracklet = std::dynamic_pointer_cast<ShortTermImagelessTracklet>(tracklet);
                if (!sttimgless_tracklet)
                    continue;

                if (sttimgless_tracklet->status == ST_NEW) {
                    sttimgless_tracklet->trajectory.back() = d_bounding_box;
                    sttimgless_tracklet->trajectory_filtered.back() =
                        sttimgless_tracklet->kalman_filter->Correct(sttimgless_tracklet->trajectory.back());
                    sttimgless_tracklet->status = ST_TRACKED;
                } else if (sttimgless_tracklet->status == ST_TRACKED) {
                    sttimgless_tracklet->trajectory.back() = d_bounding_box;
                    sttimgless_tracklet->trajectory_filtered.back() =
                        sttimgless_tracklet->kalman_filter->Correct(sttimgless_tracklet->trajectory.back());
                } else if (sttimgless_tracklet->status == ST_LOST) {
                    // Re-acquired after being lost: restart the trajectory.
                    sttimgless_tracklet->RenewTrajectory(d_bounding_box);
                    sttimgless_tracklet->status = ST_TRACKED;
                }
            } else // Association failure
            {
                tracklet->association_fail_count++;
                if (tracklet->status == ST_NEW) {
                    tracklet->status = ST_DEAD; // regard non-consecutive association as false alarm
                } else if (tracklet->status == ST_TRACKED) {
                    if (tracklet->association_fail_count > kMaxAssociationLostCount) {
                        // # association fail > threshold while tracking -> MISSING
                        tracklet->status = ST_LOST;
                        tracklet->association_fail_count = 0;
                        tracklet->age = 0;
                    }
                } else if (tracklet->status == ST_LOST) {
                    if (tracklet->association_fail_count > kMaxAssociationFailCount) {
                        // # association fail > threshold while missing -> DEAD
                        tracklet->status = ST_DEAD;
                    }
                }
            }
        }
    } else // detections.size() == 0
    {
        // No detections this frame: age tracklets by time only.
        for (int32_t t = 0; t < static_cast<int32_t>(tracklets_.size()); ++t) {
            auto &tracklet = tracklets_[t];
            // Always change ST_NEW to ST_TRACKED: no feature tracking from previous detection input.
            if (tracklet->status == ST_NEW) {
                tracklet->status = ST_TRACKED;
            }

            auto sttimgless_tracklet = std::dynamic_pointer_cast<ShortTermImagelessTracklet>(tracklet);
            if (!sttimgless_tracklet)
                continue;

            if (sttimgless_tracklet->status == ST_TRACKED) {
                if (sttimgless_tracklet->age > kMaxOutdatedCountInTracked) {
                    sttimgless_tracklet->status = ST_LOST;
                    sttimgless_tracklet->association_fail_count = 0;
                    sttimgless_tracklet->age = 0;
                } else {
                    sttimgless_tracklet->trajectory_filtered.back() =
                        sttimgless_tracklet->kalman_filter->Correct(sttimgless_tracklet->trajectory.back());
                }
            }

            if (sttimgless_tracklet->status == ST_LOST) {
                if (sttimgless_tracklet->age >= kMaxOutdatedCountInLost) {
                    // # association fail > threshold while missing -> DEAD
                    sttimgless_tracklet->status = ST_DEAD;
                }
            }
        }
    }
    PROF_END(PROF_COMPONENTS_OT_SHORTTERM_UPDATE_STATUS);

    PROF_START(PROF_COMPONENTS_OT_SHORTTERM_COMPUTE_OCCLUSION);
    ComputeOcclusion();
    PROF_END(PROF_COMPONENTS_OT_SHORTTERM_COMPUTE_OCCLUSION);

    PROF_START(PROF_COMPONENTS_OT_SHORTTERM_REGISTER_OBJECT);
    // Register remaining detections as new objects
    for (int32_t d = 0; d < static_cast<int32_t>(detections.size()); ++d) {
        if (d_is_associated[d] == false) {
            // Respect the max-objects cap (-1 means unlimited).
            if (static_cast<int32_t>(tracklets_.size()) >= max_objects_ && max_objects_ != -1)
                continue;

            std::unique_ptr<ShortTermImagelessTracklet> tracklet(new ShortTermImagelessTracklet());

            tracklet->status = ST_NEW;
            tracklet->id = GetNextTrackingID();
            tracklet->label = detections[d].class_label;
            tracklet->association_idx = detections[d].index;

            const cv::Rect2f &bounding_box = detections[d].rect & image_boundary;
            tracklet->InitTrajectory(bounding_box);
            tracklet->kalman_filter.reset(new KalmanFilterNoOpencv(bounding_box));
            tracklets_.push_back(std::move(tracklet));
        }
    }
    PROF_END(PROF_COMPONENTS_OT_SHORTTERM_REGISTER_OBJECT);

    // Prune dead/out-of-bound tracklets and bound the stored history length.
    RemoveDeadTracklets();
    RemoveOutOfBoundTracklets(input_img_width, input_img_height);
    TrimTrajectories();

    *tracklets = tracklets_;

    // Increase age
    for (auto &tracklet : tracklets_) {
        tracklet->age++;
    }

    IncreaseFrameCount();
    PROF_END(PROF_COMPONENTS_OT_SHORTTERM_RUN_TRACKER);
    return 0;
}
void ShortTermImagelessTracker::TrimTrajectories() {
    // Bound both history buffers at kMaxTrajectorySize entries by discarding
    // the oldest elements from the front; the raw and the Kalman-filtered
    // trajectories follow the same policy.
    for (auto &tracklet : tracklets_) {
        auto &raw = tracklet->trajectory;
        if (static_cast<int32_t>(raw.size()) > kMaxTrajectorySize) {
            raw.erase(raw.begin(), raw.end() - kMaxTrajectorySize);
        }

        auto &filtered = tracklet->trajectory_filtered;
        if (static_cast<int32_t>(filtered.size()) > kMaxTrajectorySize) {
            filtered.erase(filtered.begin(), filtered.end() - kMaxTrajectorySize);
        }
    }
}
}; // namespace ot
}; // namespace vas

View File

@ -0,0 +1,39 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_OT_SHORT_TERM_IMAGELESS_TRACKER_HPP
#define VAS_OT_SHORT_TERM_IMAGELESS_TRACKER_HPP
#include "tracker.hpp"
#include <deque>
#include <vector>
namespace vas {
namespace ot {
// Short-term tracker that works without image appearance features: tracklets
// are propagated by Kalman prediction and matched to incoming detections only.
class ShortTermImagelessTracker : public Tracker {
  public:
    explicit ShortTermImagelessTracker(vas::ot::Tracker::InitParameters init_param);
    virtual ~ShortTermImagelessTracker();

    // Runs one predict/associate/update cycle; see Tracker::TrackObjects.
    virtual int32_t TrackObjects(const cv::Mat &mat, const std::vector<Detection> &detections,
                                 std::vector<std::shared_ptr<Tracklet>> *tracklets, float delta_t) override;

    ShortTermImagelessTracker(const ShortTermImagelessTracker &) = delete;
    ShortTermImagelessTracker &operator=(const ShortTermImagelessTracker &) = delete;

  private:
    // Caps each tracklet's stored trajectory history.
    void TrimTrajectories();

  private:
    // Last seen input resolution; a change kills all existing tracklets.
    cv::Size image_sz;
};
}; // namespace ot
}; // namespace vas
#endif // VAS_OT_SHORT_TERM_IMAGELESS_TRACKER_HPP

View File

@ -0,0 +1,132 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#include "short_term_imageless_tracker.hpp"
#include "zero_term_imageless_tracker.hpp"
#include "../../common/exception.hpp"
namespace vas {
namespace ot {
// Base-class state shared by all tracker profiles. Tracking IDs start at 1
// (0 is reserved; see RemoveObject).
Tracker::Tracker(int32_t max_objects, float min_region_ratio_in_boundary, vas::ColorFormat format, bool class_per_class)
    : max_objects_(max_objects), next_id_(1), frame_count_(0),
      min_region_ratio_in_boundary_(min_region_ratio_in_boundary), input_image_format_(format),
      associator_(ObjectsAssociator(class_per_class)) {
}

Tracker::~Tracker() {
}
// Factory for the concrete tracker matching the requested profile.
// Only the two imageless profiles are available in this port; anything else
// raises std::runtime_error. The caller takes ownership of the result.
Tracker *Tracker::CreateInstance(InitParameters init_parameters) {
    TRACE("START CreateInstance Tracker");

    Tracker *tracker = nullptr;
    switch (init_parameters.profile) {
    case PROFILE_SHORT_TERM_IMAGELESS:
        tracker = new ShortTermImagelessTracker(init_parameters);
        break;
    case PROFILE_ZERO_TERM_IMAGELESS:
        tracker = new ZeroTermImagelessTracker(init_parameters);
        break;
    default:
        throw std::runtime_error("Unsupported tracking type");
    }

    TRACE(" - max_num_objects(%d)", init_parameters.max_num_objects);
    TRACE("END");
    return tracker;
}
int32_t Tracker::RemoveObject(const int32_t id) {
if (id == 0)
return -1;
for (auto tracklet = tracklets_.begin(); tracklet != tracklets_.end(); ++tracklet) {
if ((*tracklet)->id == id) {
tracklet = tracklets_.erase(tracklet);
return 0;
}
}
return -1;
}
// Drops all tracklets and restarts the frame counter.
// Note: next_id_ is NOT reset, so IDs keep increasing across Reset() calls.
void Tracker::Reset(void) {
    frame_count_ = 0;
    tracklets_.clear();
}

// Number of frames processed since construction (or the last Reset).
int32_t Tracker::GetFrameCount(void) const {
    return frame_count_;
}

// Hands out monotonically increasing tracking IDs, starting at 1.
int32_t Tracker::GetNextTrackingID() {
    return next_id_++;
}

void Tracker::IncreaseFrameCount() {
    frame_count_++;
}
// For each TRACKED tracklet, computes the largest fraction of its latest
// bounding box covered by any other non-LOST tracklet's box (overlap divided
// by its own area — deliberately not IoU) and stores it in occlusion_ratio.
// Fix: a degenerate zero-area box previously divided by zero, poisoning the
// ratio with NaN/inf; such boxes now report an occlusion ratio of 0.
void Tracker::ComputeOcclusion() {
    // Compute occlusion ratio
    for (int32_t t0 = 0; t0 < static_cast<int32_t>(tracklets_.size()); ++t0) {
        auto &tracklet0 = tracklets_[t0];
        if (tracklet0->status != ST_TRACKED)
            continue;

        const cv::Rect2f &r0 = tracklet0->trajectory.back();
        if (r0.area() <= 0.0f) {
            // Zero-area box: no meaningful ratio can be computed.
            tracklet0->occlusion_ratio = 0.0f;
            continue;
        }

        float max_occlusion_ratio = 0.0f;
        for (int32_t t1 = 0; t1 < static_cast<int32_t>(tracklets_.size()); ++t1) {
            const auto &tracklet1 = tracklets_[t1];
            if (t0 == t1 || tracklet1->status == ST_LOST)
                continue;

            const cv::Rect2f &r1 = tracklet1->trajectory.back();
            max_occlusion_ratio = std::max(max_occlusion_ratio, (r0 & r1).area() / r0.area()); // different from IoU
        }
        tracklets_[t0]->occlusion_ratio = max_occlusion_ratio;
    }
}
// Erases tracklets whose latest box (raw or Kalman-filtered, per is_filtered)
// overlaps the image by less than min_region_ratio_in_boundary_ of its own area.
// NOTE(review): a zero-area object_region makes the ratio NaN here, so such
// tracklets are kept (NaN < threshold is false) — confirm this is intended.
void Tracker::RemoveOutOfBoundTracklets(int32_t input_width, int32_t input_height, bool is_filtered) {
    const cv::Rect2f image_region(0.0f, 0.0f, static_cast<float>(input_width), static_cast<float>(input_height));
    for (auto tracklet = tracklets_.begin(); tracklet != tracklets_.end();) {
        const cv::Rect2f &object_region =
            is_filtered ? (*tracklet)->trajectory_filtered.back() : (*tracklet)->trajectory.back();
        if ((image_region & object_region).area() / object_region.area() <
            min_region_ratio_in_boundary_) { // only 10% is in image boundary
            tracklet = tracklets_.erase(tracklet);
        } else {
            ++tracklet;
        }
    }
}
void Tracker::RemoveDeadTracklets() {
for (auto tracklet = tracklets_.begin(); tracklet != tracklets_.end();) {
if ((*tracklet)->status == ST_DEAD) {
tracklet = tracklets_.erase(tracklet);
} else {
++tracklet;
}
}
}
bool Tracker::RemoveOneLostTracklet() {
for (auto tracklet = tracklets_.begin(); tracklet != tracklets_.end();) {
if ((*tracklet)->status == ST_LOST) {
// The first tracklet is the oldest
tracklet = tracklets_.erase(tracklet);
return true;
} else {
++tracklet;
}
}
return false;
}
}; // namespace ot
}; // namespace vas

View File

@ -0,0 +1,123 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_OT_TRACKER_HPP
#define VAS_OT_TRACKER_HPP
#include "mtt/objects_associator.hpp"
#include "tracklet.hpp"
#include <vas/common.hpp>
#include <cstdint>
#include <deque>
namespace vas {
namespace ot {
const int32_t kDefaultMaxNumObjects = -1; // -1 == no limit on tracked objects
const float kMaxTargetAreaFactor = 0.8f;
const float kMinRegionRatioInImageBoundary = 0.75f; // MIN_REGION_RATIO_IN_IMAGE_BOUNDARY
// Abstract base for all tracker profiles: owns the tracklet list, the
// detection-tracklet associator, and the shared lifecycle helpers used by
// the concrete TrackObjects implementations.
class Tracker {
  public:
    enum Profile {
        PROFILE_LONG_TERM = 0,        // for long-term tracking usage
        PROFILE_SHORT_TERM,           // for short-term tracking usage (suitable for using with an object detector)
        PROFILE_SHORT_TERM_KCFVAR,    // alias of 'PROFILE_SHORT_TERM'. 'PROFILE_SHORT_TERM' will be deprecated
        PROFILE_SHORT_TERM_IMAGELESS, // for short-term tracking usage with kalman tracking
        PROFILE_ZERO_TERM, // for zero-term tracking usage (only works with object association algorithm, not tracking)
        PROFILE_ZERO_TERM_IMAGELESS,       // for zero-term tracking usage with kalman tracking
        PROFILE_ZERO_TERM_COLOR_HISTOGRAM, // alias of 'PROFILE_ZERO_TERM'. 'PROFILE_ZERO_TERM' will be deprecated
    };

    // Construction-time configuration consumed by CreateInstance.
    class InitParameters {
      public:
        Profile profile; // tracking type
        int32_t max_num_objects;
        int32_t max_num_threads; // for Parallelization
        vas::ColorFormat format;
        bool tracking_per_class;

        // Won't be exposed to the external
        float min_region_ratio_in_boundary; // For ST, ZT
    };

  public:
    virtual ~Tracker();

    /**
     * create new object tracker instance
     * @param InitParameters
     */
    static Tracker *CreateInstance(InitParameters init_parameters);

    /**
     * perform tracking
     *
     * @param[in] mat Input frame
     * @param[in] detection Newly detected object data vector which will be added to the tracker. put zero length vector
     *            if there is no new object in the frame.
     * @param[in] delta_t Time passed after the latest call to TrackObjects() in seconds. Use 1.0/FPS in case of
     *            constant frame rate
     * @param[out] tracklets Tracked object data vector.
     * @return 0 for success. negative value for failure
     */
    virtual int32_t TrackObjects(const cv::Mat &mat, const std::vector<Detection> &detections,
                                 std::vector<std::shared_ptr<Tracklet>> *tracklets, float delta_t = 0.033f) = 0;

    /**
     * remove object
     *
     * @param[in] id Object id for removing. it should be the 'id' value of the Tracklet
     * @return 0 for success. negative value for failure.
     */
    int32_t RemoveObject(const int32_t id);

    /**
     * reset all internal state to its initial.
     *
     * @return 0 for success. negative value for failure.
     */
    void Reset(void);

    /**
     * get cumulated frame number
     *
     * @return 0
     */
    int32_t GetFrameCount(void) const;

  protected:
    explicit Tracker(int32_t max_objects, float min_region_ratio_in_boundary, vas::ColorFormat format,
                     bool class_per_class = true);
    Tracker() = delete;

    // Shared helpers for the concrete TrackObjects implementations.
    int32_t GetNextTrackingID();
    void IncreaseFrameCount();

    void ComputeOcclusion();

    void RemoveOutOfBoundTracklets(int32_t input_width, int32_t input_height, bool is_filtered = false);
    void RemoveDeadTracklets();
    bool RemoveOneLostTracklet();

  protected:
    int32_t max_objects_; // -1 means no limitation
    int32_t next_id_;     // next tracking id to hand out (starts at 1)
    int32_t frame_count_; // frames processed since construction/Reset

    float min_region_ratio_in_boundary_; // eviction threshold for RemoveOutOfBoundTracklets
    vas::ColorFormat input_image_format_;

    ObjectsAssociator associator_;
    std::vector<std::shared_ptr<Tracklet>> tracklets_;
};
}; // namespace ot
}; // namespace vas
#endif // VAS_OT_TRACKER_HPP

View File

@ -0,0 +1,147 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#include "tracklet.hpp"
#include <sstream>
namespace vas {
namespace ot {
// A fresh tracklet starts with no id (0), no label (-1), no associated
// detection, and in the ST_DEAD state until a tracker registers it.
Tracklet::Tracklet()
    : id(0), label(-1), association_idx(kNoMatchDetection), status(ST_DEAD), age(0), confidence(0.f),
      occlusion_ratio(0.f), association_delta_t(0.f), association_fail_count(0) {
}

Tracklet::~Tracklet() {
}
// Drops both the raw and the filtered box histories.
void Tracklet::ClearTrajectory() {
    trajectory.clear();
    trajectory_filtered.clear();
}

// Seeds both histories with the first observed box.
void Tracklet::InitTrajectory(const cv::Rect2f &bounding_box) {
    trajectory.push_back(bounding_box);
    trajectory_filtered.push_back(bounding_box);
}

// Appends one step: raw observation plus its filter-corrected counterpart.
void Tracklet::AddUpdatedTrajectory(const cv::Rect2f &bounding_box, const cv::Rect2f &corrected_box) {
    trajectory.push_back(bounding_box);
    trajectory_filtered.push_back(corrected_box);
}

// Overwrites the most recent step in place (no history growth).
void Tracklet::UpdateLatestTrajectory(const cv::Rect2f &bounding_box, const cv::Rect2f &corrected_box) {
    trajectory.back() = bounding_box;
    trajectory_filtered.back() = corrected_box;
}
// Restarts the track history from scratch at the given box.
// Reuses ClearTrajectory()/InitTrajectory() instead of duplicating their
// bodies, keeping the reset logic in one place.
void Tracklet::RenewTrajectory(const cv::Rect2f &bounding_box) {
    ClearTrajectory();
    InitTrajectory(bounding_box);
}
// Helper for Serialize(): declares std::string 'var_name' holding the
// stream-formatted 'value'.
#define DEFINE_STRING_VAR(var_name, value) \
    std::stringstream __##var_name;        \
    __##var_name << value;                 \
    std::string var_name = __##var_name.str();

// Rounds 'value' to 1/scale precision (e.g. scale 100.0 -> two decimals).
#define ROUND_F(value, scale) (round((value)*scale) / scale)
// Serializes the tracklet into a "key:value" comma-separated string consumed
// by the OTAV debugging/visualization tooling. Compiled out (always returns an
// empty string) unless DUMP_OTAV is defined.
// Side effect (DUMP_OTAV builds only): drains the mutable otav_msg buffer.
std::string Tracklet::Serialize() const {
#ifdef DUMP_OTAV
    // Stringify all scalar fields; confidence/occlusion are rounded to two
    // decimals, box coordinates below to one decimal.
    DEFINE_STRING_VAR(s_id, id);
    DEFINE_STRING_VAR(s_label, label);
    DEFINE_STRING_VAR(s_association_idx, association_idx);
    DEFINE_STRING_VAR(s_status, static_cast<int>(status));
    DEFINE_STRING_VAR(s_age, age);
    DEFINE_STRING_VAR(s_confidence, ROUND_F(confidence, 100.0));
    DEFINE_STRING_VAR(s_occlusion_ratio, ROUND_F(occlusion_ratio, 100.0));
    DEFINE_STRING_VAR(s_association_delta_t, association_delta_t);
    DEFINE_STRING_VAR(s_association_fail_count, association_fail_count);

    // Latest raw box (roi), filtered box (roif) and Kalman-predicted box (roip).
    DEFINE_STRING_VAR(t_x, ROUND_F(trajectory.back().x, 10.0));
    DEFINE_STRING_VAR(t_y, ROUND_F(trajectory.back().y, 10.0));
    DEFINE_STRING_VAR(t_w, ROUND_F(trajectory.back().width, 10.0));
    DEFINE_STRING_VAR(t_h, ROUND_F(trajectory.back().height, 10.0));
    DEFINE_STRING_VAR(tf_x, ROUND_F(trajectory_filtered.back().x, 10.0));
    DEFINE_STRING_VAR(tf_y, ROUND_F(trajectory_filtered.back().y, 10.0));
    DEFINE_STRING_VAR(tf_w, ROUND_F(trajectory_filtered.back().width, 10.0));
    DEFINE_STRING_VAR(tf_h, ROUND_F(trajectory_filtered.back().height, 10.0));
    DEFINE_STRING_VAR(p_x, ROUND_F(predicted.x, 10.0));
    DEFINE_STRING_VAR(p_y, ROUND_F(predicted.y, 10.0));
    DEFINE_STRING_VAR(p_w, ROUND_F(predicted.width, 10.0));
    DEFINE_STRING_VAR(p_h, ROUND_F(predicted.height, 10.0));

    std::string formatted_msg = "meta:\"" + s_id + "," + s_label + "," + s_association_idx + "," + s_status + "," +
                                s_age + "," + s_confidence + "," + s_occlusion_ratio + "," + s_association_delta_t +
                                "," + s_association_fail_count + "\", roi:\"" + t_x + "," + t_y + "," + t_w + "," +
                                t_h + "\", roif:\"" + tf_x + "," + tf_y + "," + tf_w + "," + tf_h + "\", roip:\"" +
                                p_x + "," + p_y + "," + p_w + "," + p_h + "\"";

    // Append any queued free-form OTAV messages and clear the queue.
    std::string free_msg;
    if (otav_msg.size() > 0) {
        free_msg = ", msg: [";
        for (auto line : otav_msg) {
            if (line.size() > 0)
                free_msg += "\n\"" + line + "\",";
        }
        free_msg += "]";
        otav_msg.clear();
    }
    return formatted_msg + free_msg;
#else
    return "";
#endif
}
// Virtual hook for appearance (RGB) feature storage. The base tracklet keeps
// no appearance data, so this returns nullptr; no override exists in this file.
std::deque<cv::Mat> *Tracklet::GetRgbFeatures() {
    return nullptr;
}
// A zero-term tracklet starts with birth_count == 1: it was observed once
// (the detection that created it).
ZeroTermImagelessTracklet::ZeroTermImagelessTracklet() : Tracklet(), birth_count(1) {
}

ZeroTermImagelessTracklet::~ZeroTermImagelessTracklet() {
}
// Restarts the track from a fresh detection: rebuilds the Kalman filter around
// `bounding_box` and primes it with one predict/correct cycle, then re-seeds
// both trajectories with the detection itself.
void ZeroTermImagelessTracklet::RenewTrajectory(const cv::Rect2f &bounding_box) {
    // Extrapolate a third of the observed motion (last position -> new
    // detection) to form the box used for the priming correction.
    const cv::Rect2f &last_box = trajectory.back();
    const float shift_x = bounding_box.x - last_box.x;
    const float shift_y = bounding_box.y - last_box.y;
    const cv::Rect extrapolated(int(bounding_box.x + shift_x / 3), int(bounding_box.y + shift_y / 3),
                                int(bounding_box.width), int(bounding_box.height));

    ClearTrajectory();
    kalman_filter.reset(new KalmanFilterNoOpencv(bounding_box));
    kalman_filter->Predict();
    kalman_filter->Correct(extrapolated);

    trajectory.push_back(bounding_box);
    trajectory_filtered.push_back(bounding_box);
}
// Short-term tracklets carry no extra construction-time state beyond the base.
ShortTermImagelessTracklet::ShortTermImagelessTracklet() : Tracklet() {
}

ShortTermImagelessTracklet::~ShortTermImagelessTracklet() {
}
// Restarts the track from a fresh detection, mirroring the zero-term variant:
// wipe history, rebuild the Kalman filter and prime it with a predict/correct
// cycle before re-seeding both trajectories with the detection.
void ShortTermImagelessTracklet::RenewTrajectory(const cv::Rect2f &bounding_box) {
    // One third of the motion between the previous position and the new
    // detection gives a plausible "forecast" box for the priming correction.
    const float motion_x = bounding_box.x - trajectory.back().x;
    const float motion_y = bounding_box.y - trajectory.back().y;
    const cv::Rect forecast(int(bounding_box.x + motion_x / 3), int(bounding_box.y + motion_y / 3),
                            int(bounding_box.width), int(bounding_box.height));

    ClearTrajectory();
    kalman_filter.reset(new KalmanFilterNoOpencv(bounding_box));
    kalman_filter->Predict();
    kalman_filter->Correct(forecast);

    trajectory.push_back(bounding_box);
    trajectory_filtered.push_back(bounding_box);
}
}; // namespace ot
}; // namespace vas

View File

@ -0,0 +1,94 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_OT_TRACKET_HPP
#define VAS_OT_TRACKET_HPP
#include "kalman_filter/kalman_filter_no_opencv.hpp"
#include <vas/common.hpp>
#include <cstdint>
#include <deque>
namespace vas {
namespace ot {
// Sentinel association index: the tracklet matched no detection this frame.
const int32_t kNoMatchDetection = -1;

// Lifecycle state of a tracklet.
enum Status {
    ST_DEAD = -1,   // dead: to be removed by the tracker
    ST_NEW = 0,     // new: just created from an unmatched detection
    ST_TRACKED = 1, // tracked: confirmed and currently matched
    ST_LOST = 2     // lost but still alive (in the detection phase if it configured)
};

// One detector output: a box, its class label, and its index in the
// detector's output list (both default to -1 when not provided).
struct Detection {
    cv::Rect2f rect;
    int32_t class_label = -1;
    int32_t index = -1;
};
// Base class for a single tracked object: lifecycle state, association data
// and the (raw and Kalman-filtered) trajectory history.
class Tracklet {
  public:
    Tracklet();
    virtual ~Tracklet();

  public:
    void ClearTrajectory();
    void InitTrajectory(const cv::Rect2f &bounding_box);
    void AddUpdatedTrajectory(const cv::Rect2f &bounding_box, const cv::Rect2f &corrected_box);
    void UpdateLatestTrajectory(const cv::Rect2f &bounding_box, const cv::Rect2f &corrected_box);
    virtual void RenewTrajectory(const cv::Rect2f &bounding_box);

    virtual std::deque<cv::Mat> *GetRgbFeatures();
    virtual std::string Serialize() const; // Returns key:value with comma separated format

  public:
    int32_t id;                     // Tracking id; 0 until assigned (ctor default) -- presumably set by the tracker, confirm
    int32_t label;                  // Class label of the associated detection (-1 when unset)
    int32_t association_idx;        // Index of the matched detection this frame; kNoMatchDetection if none
    Status status;                  // Lifecycle state (ST_NEW/ST_TRACKED/ST_LOST/ST_DEAD)
    int32_t age;
    float confidence;
    float occlusion_ratio;
    float association_delta_t;      // Time accumulated since the last successful association
    int32_t association_fail_count; // Consecutive frames without an association

    std::deque<cv::Rect2f> trajectory;          // Raw observed/predicted boxes
    std::deque<cv::Rect2f> trajectory_filtered; // Kalman-corrected boxes

    cv::Rect2f predicted; // Result from Kalman prediction. It is for debugging (OTAV)
    mutable std::vector<std::string> otav_msg; // Messages for OTAV (drained by Serialize())
};
// Tracklet specialization for the zero-term imageless tracker: adds a birth
// counter (consecutive matches before promotion to TRACKED) and a per-object
// Kalman filter.
class ZeroTermImagelessTracklet : public Tracklet {
  public:
    ZeroTermImagelessTracklet();
    virtual ~ZeroTermImagelessTracklet();

    void RenewTrajectory(const cv::Rect2f &bounding_box) override;

  public:
    int32_t birth_count;
    std::unique_ptr<KalmanFilterNoOpencv> kalman_filter;
};
// Tracklet specialization for short-term imageless tracking: like the
// zero-term variant it keeps a per-object Kalman filter, but no birth counter.
class ShortTermImagelessTracklet : public Tracklet {
  public:
    ShortTermImagelessTracklet();
    virtual ~ShortTermImagelessTracklet();

    void RenewTrajectory(const cv::Rect2f &bounding_box) override;

  public:
    std::unique_ptr<KalmanFilterNoOpencv> kalman_filter;
};
}; // namespace ot
}; // namespace vas
#endif // VAS_OT_TRACKET_HPP

View File

@ -0,0 +1,185 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#include "zero_term_imageless_tracker.hpp"
#include "prof_def.hpp"
#include "../../common/exception.hpp"
#include <cmath>
#include <memory>
namespace vas {
namespace ot {
// Max consecutive unmatched frames before a LOST tracklet is marked DEAD.
// (The previously used value of 600 ~ 20 s is kept here for reference.)
// const int32_t kMaxAssociationFailCount = 600; // about 20 seconds
const int32_t kMaxAssociationFailCount = 120; // about 4 seconds
// Upper bound on the stored trajectory history per tracklet.
const int32_t kMaxTrajectorySize = 30;
// Consecutive matches required to promote a NEW tracklet to TRACKED.
const int32_t kMinBirthCount = 3;
/**
 *
 * ZeroTermImagelessTracker
 *
 **/

// Forwards the generic tracking parameters to the Tracker base class;
// this flavor needs no extra construction-time state.
ZeroTermImagelessTracker::ZeroTermImagelessTracker(vas::ot::Tracker::InitParameters init_param)
    : Tracker(init_param.max_num_objects, init_param.min_region_ratio_in_boundary, init_param.format,
              init_param.tracking_per_class) {
    TRACE(" - Created tracker = ZeroTermImagelessTracker");
}

ZeroTermImagelessTracker::~ZeroTermImagelessTracker() {
}
// Runs one zero-term tracking step (a detection input is expected every frame;
// only box geometry is used -- mat pixels are never read, just its dimensions).
//
// Fix vs. original: the Kalman-prediction loop dereferenced the result of
// std::dynamic_pointer_cast without a null check, while the status-update loop
// below guards the very same cast. The guard is now applied consistently.
//
// @param mat        Input frame; for NV12/I420 the Mat carries chroma planes
//                   below the Y plane, so the visible height is rows / 3 * 2.
// @param detections Detector output for this frame (boxes, labels, indices).
// @param tracklets  Output: snapshot of all live tracklets after the update.
// @param delta_t    Time elapsed since the previous frame (Kalman prediction).
// @return 0 on success.
int32_t ZeroTermImagelessTracker::TrackObjects(const cv::Mat &mat, const std::vector<Detection> &detections,
                                               std::vector<std::shared_ptr<Tracklet>> *tracklets, float delta_t) {
    PROF_START(PROF_COMPONENTS_OT_ZEROTERM_RUN_TRACKER);
    int32_t input_img_width = mat.cols;
    int32_t input_img_height = mat.rows;
    if (input_image_format_ == vas::ColorFormat::NV12 || input_image_format_ == vas::ColorFormat::I420) {
        // Planar YUV Mats stack chroma below the Y plane: visible height is 2/3.
        input_img_height = mat.rows / 3 * 2;
    }

    const cv::Rect2f image_boundary(0.0f, 0.0f, static_cast<float>(input_img_width),
                                    static_cast<float>(input_img_height));

    PROF_START(PROF_COMPONENTS_OT_ZEROTERM_KALMAN_PREDICTION);
    // 1) Kalman prediction: advance every tracklet to the current timestamp.
    for (auto &tracklet : tracklets_) {
        auto zttimgless_tracklet = std::dynamic_pointer_cast<ZeroTermImagelessTracklet>(tracklet);
        if (!zttimgless_tracklet)
            continue; // defensive: same guard as in the update loop below

        cv::Rect2f predicted_rect = zttimgless_tracklet->kalman_filter->Predict(delta_t);
        zttimgless_tracklet->predicted = predicted_rect;
        zttimgless_tracklet->trajectory.push_back(predicted_rect);
        zttimgless_tracklet->trajectory_filtered.push_back(predicted_rect);
        zttimgless_tracklet->association_delta_t += delta_t;

        // Reset association index every frame for new detection input
        zttimgless_tracklet->association_idx = kNoMatchDetection;
    }
    PROF_END(PROF_COMPONENTS_OT_ZEROTERM_KALMAN_PREDICTION);

    PROF_START(PROF_COMPONENTS_OT_ZEROTERM_RUN_ASSOCIATION);
    // 2) Tracklet-detection association.
    int32_t n_detections = static_cast<int32_t>(detections.size());
    int32_t n_tracklets = static_cast<int32_t>(tracklets_.size());
    std::vector<bool> d_is_associated(n_detections, false);
    std::vector<int32_t> t_associated_d_index(n_tracklets, -1);
    if (n_detections > 0) {
        auto result = associator_.Associate(detections, tracklets_);
        d_is_associated = result.first;
        t_associated_d_index = result.second;
    }
    PROF_END(PROF_COMPONENTS_OT_ZEROTERM_RUN_ASSOCIATION);

    PROF_START(PROF_COMPONENTS_OT_ZEROTERM_UPDATE_STATUS);
    // 3) State update: apply matched detections, age unmatched tracklets.
    for (int32_t t = 0; t < n_tracklets; ++t) {
        auto &tracklet = tracklets_[t];
        if (t_associated_d_index[t] >= 0) {
            tracklet->association_delta_t = 0.0f;
            int32_t associated_d_index = t_associated_d_index[t];
            // Clip the detection to the visible image area.
            const cv::Rect2f &d_bounding_box = detections[associated_d_index].rect & image_boundary;

            // Apply associated detection to tracklet
            tracklet->association_idx = detections[associated_d_index].index;
            tracklet->association_fail_count = 0;
            tracklet->label = detections[associated_d_index].class_label;

            auto zttimgless_tracklet = std::dynamic_pointer_cast<ZeroTermImagelessTracklet>(tracklet);
            if (!zttimgless_tracklet)
                continue;

            if (zttimgless_tracklet->status == ST_NEW) {
                // NEW -> TRACKED once matched kMinBirthCount times in a row.
                zttimgless_tracklet->trajectory.back() = d_bounding_box;
                zttimgless_tracklet->trajectory_filtered.back() =
                    zttimgless_tracklet->kalman_filter->Correct(zttimgless_tracklet->trajectory.back());
                zttimgless_tracklet->birth_count += 1;
                if (zttimgless_tracklet->birth_count >= kMinBirthCount) {
                    zttimgless_tracklet->status = ST_TRACKED;
                }
            } else if (zttimgless_tracklet->status == ST_TRACKED) {
                zttimgless_tracklet->trajectory.back() = d_bounding_box;
                zttimgless_tracklet->trajectory_filtered.back() =
                    zttimgless_tracklet->kalman_filter->Correct(zttimgless_tracklet->trajectory.back());
            } else if (zttimgless_tracklet->status == ST_LOST) {
                // Re-acquired: restart trajectory and filter from the detection.
                zttimgless_tracklet->RenewTrajectory(d_bounding_box);
                zttimgless_tracklet->status = ST_TRACKED;
            }
        } else {
            if (tracklet->status == ST_NEW) {
                tracklet->status = ST_DEAD; // regard non-consecutive association as false alarm
            } else if (tracklet->status == ST_TRACKED) {
                tracklet->status = ST_LOST;
                tracklet->association_fail_count = 0;
            } else if (tracklet->status == ST_LOST) {
                if (++tracklet->association_fail_count >= kMaxAssociationFailCount) {
                    // # association fail > threshold while missing -> DEAD
                    tracklet->status = ST_DEAD;
                }
            }
        }
    }
    PROF_END(PROF_COMPONENTS_OT_ZEROTERM_UPDATE_STATUS);

    PROF_START(PROF_COMPONENTS_OT_ZEROTERM_COMPUTE_OCCLUSION);
    ComputeOcclusion();
    PROF_END(PROF_COMPONENTS_OT_ZEROTERM_COMPUTE_OCCLUSION);

    PROF_START(PROF_COMPONENTS_OT_ZEROTERM_REGISTER_OBJECT);
    // 4) Register remaining (unassociated) detections as new objects, honoring
    //    the max_objects_ limit (-1 means unlimited).
    for (int32_t d = 0; d < static_cast<int32_t>(detections.size()); ++d) {
        if (d_is_associated[d] == false) {
            if (static_cast<int32_t>(tracklets_.size()) >= max_objects_ && max_objects_ != -1)
                continue;

            std::unique_ptr<ZeroTermImagelessTracklet> tracklet(new ZeroTermImagelessTracklet());
            tracklet->status = ST_NEW;
            tracklet->id = GetNextTrackingID();
            tracklet->label = detections[d].class_label;
            tracklet->association_idx = detections[d].index;

            const cv::Rect2f &bounding_box = detections[d].rect & image_boundary;
            tracklet->InitTrajectory(bounding_box);
            tracklet->kalman_filter.reset(new KalmanFilterNoOpencv(bounding_box));
            tracklets_.push_back(std::move(tracklet));
        }
    }
    PROF_END(PROF_COMPONENTS_OT_ZEROTERM_REGISTER_OBJECT);

    // 5) Cleanup and bookkeeping.
    RemoveDeadTracklets();
    RemoveOutOfBoundTracklets(input_img_width, input_img_height);
    TrimTrajectories();

    *tracklets = tracklets_;
    IncreaseFrameCount();
    PROF_END(PROF_COMPONENTS_OT_ZEROTERM_RUN_TRACKER);
    return 0;
}
void ZeroTermImagelessTracker::TrimTrajectories() {
for (auto &tracklet : tracklets_) {
auto &trajectory = tracklet->trajectory;
while (trajectory.size() > kMaxTrajectorySize) {
trajectory.pop_front();
}
//
auto &trajectory_filtered = tracklet->trajectory_filtered;
while (trajectory_filtered.size() > kMaxTrajectorySize) {
trajectory_filtered.pop_front();
}
}
}
}; // namespace ot
}; // namespace vas

View File

@ -0,0 +1,37 @@
/*******************************************************************************
* Copyright (C) 2023 Intel Corporation
*
* SPDX-License-Identifier: MIT
******************************************************************************/
#ifndef VAS_OT_ZERO_TERM_IMAGELESS_TRACKER_HPP
#define VAS_OT_ZERO_TERM_IMAGELESS_TRACKER_HPP
#include "tracker.hpp"
#include <deque>
#include <vector>
namespace vas {
namespace ot {
// Zero-term, imageless tracking: consumes detector output every frame and
// associates detections to tracklets using box geometry only (frame pixels
// are not read -- only the frame dimensions are used).
class ZeroTermImagelessTracker : public Tracker {
  public:
    explicit ZeroTermImagelessTracker(vas::ot::Tracker::InitParameters init_param);
    virtual ~ZeroTermImagelessTracker();

    // Runs one tracking step; fills *tracklets with the post-update state.
    // Returns 0 on success.
    virtual int32_t TrackObjects(const cv::Mat &mat, const std::vector<Detection> &detections,
                                 std::vector<std::shared_ptr<Tracklet>> *tracklets, float delta_t) override;

    ZeroTermImagelessTracker() = delete;
    ZeroTermImagelessTracker(const ZeroTermImagelessTracker &) = delete;
    ZeroTermImagelessTracker &operator=(const ZeroTermImagelessTracker &) = delete;

  private:
    // Caps each tracklet's trajectory history at kMaxTrajectorySize entries.
    void TrimTrajectories();
};
}; // namespace ot
}; // namespace vas
#endif // VAS_OT_ZERO_TERM_IMAGELESS_TRACKER_HPP

View File

@ -0,0 +1,41 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2023 Intel Corporation
#include <opencv2/gapi/ot.hpp>
#include <opencv2/gapi/cpu/gcpukernel.hpp>
#include <vas/ot.hpp>
namespace cv
{
namespace gapi
{
namespace ot
{
// Builds the G-API tracking operation over a cv::GMat input.
// Returns per-object arrays of: tracked rects, detection class labels,
// tracking ids, and tracking statuses.
GAPI_EXPORTS_W std::tuple<cv::GArray<cv::Rect>,
                          cv::GArray<int32_t>,
                          cv::GArray<uint64_t>,
                          cv::GArray<TrackingStatus>> track(const cv::GMat& mat,
                                                            const cv::GArray<cv::Rect>& detected_rects,
                                                            const cv::GArray<int>& detected_class_labels,
                                                            float delta)
{
    return GTrackFromMat::on(mat, detected_rects, detected_class_labels, delta);
}
// Builds the G-API tracking operation over a cv::GFrame (media frame) input.
// Returns per-object arrays of: tracked rects, detection class labels,
// tracking ids, and tracking statuses.
GAPI_EXPORTS_W std::tuple<cv::GArray<cv::Rect>,
                          cv::GArray<int32_t>,
                          cv::GArray<uint64_t>,
                          cv::GArray<TrackingStatus>> track(const cv::GFrame& frame,
                                                            const cv::GArray<cv::Rect>& detected_rects,
                                                            const cv::GArray<int>& detected_class_labels,
                                                            float delta)
{
    return GTrackFromFrame::on(frame, detected_rects, detected_class_labels, delta);
}
} // namespace ot
} // namespace gapi
} // namespace cv

View File

@ -0,0 +1,163 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2023 Intel Corporation
#include <opencv2/gapi/ot.hpp>
#include <opencv2/gapi/cpu/ot.hpp>
#include <opencv2/gapi/cpu/gcpukernel.hpp>
#include <vas/ot.hpp>
namespace cv
{
namespace gapi
{
namespace ot
{
// Helper functions for OT kernels
namespace {
// Builds the kernel state (a vas::ot::ObjectTracker) from the compile-time
// parameters. The GArrayDesc/float arguments mirror the kernel's inputs and
// are unused here. Always constructs a ZERO_TERM_IMAGELESS tracker.
void GTrackImplSetup(cv::GArrayDesc, cv::GArrayDesc, float,
                     std::shared_ptr<vas::ot::ObjectTracker>& state,
                     const ObjectTrackerParams& params) {
    vas::ot::ObjectTracker::Builder builder;
    builder.max_num_objects = params.max_num_objects;
    builder.input_image_format = static_cast<vas::ColorFormat>(params.input_image_format);
    builder.tracking_per_class = params.tracking_per_class;
    state = builder.Build(vas::ot::TrackingType::ZERO_TERM_IMAGELESS);
}
// Validates the per-frame inputs, converts (rect, label) pairs into
// vas::ot::DetectedObject entries, and propagates the frame time delta into
// the tracker state.
// Throws std::invalid_argument when the rects/labels sizes disagree.
void GTrackImplPrepare(const std::vector<cv::Rect>& in_rects,
                       const std::vector<int32_t>& in_class_labels,
                       float delta,
                       std::vector<vas::ot::DetectedObject>& detected_objs,
                       vas::ot::ObjectTracker& state)
{
    const std::size_t num_objects = in_rects.size();
    if (num_objects != in_class_labels.size())
    {
        cv::util::throw_error(std::invalid_argument("Track() implementation run() method: in_rects and in_class_labels "
                                                    "sizes are different."));
    }

    state.SetFrameDeltaTime(delta);

    detected_objs.reserve(num_objects);
    for (std::size_t idx = 0u; idx < num_objects; ++idx)
    {
        detected_objs.emplace_back(in_rects[idx], in_class_labels[idx]);
    }
}
} // anonymous namespace
// CPU implementation of GTrackFromMat: a stateful OCV kernel whose state is a
// vas::ot::ObjectTracker shared across frames of one compiled graph.
GAPI_OCV_KERNEL_ST(GTrackFromMatImpl, cv::gapi::ot::GTrackFromMat, vas::ot::ObjectTracker)
{
    // Creates the tracker once, from ObjectTrackerParams found in the compile
    // arguments (defaults apply when the argument is absent).
    static void setup(cv::GMatDesc, cv::GArrayDesc rects_desc,
                      cv::GArrayDesc labels_desc, float delta,
                      std::shared_ptr<vas::ot::ObjectTracker>& state,
                      const cv::GCompileArgs& compile_args)
    {
        auto params = cv::gapi::getCompileArg<ObjectTrackerParams>(compile_args)
                          .value_or(ObjectTrackerParams{});
        // input_image_format == 0 corresponds to BGR (per the assert message).
        GAPI_Assert(params.input_image_format == 0 && "Only BGR input as cv::GMat is supported for now");
        GTrackImplSetup(rects_desc, labels_desc, delta, state, params);
    }

    // Per-frame step: validates/converts inputs, runs the tracker, and unpacks
    // each tracked object into the four parallel output arrays.
    static void run(const cv::Mat& in_mat, const std::vector<cv::Rect>& in_rects,
                    const std::vector<int32_t>& in_class_labels, float delta,
                    std::vector<cv::Rect>& out_tr_rects,
                    std::vector<int32_t>& out_rects_classes,
                    std::vector<uint64_t>& out_tr_ids,
                    std::vector<TrackingStatus>& out_tr_statuses,
                    vas::ot::ObjectTracker& state)
    {
        std::vector<vas::ot::DetectedObject> detected_objs;
        GTrackImplPrepare(in_rects, in_class_labels, delta, detected_objs, state);

        GAPI_Assert(in_mat.type() == CV_8UC3 && "Input mat is not in BGR format");
        auto objects = state.Track(in_mat, detected_objs);
        for (auto&& object : objects)
        {
            out_tr_rects.push_back(object.rect);
            out_rects_classes.push_back(object.class_label);
            out_tr_ids.push_back(object.tracking_id);
            out_tr_statuses.push_back(TrackingStatus(static_cast<int>(object.status)));
        }
    }
};
// CPU implementation of GTrackFromFrame: a stateful OCV kernel whose state is
// a vas::ot::ObjectTracker shared across frames of one compiled graph.
GAPI_OCV_KERNEL_ST(GTrackFromFrameImpl, cv::gapi::ot::GTrackFromFrame, vas::ot::ObjectTracker)
{
    // Creates the tracker once, from ObjectTrackerParams found in the compile
    // arguments (defaults apply when the argument is absent).
    static void setup(cv::GFrameDesc, cv::GArrayDesc rects_desc,
                      cv::GArrayDesc labels_desc, float delta,
                      std::shared_ptr<vas::ot::ObjectTracker>& state,
                      const cv::GCompileArgs& compile_args)
    {
        auto params = cv::gapi::getCompileArg<ObjectTrackerParams>(compile_args)
                          .value_or(ObjectTrackerParams{});
        // input_image_format == 1 corresponds to NV12 (per the assert message).
        GAPI_Assert(params.input_image_format == 1 && "Only NV12 input as cv::GFrame is supported for now");
        GTrackImplSetup(rects_desc, labels_desc, delta, state, params);
    }

    // Per-frame step over a MediaFrame input.
    static void run(const cv::MediaFrame& in_frame, const std::vector<cv::Rect>& in_rects,
                    const std::vector<int32_t>& in_class_labels, float delta,
                    std::vector<cv::Rect>& out_tr_rects,
                    std::vector<int32_t>& out_rects_classes,
                    std::vector<uint64_t>& out_tr_ids,
                    std::vector<TrackingStatus>& out_tr_statuses,
                    vas::ot::ObjectTracker& state)
    {
        std::vector<vas::ot::DetectedObject> detected_objs;
        GTrackImplPrepare(in_rects, in_class_labels, delta, detected_objs, state);

        // Extract metadata from MediaFrame and construct cv::Mat atop of it
        cv::MediaFrame::View view = in_frame.access(cv::MediaFrame::Access::R);
        auto ptrs = view.ptr;
        auto strides = view.stride;
        auto desc = in_frame.desc();

        GAPI_Assert((desc.fmt == cv::MediaFormat::NV12 || desc.fmt == cv::MediaFormat::BGR) \
                    && "Input frame is not in NV12 or BGR format");
        // NOTE(review): only the NV12 branch constructs `in`; for a BGR frame
        // an empty cv::Mat is handed to Track(). setup() requires
        // input_image_format == 1 (NV12), so the BGR case looks unreachable by
        // design -- confirm, and consider tightening the assert above.
        cv::Mat in;
        if (desc.fmt == cv::MediaFormat::NV12) {
            GAPI_Assert(ptrs[0] != nullptr && "Y plane pointer is empty");
            GAPI_Assert(ptrs[1] != nullptr && "UV plane pointer is empty");
            // Wrap the Y plane without copying, honoring the stride if given.
            // NOTE(review): assumes the tracker accepts a Y-plane-only Mat of
            // desc.size for NV12 input -- verify against the tracker's NV12
            // height handling.
            if (strides[0] > 0) {
                in = cv::Mat(desc.size, CV_8UC1, ptrs[0], strides[0]);
            } else {
                in = cv::Mat(desc.size, CV_8UC1, ptrs[0]);
            }
        }
        auto objects = state.Track(in, detected_objs);
        for (auto&& object : objects)
        {
            out_tr_rects.push_back(object.rect);
            out_rects_classes.push_back(object.class_label);
            out_tr_ids.push_back(object.tracking_id);
            out_tr_statuses.push_back(TrackingStatus(static_cast<int>(object.status)));
        }
    }
};
// Returns the CPU (OCV) kernel package implementing the object-tracking
// operations: GTrackFromFrame and GTrackFromMat.
cv::gapi::GKernelPackage cpu::kernels()
{
    return cv::gapi::kernels
        <
            GTrackFromFrameImpl,
            GTrackFromMatImpl
        >();
}
} // namespace ot
} // namespace gapi
} // namespace cv

View File

@ -0,0 +1,287 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2023 Intel Corporation
#include "../test_precomp.hpp"
#include <opencv2/gapi/ot.hpp>
#include <opencv2/gapi/cpu/ot.hpp>
#include "opencv2/gapi/streaming/meta.hpp"
#include "opencv2/gapi/streaming/cap.hpp"
namespace {
// Converts a textual status from the reference YAML files into
// cv::gapi::ot::TrackingStatus; throws std::runtime_error for unknown values.
cv::gapi::ot::TrackingStatus from_string(const std::string& status) {
    using cv::gapi::ot::TrackingStatus;
    if (status == "NEW") {
        return TrackingStatus::NEW;
    }
    if (status == "TRACKED") {
        return TrackingStatus::TRACKED;
    }
    if (status == "LOST") {
        return TrackingStatus::LOST;
    }
    throw std::runtime_error("String representation for cv::gapi::ot::TrackingStatus: \""
                             + status + "\" contains incorrect value!");
}
} // anonymous namespace
namespace opencv_test {
// Scripted detector output: per-frame boxes and their ids, replayed by the
// OCV_CvVideo768x576_Detect kernel. `frame_no` is the replay cursor.
struct FrameDetections {
    std::size_t frame_no{};
    std::vector<std::vector<cv::Rect>> boxes;
    std::vector<std::vector<int32_t>> box_ids;

    FrameDetections() = default;
    FrameDetections(std::size_t in_frame_no,
                    const std::vector<std::vector<cv::Rect>>& in_boxes,
                    const std::vector<std::vector<int32_t>>& in_box_ids)
        : frame_no(in_frame_no), boxes(in_boxes), box_ids(in_box_ids) {}
};
// Pre-recorded expected tracker output, consumed frame-by-frame by the
// OCVCheckTrackerResults kernel. `frame_no` is the replay cursor.
struct TrackerReference {
    std::size_t frame_no{};
    std::vector<std::vector<cv::Rect>> tracked_boxes;
    std::vector<std::vector<int32_t>> tracked_box_ids;
    std::vector<std::vector<uint64_t>> tracking_ids;
    std::vector<std::vector<cv::gapi::ot::TrackingStatus>> tracking_statuses;

    TrackerReference() = default;
    TrackerReference(std::size_t in_frame_no,
                     const std::vector<std::vector<cv::Rect>>& in_tracked_boxes,
                     const std::vector<std::vector<int32_t>>& in_tracked_box_ids,
                     const std::vector<std::vector<uint64_t>>& in_tracking_ids,
                     const std::vector<std::vector<cv::gapi::ot::TrackingStatus>>& in_tracking_statuses)
        : frame_no(in_frame_no),
          tracked_boxes(in_tracked_boxes),
          tracked_box_ids(in_tracked_box_ids),
          tracking_ids(in_tracking_ids),
          tracking_statuses(in_tracking_statuses) {}
};
// Compile-argument wrappers that deliver the scripted detections/reference
// data into the stateful test kernels via cv::compile_args.
struct FrameDetectionsParams {
    FrameDetections value;
};

struct TrackerReferenceParams {
    TrackerReference value;
};
} // namespace opencv_test
namespace cv {
namespace detail {
// G-API compile-argument tags for the test-only parameter structures above,
// making them retrievable via cv::gapi::getCompileArg<>.
template<> struct CompileArgTag<opencv_test::FrameDetectionsParams> {
    static const char* tag() {
        return "org.opencv.test.frame_detections_params";
    }
};

template<> struct CompileArgTag<opencv_test::TrackerReferenceParams> {
    static const char* tag() {
        return "org.opencv.test.tracker_reference_params";
    }
};
} // namespace detail
} // namespace cv
namespace opencv_test {
// Test-only op: emits the scripted detections (boxes + ids) for each frame of
// the 768x576 test video; the frame pixels themselves are ignored.
G_API_OP(CvVideo768x576_Detect, <std::tuple<cv::GArray<cv::Rect>, cv::GArray<int32_t>>(cv::GMat)>,
         "test.custom.cv_video_768x576_detect") {
    static std::tuple<cv::GArrayDesc, cv::GArrayDesc> outMeta(cv::GMatDesc) {
        return std::make_tuple(cv::empty_array_desc(), cv::empty_array_desc());
    }
};
// CPU kernel for CvVideo768x576_Detect. Its state holds the scripted
// detections and a frame cursor advanced on every call.
GAPI_OCV_KERNEL_ST(OCV_CvVideo768x576_Detect, CvVideo768x576_Detect, FrameDetections) {
    // Copies the scripted detections from the compile arguments into the state.
    static void setup(cv::GMatDesc,
                      std::shared_ptr<FrameDetections> &state,
                      const cv::GCompileArgs &compileArgs) {
        auto params = cv::gapi::getCompileArg<opencv_test::FrameDetectionsParams>(compileArgs)
                          .value_or(opencv_test::FrameDetectionsParams{ });
        state = std::make_shared<FrameDetections>(params.value);
    }

    // Replays the detections for the current frame; frames beyond the scripted
    // range produce empty outputs (cursor stops advancing).
    static void run(const cv::Mat&,
                    std::vector<cv::Rect> &out_boxes,
                    std::vector<int32_t> &out_box_ids,
                    FrameDetections &state) {
        if (state.frame_no < state.boxes.size()) {
            out_boxes = state.boxes[state.frame_no];
            out_box_ids = state.box_ids[state.frame_no];
            ++state.frame_no;
        }
    }
};
// Test-only op: compares tracker outputs against the pre-recorded reference
// and yields a single boolean verdict per frame.
G_API_OP(CheckTrackerResults, <cv::GOpaque<bool>(cv::GArray<cv::Rect>, cv::GArray<int32_t>,
                                                 cv::GArray<uint64_t>, cv::GArray<cv::gapi::ot::TrackingStatus>)>,
         "test.custom.check_tracker_results") {
    static cv::GOpaqueDesc outMeta(cv::GArrayDesc, cv::GArrayDesc, cv::GArrayDesc, cv::GArrayDesc) {
        return cv::empty_gopaque_desc();
    }
};
// CPU kernel for CheckTrackerResults. Its state holds the reference trackings
// and a frame cursor advanced on every checked frame.
GAPI_OCV_KERNEL_ST(OCVCheckTrackerResults, CheckTrackerResults, TrackerReference) {
    // Copies the reference data from the compile arguments into the state.
    static void setup(cv::GArrayDesc, cv::GArrayDesc,
                      cv::GArrayDesc, cv::GArrayDesc,
                      std::shared_ptr<TrackerReference> &state,
                      const cv::GCompileArgs &compileArgs) {
        auto params = cv::gapi::getCompileArg<opencv_test::TrackerReferenceParams>(compileArgs)
                          .value_or(opencv_test::TrackerReferenceParams{ });
        state = std::make_shared<TrackerReference>(params.value);
    }

    // Compares this frame's tracker outputs element-wise against the
    // reference. Frames beyond the scripted range report success
    // unconditionally.
    static void run(const std::vector<cv::Rect> &in_tr_rcts,
                    const std::vector<int32_t> &in_det_ids,
                    const std::vector<uint64_t> &in_tr_ids,
                    const std::vector<cv::gapi::ot::TrackingStatus> &in_tr_statuses,
                    bool& success,
                    TrackerReference& state) {
        if (state.frame_no < state.tracked_boxes.size()) {
            auto reference_boxes = state.tracked_boxes[state.frame_no];
            auto reference_box_ids = state.tracked_box_ids[state.frame_no];
            auto reference_tr_ids = state.tracking_ids[state.frame_no];
            auto reference_tr_statuses = state.tracking_statuses[state.frame_no];

            success = true;
            // Sizes must match exactly; element checks stop at first mismatch.
            GAPI_Assert(in_tr_rcts.size() == reference_boxes.size());
            GAPI_Assert(in_det_ids.size() == reference_box_ids.size());
            GAPI_Assert(in_tr_ids.size() == reference_tr_ids.size());
            GAPI_Assert(in_tr_statuses.size() == reference_tr_statuses.size());
            for (uint32_t i = 0; (i < in_tr_rcts.size() && success); ++i) {
                const cv::Rect& reference_rc = reference_boxes[i];
                const cv::Rect& in_rc = in_tr_rcts[i];
                success &= (reference_rc == in_rc);
                success &= (reference_box_ids[i] == in_det_ids[i]);
                success &= (reference_tr_ids[i] == in_tr_ids[i]);
                success &= (reference_tr_statuses[i] == in_tr_statuses[i]);
            }
            ++state.frame_no;
        }
        else {
            success = true;
        }
    }
};
// End-to-end pipeline test for the VAS object tracker: replays 30 frames of
// pre-recorded detections over cv/video/768x576.avi through
// cv::gapi::ot::track() in streaming mode and compares the tracker output
// against pre-recorded reference trackings.
//
// Fix vs. original: `success` was uninitialized and read by EXPECT_TRUE if the
// source yielded no frames; it is now initialized to false. The pull limit is
// also tied to frames_to_handle instead of a duplicated literal 30.
TEST(VASObjectTracker, PipelineTest)
{
    constexpr int32_t frames_to_handle = 30;
    std::string pathToVideo = opencv_test::findDataFile("cv/video/768x576.avi");

    // Load scripted per-frame detections (input for the fake detector kernel).
    // Node names look like "frame_<no>"; boxes carry x/y/width/height/id.
    std::vector<std::vector<cv::Rect>> input_boxes(frames_to_handle);
    std::vector<std::vector<int32_t>> input_det_ids(frames_to_handle);
    std::string path_to_boxes = opencv_test::findDataFile("cv/video/vas_object_tracking/detections_30_frames.yml");
    cv::FileStorage fs_input_boxes(path_to_boxes, cv::FileStorage::READ);
    cv::FileNode fn_input_boxes = fs_input_boxes.root();
    for (auto it = fn_input_boxes.begin(); it != fn_input_boxes.end(); ++it) {
        cv::FileNode fn_frame = *it;
        std::string frame_name = fn_frame.name();
        int frame_no = std::stoi(frame_name.substr(frame_name.find("_") + 1));
        for (auto fit = fn_frame.begin(); fit != fn_frame.end(); ++fit) {
            cv::FileNode fn_box = *fit;
            cv::Rect box((int)fn_box["x"], (int)fn_box["y"],
                         (int)fn_box["width"], (int)fn_box["height"]);
            input_boxes[frame_no].push_back(box);
            input_det_ids[frame_no].push_back(fn_box["id"]);
        }
    }

    // Load reference trackings (expected tracker output per frame), including
    // the tracking id and textual tracking status of every box.
    std::vector<std::vector<cv::Rect>> reference_trackings(frames_to_handle);
    std::vector<std::vector<int32_t>> reference_trackings_det_ids(frames_to_handle);
    std::vector<std::vector<uint64_t>> reference_trackings_tr_ids(frames_to_handle);
    std::vector<std::vector<cv::gapi::ot::TrackingStatus>> reference_trackings_tr_statuses(frames_to_handle);
    std::string path_to_trackings = opencv_test::findDataFile("cv/video/vas_object_tracking/trackings_30_frames.yml");
    cv::FileStorage fs_reference_trackings(path_to_trackings, cv::FileStorage::READ);
    cv::FileNode fn_reference_trackings = fs_reference_trackings.root();
    for (auto it = fn_reference_trackings.begin(); it != fn_reference_trackings.end(); ++it) {
        cv::FileNode fn_frame = *it;
        std::string frame_name = fn_frame.name();
        int frame_no = std::stoi(frame_name.substr(frame_name.find("_") + 1));
        for (auto fit = fn_frame.begin(); fit != fn_frame.end(); ++fit) {
            cv::FileNode fn_tracked_box = *fit;
            cv::Rect tracked_box((int)fn_tracked_box["x"], (int)fn_tracked_box["y"],
                                 (int)fn_tracked_box["width"], (int)fn_tracked_box["height"]);
            reference_trackings[frame_no].push_back(tracked_box);
            reference_trackings_det_ids[frame_no].push_back(fn_tracked_box["id"]);
            reference_trackings_tr_ids[frame_no].push_back(int(fn_tracked_box["tracking_id"]));
            reference_trackings_tr_statuses[frame_no].push_back(
                from_string(fn_tracked_box["tracking_status"]));
        }
    }

    // Graph: scripted detector -> tracker -> reference checker.
    cv::GMat in;
    cv::GArray<cv::Rect> detections;
    cv::GArray<int> det_ids;
    std::tie(detections, det_ids) = CvVideo768x576_Detect::on(in);

    constexpr float delta_time = 0.055f;
    cv::GArray<cv::Rect> tracking_rects;
    cv::GArray<int32_t> tracking_det_ids;
    cv::GArray<uint64_t> tracking_ids;
    cv::GArray<cv::gapi::ot::TrackingStatus> tracking_statuses;
    std::tie(tracking_rects, tracking_det_ids, tracking_ids, tracking_statuses) =
        cv::gapi::ot::track(in, detections, det_ids, delta_time);

    cv::GOpaque<bool> check_result =
        CheckTrackerResults::on(tracking_rects, tracking_det_ids, tracking_ids, tracking_statuses);

    cv::GComputation ccomp(cv::GIn(in), cv::GOut(check_result));

    opencv_test::FrameDetections fds { 0, input_boxes, input_det_ids };
    opencv_test::TrackerReference tr { 0, reference_trackings,
                                          reference_trackings_det_ids,
                                          reference_trackings_tr_ids,
                                          reference_trackings_tr_statuses };

    // Graph compilation for streaming mode:
    auto compiled =
        ccomp.compileStreaming(cv::compile_args(
                                   cv::gapi::combine(cv::gapi::kernels<OCV_CvVideo768x576_Detect,
                                                                       OCVCheckTrackerResults>(),
                                                     cv::gapi::ot::cpu::kernels()),
                                   opencv_test::FrameDetectionsParams{ fds },
                                   opencv_test::TrackerReferenceParams{ tr }));
    EXPECT_TRUE(compiled);
    EXPECT_FALSE(compiled.running());

    compiled.setSource<cv::gapi::wip::GCaptureSource>(pathToVideo);

    // Start of streaming:
    compiled.start();
    EXPECT_TRUE(compiled.running());

    // Streaming: `success` holds the last frame's verdict; initialized so the
    // final EXPECT_TRUE is well-defined even if no frame is ever pulled.
    bool success = false;
    std::size_t counter { }, limit { frames_to_handle };
    while (compiled.pull(cv::gout(success)) && (counter < limit)) {
        ++counter;
    }
    compiled.stop();
    EXPECT_TRUE(success);
    EXPECT_FALSE(compiled.running());
}
} // namespace opencv_test