Merge pull request #22863 from savuor:tsdf_tests_join

### Changes
* Removed duplicated code in the TSDF tests by reimplementing them with fixtures and GTest parameters
  * e.g. the separate OCL tests file was removed
  * as a result, more test cases are covered
  * the same was done for the perf tests (a minimal sketch of the pattern is shown below)
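
For readers unfamiliar with the approach, here is a minimal, hypothetical sketch of the pattern applied here: one value-parameterized GTest fixture replaces the per-backend/per-source-type test copies. Names such as `DummyVolumeFixture` are illustrative only; the real fixtures in the diff below also hook into OpenCV's test and perf harnesses.

```cpp
#include <gtest/gtest.h>
#include <tuple>

enum class Backend { CPU, GPU };
enum class SrcType { MAT, ODOMETRY_FRAME };

// One fixture parameterized over backend and input source type
class DummyVolumeFixture : public ::testing::TestWithParam<std::tuple<Backend, SrcType>>
{
protected:
    void SetUp() override
    {
        backend = std::get<0>(GetParam());
        srcType = std::get<1>(GetParam());
        // e.g. switch OpenCL off here when backend == Backend::CPU
    }
    Backend backend;
    SrcType srcType;
};

// A single test body now covers every parameter combination
TEST_P(DummyVolumeFixture, integrate)
{
    SUCCEED();
}

INSTANTIATE_TEST_CASE_P(Volume, DummyVolumeFixture,
    ::testing::Combine(::testing::Values(Backend::CPU, Backend::GPU),
                       ::testing::Values(SrcType::MAT, SrcType::ODOMETRY_FRAME)));
```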

### Pull Request Readiness Checklist

See details at https://github.com/opencv/opencv/wiki/How_to_contribute#making-a-good-pull-request

- [x] I agree to contribute to the project under Apache 2 License.
- [x] To the best of my knowledge, the proposed patch is not based on code under GPL or another license that is incompatible with OpenCV
- [x] The PR is proposed to the proper branch
- [x] There is a reference to the original bug report and related work
- [x] There are accuracy tests, performance tests and test data in the opencv_extra repository, if applicable
      Patch to opencv_extra has the same branch name.
- [x] The feature is well documented and sample code can be built with the project CMake
Rostislav Vasilikhin 2022-12-06 10:15:39 +01:00 committed by GitHub
parent a423b07149
commit 86c6e07326
8 changed files with 597 additions and 1920 deletions


@@ -31,7 +31,6 @@ public:
* @param volumeType volume type.
*/
VolumeSettings(VolumeType volumeType);
VolumeSettings(const VolumeSettings& vs);
~VolumeSettings();


@@ -416,549 +416,261 @@ void displayColorImage(Mat depth, Mat rgb, Mat points, Mat normals, Mat colors,
static const bool display = false;
// Perf_TSDF_GPU.integrate_mat
#ifdef HAVE_OPENCL
PERF_TEST(Perf_TSDF_GPU, integrate_mat)
#else
PERF_TEST(Perf_TSDF, integrate_mat)
#endif
enum PlatformType
{
VolumeType volumeType = VolumeType::TSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
CPU = 0, GPU = 1
};
CV_ENUM(PlatformTypeEnum, PlatformType::CPU, PlatformType::GPU);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
enum Sequence
{
ALL = 0, FIRST = 1
};
CV_ENUM(SequenceEnum, Sequence::ALL, Sequence::FIRST);
for (size_t i = 0; i < poses.size(); i++)
enum class VolumeTestSrcType
{
MAT = 0,
ODOMETRY_FRAME = 1
};
// used to store current OpenCL status (on/off) and revert it after test is done
// works even after exceptions thrown in test body
struct OpenCLStatusRevert
{
#ifdef HAVE_OPENCL
OpenCLStatusRevert()
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
startTimer();
volume.integrate(depth, pose);
stopTimer();
originalOpenCLStatus = cv::ocl::useOpenCL();
}
SANITY_CHECK_NOTHING();
}
// Perf_TSDF_GPU.integrate_frame
#ifdef HAVE_OPENCL
PERF_TEST(Perf_TSDF_GPU, integrate_frame)
#else
PERF_TEST(Perf_TSDF, integrate_frame)
#endif
{
VolumeType volumeType = VolumeType::TSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
~OpenCLStatusRevert()
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
OdometryFrame odf(noArray(), depth);
startTimer();
volume.integrate(odf, pose);
stopTimer();
cv::ocl::setUseOpenCL(originalOpenCLStatus);
}
SANITY_CHECK_NOTHING();
}
// Perf_TSDF_GPU.raycast_mat
#ifdef HAVE_OPENCL
PERF_TEST(Perf_TSDF_GPU, raycast_mat)
#else
PERF_TEST(Perf_TSDF, raycast_mat)
#endif
{
VolumeType volumeType = VolumeType::TSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Vec3f lightPose = Vec3f::all(0.f);
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
void off()
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
Mat points, normals;
volume.integrate(depth, pose);
startTimer();
volume.raycast(pose, frameSize.height, frameSize.width, intrRaycast, points, normals);
stopTimer();
if (display)
displayImage(depth, points, normals, depthFactor, lightPose);
}
SANITY_CHECK_NOTHING();
}
#ifdef HAVE_OPENCL
// Perf_TSDF_CPU.integrate_mat
PERF_TEST(Perf_TSDF_CPU, integrate_mat)
{
cv::ocl::setUseOpenCL(false);
VolumeType volumeType = VolumeType::TSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
startTimer();
volume.integrate(depth, pose);
stopTimer();
}
SANITY_CHECK_NOTHING();
cv::ocl::setUseOpenCL(true);
}
// Perf_TSDF_CPU.integrate_frame
PERF_TEST(Perf_TSDF_CPU, integrate_frame)
{
cv::ocl::setUseOpenCL(false);
VolumeType volumeType = VolumeType::TSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
OdometryFrame odf(noArray(), depth);
startTimer();
volume.integrate(odf, pose);
stopTimer();
}
SANITY_CHECK_NOTHING();
cv::ocl::setUseOpenCL(true);
}
// Perf_TSDF_CPU.raycast_mat
PERF_TEST(Perf_TSDF_CPU, raycast_mat)
{
cv::ocl::setUseOpenCL(false);
VolumeType volumeType = VolumeType::TSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Vec3f lightPose = Vec3f::all(0.f);
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
Mat points, normals;
volume.integrate(depth, pose);
startTimer();
volume.raycast(pose, frameSize.height, frameSize.width, intrRaycast, points, normals);
stopTimer();
if (display)
displayImage(depth, points, normals, depthFactor, lightPose);
}
SANITY_CHECK_NOTHING();
cv::ocl::setUseOpenCL(true);
}
#endif
// Perf_HashTSDF_GPU.integrate_mat
#ifdef HAVE_OPENCL
PERF_TEST(Perf_HashTSDF_GPU, integrate_mat)
bool originalOpenCLStatus;
#else
PERF_TEST(Perf_HashTSDF, integrate_mat)
void off() { }
#endif
};
// CV_ENUM does not support enum class types, so let's implement the class explicitly
namespace
{
VolumeType volumeType = VolumeType::HashTSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
struct VolumeTypeEnum
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
startTimer();
volume.integrate(depth, pose);
stopTimer();
static const std::array<VolumeType, 3> vals;
static const std::array<std::string, 3> svals;
VolumeTypeEnum(VolumeType v = VolumeType::TSDF) : val(v) {}
operator VolumeType() const { return val; }
void PrintTo(std::ostream *os) const
{
int v = int(val);
if (v >= 0 && v < 3)
{
*os << svals[v];
}
SANITY_CHECK_NOTHING();
else
{
*os << "UNKNOWN";
}
}
static ::testing::internal::ParamGenerator<VolumeTypeEnum> all()
{
return ::testing::Values(VolumeTypeEnum(vals[0]), VolumeTypeEnum(vals[1]), VolumeTypeEnum(vals[2]));
}
private:
VolumeType val;
};
const std::array<VolumeType, 3> VolumeTypeEnum::vals{VolumeType::TSDF, VolumeType::HashTSDF, VolumeType::ColorTSDF};
const std::array<std::string, 3> VolumeTypeEnum::svals{std::string("TSDF"), std::string("HashTSDF"), std::string("ColorTSDF")};
static inline void PrintTo(const VolumeTypeEnum &t, std::ostream *os) { t.PrintTo(os); }
struct VolumeTestSrcTypeEnum
{
static const std::array<VolumeTestSrcType, 2> vals;
static const std::array<std::string, 2> svals;
VolumeTestSrcTypeEnum(VolumeTestSrcType v = VolumeTestSrcType::MAT) : val(v) {}
operator VolumeTestSrcType() const { return val; }
void PrintTo(std::ostream *os) const
{
int v = int(val);
if (v >= 0 && v < 2)
{
*os << svals[v];
}
else
{
*os << "UNKNOWN";
}
}
static ::testing::internal::ParamGenerator<VolumeTestSrcTypeEnum> all()
{
return ::testing::Values(VolumeTestSrcTypeEnum(vals[0]), VolumeTestSrcTypeEnum(vals[1]));
}
private:
VolumeTestSrcType val;
};
const std::array<VolumeTestSrcType, 2> VolumeTestSrcTypeEnum::vals{VolumeTestSrcType::MAT, VolumeTestSrcType::ODOMETRY_FRAME};
const std::array<std::string, 2> VolumeTestSrcTypeEnum::svals{std::string("UMat"), std::string("OdometryFrame")};
static inline void PrintTo(const VolumeTestSrcTypeEnum &t, std::ostream *os) { t.PrintTo(os); }
}
// Perf_HashTSDF_GPU.integrate_frame
#ifdef HAVE_OPENCL
PERF_TEST(Perf_HashTSDF_GPU, integrate_frame)
#else
PERF_TEST(Perf_HashTSDF, integrate_frame)
#endif
typedef std::tuple<PlatformTypeEnum, VolumeTypeEnum> PlatformVolumeType;
class VolumePerfFixture : public perf::TestBaseWithParam<std::tuple<PlatformVolumeType, VolumeTestSrcTypeEnum, SequenceEnum>>
{
VolumeType volumeType = VolumeType::HashTSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
protected:
void SetUp() override
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
OdometryFrame odf(noArray(), depth);
TestBase::SetUp();
startTimer();
volume.integrate(odf, pose);
stopTimer();
auto p = GetParam();
gpu = std::get<0>(std::get<0>(p));
volumeType = std::get<1>(std::get<0>(p));
}
SANITY_CHECK_NOTHING();
}
testSrcType = std::get<1>(p);
repeat1st = (std::get<2>(p) == Sequence::FIRST);
if (!gpu)
oclStatus.off();
// Perf_HashTSDF_GPU.raycast_mat
#ifdef HAVE_OPENCL
PERF_TEST(Perf_HashTSDF_GPU, raycast_mat)
#else
PERF_TEST(Perf_HashTSDF, raycast_mat)
#endif
{
VolumeType volumeType = VolumeType::HashTSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
volume = makePtr<Volume>(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
frameSize = Size(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Vec3f lightPose = Vec3f::all(0.f);
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
Mat points, normals;
volume.integrate(depth, pose);
startTimer();
volume.raycast(pose, frameSize.height, frameSize.width, intrRaycast, points, normals);
stopTimer();
if (display)
displayImage(depth, points, normals, depthFactor, lightPose);
depthFactor = vs.getDepthFactor();
scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
poses = scene->getPoses();
}
SANITY_CHECK_NOTHING();
}
#ifdef HAVE_OPENCL
// Perf_HashTSDF_CPU.integrate_mat
PERF_TEST(Perf_HashTSDF_CPU, integrate_mat)
bool gpu;
VolumeType volumeType;
VolumeTestSrcType testSrcType;
bool repeat1st;
OpenCLStatusRevert oclStatus;
Ptr<Volume> volume;
Size frameSize;
Matx33f intrRaycast;
Ptr<Scene> scene;
std::vector<Affine3f> poses;
float depthFactor;
};
PERF_TEST_P_(VolumePerfFixture, integrate)
{
cv::ocl::setUseOpenCL(false);
VolumeType volumeType = VolumeType::HashTSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
startTimer();
volume.integrate(depth, pose);
stopTimer();
}
SANITY_CHECK_NOTHING();
cv::ocl::setUseOpenCL(true);
}
// Perf_HashTSDF_CPU.integrate_frame
PERF_TEST(Perf_HashTSDF_CPU, integrate_frame)
{
cv::ocl::setUseOpenCL(false);
VolumeType volumeType = VolumeType::HashTSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
OdometryFrame odf(noArray(), depth);
startTimer();
volume.integrate(odf, pose);
stopTimer();
}
SANITY_CHECK_NOTHING();
cv::ocl::setUseOpenCL(true);
}
// Perf_HashTSDF_CPU.raycast_mat
PERF_TEST(Perf_HashTSDF_CPU, raycast_mat)
{
cv::ocl::setUseOpenCL(false);
VolumeType volumeType = VolumeType::HashTSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Vec3f lightPose = Vec3f::all(0.f);
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
Mat points, normals;
volume.integrate(depth, pose);
startTimer();
volume.raycast(pose, frameSize.height, frameSize.width, intrRaycast, points, normals);
stopTimer();
if (display)
displayImage(depth, points, normals, depthFactor, lightPose);
}
SANITY_CHECK_NOTHING();
cv::ocl::setUseOpenCL(true);
}
#endif
// Perf_ColorTSDF_CPU.integrate_mat
#ifdef HAVE_OPENCL
PERF_TEST(Perf_ColorTSDF_CPU, integrate_mat)
#else
PERF_TEST(Perf_ColorTSDF, integrate_mat)
#endif
{
VolumeType volumeType = VolumeType::ColorTSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
for (size_t i = 0; i < (repeat1st ? 1 : poses.size()); i++)
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
Mat rgb = scene->rgb(pose);
UMat urgb, udepth;
depth.copyTo(udepth);
rgb.copyTo(urgb);
OdometryFrame odf(urgb, udepth);
bool done = false;
while (repeat1st ? next() : !done)
{
startTimer();
volume.integrate(depth, rgb, pose);
if (testSrcType == VolumeTestSrcType::MAT)
if (volumeType == VolumeType::ColorTSDF)
volume->integrate(udepth, urgb, pose);
else
volume->integrate(udepth, pose);
else if (testSrcType == VolumeTestSrcType::ODOMETRY_FRAME)
volume->integrate(odf, pose);
stopTimer();
// perf check makes sense only for identical states
if (repeat1st)
volume->reset();
done = true;
}
}
SANITY_CHECK_NOTHING();
}
// Perf_ColorTSDF_CPU.integrate_frame
#ifdef HAVE_OPENCL
PERF_TEST(Perf_ColorTSDF_CPU, integrate_frame)
#else
PERF_TEST(Perf_ColorTSDF, integrate_frame)
#endif
PERF_TEST_P_(VolumePerfFixture, raycast)
{
VolumeType volumeType = VolumeType::ColorTSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
for (size_t i = 0; i < (repeat1st ? 1 : poses.size()); i++)
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
Mat rgb = scene->rgb(pose);
OdometryFrame odf(rgb, depth);
UMat urgb, udepth;
depth.copyTo(udepth);
rgb.copyTo(urgb);
OdometryFrame odf(urgb, udepth);
if (testSrcType == VolumeTestSrcType::MAT)
if (volumeType == VolumeType::ColorTSDF)
volume->integrate(udepth, urgb, pose);
else
volume->integrate(udepth, pose);
else if (testSrcType == VolumeTestSrcType::ODOMETRY_FRAME)
volume->integrate(odf, pose);
UMat upoints, unormals, ucolors;
bool done = false;
while (repeat1st ? next() : !done)
{
startTimer();
volume.integrate(odf, pose);
if (volumeType == VolumeType::ColorTSDF)
volume->raycast(pose, frameSize.height, frameSize.width, intrRaycast, upoints, unormals, ucolors);
else
volume->raycast(pose, frameSize.height, frameSize.width, intrRaycast, upoints, unormals);
stopTimer();
done = true;
}
SANITY_CHECK_NOTHING();
}
// Perf_ColorTSDF_CPU.raycast_mat
#ifdef HAVE_OPENCL
PERF_TEST(Perf_ColorTSDF_CPU, raycast_mat)
#else
PERF_TEST(Perf_ColorTSDF, raycast_mat)
#endif
{
VolumeType volumeType = VolumeType::ColorTSDF;
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = false;
float depthFactor = vs.getDepthFactor();
Vec3f lightPose = Vec3f::all(0.f);
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
for (size_t i = 0; i < poses.size(); i++)
if (display)
{
Matx44f pose = poses[i].matrix;
Mat depth = scene->depth(pose);
Mat rgb = scene->rgb(pose);
Mat points, normals, colors;
points = upoints.getMat(ACCESS_READ);
normals = unormals.getMat(ACCESS_READ);
colors = ucolors.getMat(ACCESS_READ);
startTimer();
volume.integrate(depth, rgb, pose);
startTimer();
volume.raycast(pose, frameSize.height, frameSize.width, intrRaycast, points, normals, colors);
stopTimer();
if (display)
Vec3f lightPose = Vec3f::all(0.f);
if (volumeType == VolumeType::ColorTSDF)
displayColorImage(depth, rgb, points, normals, colors, depthFactor, lightPose);
else
displayImage(depth, points, normals, depthFactor, lightPose);
}
}
SANITY_CHECK_NOTHING();
}
//TODO: fix it when ColorTSDF gets GPU version
INSTANTIATE_TEST_CASE_P(Volume, VolumePerfFixture, /*::testing::Combine(PlatformTypeEnum::all(), VolumeTypeEnum::all())*/
::testing::Combine(
::testing::Values(PlatformVolumeType {PlatformType::CPU, VolumeType::TSDF},
PlatformVolumeType {PlatformType::CPU, VolumeType::HashTSDF},
PlatformVolumeType {PlatformType::CPU, VolumeType::ColorTSDF},
PlatformVolumeType {PlatformType::GPU, VolumeType::TSDF},
PlatformVolumeType {PlatformType::GPU, VolumeType::HashTSDF}),
VolumeTestSrcTypeEnum::all(), SequenceEnum::all()));
}} // namespace
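
Because the rendered diff above interleaves removed and added lines, the overall shape of the new perf file can be hard to follow. Below is a condensed sketch assembled from the added lines (abbreviated and hedged, not the verbatim committed code); it relies on the PlatformTypeEnum / VolumeTypeEnum / VolumeTestSrcTypeEnum / SequenceEnum helpers defined in the added lines above.

```cpp
// Condensed sketch of the consolidated perf test structure (abbreviated, not verbatim).

// RAII helper: remembers the OpenCL on/off state and restores it when the test
// finishes, even if the test body throws.
struct OpenCLStatusRevert
{
#ifdef HAVE_OPENCL
    OpenCLStatusRevert()  { originalOpenCLStatus = cv::ocl::useOpenCL(); }
    ~OpenCLStatusRevert() { cv::ocl::setUseOpenCL(originalOpenCLStatus); }
    void off()            { cv::ocl::setUseOpenCL(false); }
    bool originalOpenCLStatus;
#else
    void off() { }
#endif
};

typedef std::tuple<PlatformTypeEnum, VolumeTypeEnum> PlatformVolumeType;

class VolumePerfFixture :
    public perf::TestBaseWithParam<std::tuple<PlatformVolumeType, VolumeTestSrcTypeEnum, SequenceEnum>>
{
protected:
    void SetUp() override
    {
        TestBase::SetUp();
        auto p      = GetParam();
        gpu         = std::get<0>(std::get<0>(p));   // CPU = 0, GPU = 1
        volumeType  = std::get<1>(std::get<0>(p));
        testSrcType = std::get<1>(p);
        repeat1st   = (std::get<2>(p) == Sequence::FIRST);
        if (!gpu)
            oclStatus.off();
        // build VolumeSettings, Volume, Scene and camera poses here (omitted)
    }

    bool gpu, repeat1st;
    VolumeType volumeType;
    VolumeTestSrcType testSrcType;
    OpenCLStatusRevert oclStatus;
    // Ptr<Volume> volume; Size frameSize; Matx33f intrRaycast;
    // Ptr<Scene> scene; std::vector<Affine3f> poses; float depthFactor;
};

// PERF_TEST_P_(VolumePerfFixture, integrate) and PERF_TEST_P_(VolumePerfFixture, raycast)
// then loop over the poses (or only the first one when Sequence::FIRST is selected),
// feed the volume either UMats or an OdometryFrame depending on testSrcType,
// and time only the integrate()/raycast() calls themselves.
```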


@@ -1243,16 +1243,15 @@ void fetchNormalsFromHashTsdfVolumeUnit(
Matx44f _pose;
settings.getVolumePose(_pose);
const Affine3f pose = Affine3f(_pose);
const Affine3f pose(_pose);
auto HashPushNormals = [&](const ptype& point, const int* position) {
//Affine3f invPose(pose.inv());
Affine3f invPose(pose.inv());
Point3f p = fromPtype(point);
Point3f n = nan3;
if (!isNaN(p))
{
//Point3f voxelPoint = invPose * p;
Point3f voxelPoint = p;
Point3f voxelPoint = invPose * p;
n = pose.rotation() * getNormalVoxel(voxelPoint, voxelSizeInv, volumeUnitDegree, volDims, volUnitsData, volumeUnits);
}
normals(position[0], position[1]) = toPtype(n);
@@ -1262,7 +1261,7 @@ void fetchNormalsFromHashTsdfVolumeUnit(
}
#ifdef HAVE_OPENCL
void olc_fetchNormalsFromHashTsdfVolumeUnit(
void ocl_fetchNormalsFromHashTsdfVolumeUnit(
const VolumeSettings& settings, const int volumeUnitDegree, InputArray _volUnitsData, InputArray _volUnitsDataCopy,
const CustomHashSet& hashTable, InputArray _points, OutputArray _normals)
{
@@ -1332,6 +1331,10 @@ void fetchPointsNormalsFromHashTsdfVolumeUnit(
const Vec4i volDims;
settings.getVolumeStrides(volDims);
Matx44f mpose;
settings.getVolumePose(mpose);
const Affine3f pose(mpose);
std::vector<Vec3i> totalVolUnits;
for (const auto& keyvalue : volumeUnits)
{
@@ -1343,6 +1346,7 @@ void fetchPointsNormalsFromHashTsdfVolumeUnit(
bool needNormals(_normals.needed());
Mutex mutex;
//TODO: this is incorrect; a 0-surface should be captured instead of all non-zero voxels
auto HashFetchPointsNormalsInvoker = [&](const Range& range)
{
std::vector<ptype> points, normals;
@@ -1363,14 +1367,15 @@ void fetchPointsNormalsFromHashTsdfVolumeUnit(
cv::Vec3i voxelIdx(x, y, z);
TsdfVoxel voxel = _at(volUnitsData, voxelIdx, it->second.index, volResolution.x, volDims);
// floatToTsdf(1.0) == -128
if (voxel.tsdf != -128 && voxel.weight != 0)
{
Point3f point = base_point + voxelCoordToVolume(voxelIdx, voxelSize);
localPoints.push_back(toPtype(point));
localPoints.push_back(toPtype(pose * point));
if (needNormals)
{
Point3f normal = getNormalVoxel(point, voxelSizeInv, volumeUnitDegree, volDims, volUnitsData, volumeUnits);
localNormals.push_back(toPtype(normal));
localNormals.push_back(toPtype(pose.rotation() * normal));
}
}
}
@@ -1459,6 +1464,10 @@ void ocl_fetchPointsNormalsFromHashTsdfVolumeUnit(
const Vec4i volDims;
settings.getVolumeStrides(volDims);
Matx44f mpose;
settings.getVolumePose(mpose);
const Affine3f pose(mpose);
Range _fetchRange(0, hashTable.last);
const int nstripes = -1;
@@ -1466,6 +1475,7 @@ void ocl_fetchPointsNormalsFromHashTsdfVolumeUnit(
bool needNormals(_normals.needed());
Mutex mutex;
//TODO: this is incorrect; a 0-surface should be captured instead of all non-zero voxels
auto _HashFetchPointsNormalsInvoker = [&](const Range& range)
{
std::vector<ptype> points, normals;
@@ -1485,15 +1495,16 @@ void ocl_fetchPointsNormalsFromHashTsdfVolumeUnit(
cv::Vec3i voxelIdx(x, y, z);
TsdfVoxel voxel = new_at(volUnitsDataCopy, voxelIdx, row, volumeUnitResolution, volDims);
// floatToTsdf(1.0) == -128
if (voxel.tsdf != -128 && voxel.weight != 0)
{
Point3f point = base_point + voxelCoordToVolume(voxelIdx, voxelSize);
localPoints.push_back(toPtype(point));
localPoints.push_back(toPtype(pose * point));
if (needNormals)
{
Point3f normal = ocl_getNormalVoxel(point, voxelSizeInv, volumeUnitDegree, volDims, volUnitsDataCopy, hashTable);
localNormals.push_back(toPtype(normal));
localNormals.push_back(toPtype(pose.rotation() * normal));
}
}
}
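
The substantive change in the two fetch functions above is that fetched points are now returned in world coordinates: each point is mapped through the volume pose and each normal through its rotation only. A minimal standalone sketch of that transform (illustrative, not part of the diff):

```cpp
#include <opencv2/core.hpp>
#include <opencv2/core/affine.hpp>

// Map a voxel-space point and normal into world space using the volume pose:
// points get the full rigid transform, normals only the rotation part.
static void toWorld(const cv::Affine3f& pose,
                    const cv::Point3f& voxelPoint, const cv::Point3f& voxelNormal,
                    cv::Point3f& worldPoint, cv::Point3f& worldNormal)
{
    worldPoint  = pose * voxelPoint;              // rotation + translation
    worldNormal = pose.rotation() * voxelNormal;  // rotation only
}
```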


@@ -310,7 +310,7 @@ void ocl_raycastHashTsdfVolumeUnit(
const VolumeSettings& settings, const Matx44f& cameraPose, int height, int width, InputArray intr, const int volumeUnitDegree,
const CustomHashSet& hashTable, InputArray _volUnitsData, OutputArray _points, OutputArray _normals);
void olc_fetchNormalsFromHashTsdfVolumeUnit(
void ocl_fetchNormalsFromHashTsdfVolumeUnit(
const VolumeSettings& settings, const int volumeUnitDegree, InputArray _volUnitsData, InputArray _volUnitsDataCopy,
const CustomHashSet& hashTable, InputArray _points, OutputArray _normals);


@@ -1176,7 +1176,7 @@ inline void coord(const Mat& volume, const TsdfVoxel* volDataStart, std::vector<
if (limits)
{
const TsdfVoxel& voxeld = volDataStart[(x + shift.x) * volDims[0] +
const TsdfVoxel &voxeld = volDataStart[(x + shift.x) * volDims[0] +
(y + shift.y) * volDims[1] +
(z + shift.z) * volDims[2]];
float vd = tsdfToFloat(voxeld.tsdf);
@@ -1273,8 +1273,6 @@ void fetchPointsNormalsFromTsdfVolumeUnit(const VolumeSettings& settings, InputA
parallel_for_(fetchRange, FetchPointsNormalsInvoker, nstripes);
std::vector<ptype> points, normals;
for (size_t i = 0; i < pVecs.size(); i++)
{


@@ -324,7 +324,7 @@ void HashTsdfVolume::fetchNormals(InputArray points, OutputArray normals) const
fetchNormalsFromHashTsdfVolumeUnit(settings, volUnitsData, volumeUnits, volumeUnitDegree, points, normals);
#else
if (useGPU)
olc_fetchNormalsFromHashTsdfVolumeUnit(settings, volumeUnitDegree, gpu_volUnitsData, volUnitsDataCopy, hashTable, points, normals);
ocl_fetchNormalsFromHashTsdfVolumeUnit(settings, volumeUnitDegree, gpu_volUnitsData, volUnitsDataCopy, hashTable, points, normals);
else
fetchNormalsFromHashTsdfVolumeUnit(settings, cpu_volUnitsData, cpu_volumeUnits, volumeUnitDegree, points, normals);


@@ -1,682 +0,0 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
#include "../test_precomp.hpp"
#include "opencv2/ts/ocl_test.hpp"
#ifdef HAVE_OPENCL
namespace opencv_test {
namespace {
using namespace cv;
/** Reprojects screen point to camera space given z coord. */
struct Reprojector
{
Reprojector() {}
inline Reprojector(Matx33f intr)
{
fxinv = 1.f / intr(0, 0), fyinv = 1.f / intr(1, 1);
cx = intr(0, 2), cy = intr(1, 2);
}
template<typename T>
inline cv::Point3_<T> operator()(cv::Point3_<T> p) const
{
T x = p.z * (p.x - cx) * fxinv;
T y = p.z * (p.y - cy) * fyinv;
return cv::Point3_<T>(x, y, p.z);
}
float fxinv, fyinv, cx, cy;
};
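
A quick worked example of the reprojection above (hypothetical numbers): with fx = fy = 500 and cx = cy = 320, the pixel (420, 320) observed at depth z = 2 m maps to x = 2 · (420 − 320) / 500 = 0.4, y = 2 · (320 − 320) / 500 = 0, z = 2.
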
template<class Scene>
struct RenderInvoker : ParallelLoopBody
{
RenderInvoker(Mat_<float>& _frame, Affine3f _pose,
Reprojector _reproj, float _depthFactor, bool _onlySemisphere)
: ParallelLoopBody(),
frame(_frame),
pose(_pose),
reproj(_reproj),
depthFactor(_depthFactor),
onlySemisphere(_onlySemisphere)
{ }
virtual void operator ()(const cv::Range& r) const
{
for (int y = r.start; y < r.end; y++)
{
float* frameRow = frame[y];
for (int x = 0; x < frame.cols; x++)
{
float pix = 0;
Point3f orig = pose.translation();
// direction through pixel
Point3f screenVec = reproj(Point3f((float)x, (float)y, 1.f));
float xyt = 1.f / (screenVec.x * screenVec.x +
screenVec.y * screenVec.y + 1.f);
Point3f dir = normalize(Vec3f(pose.rotation() * screenVec));
// screen space axis
dir.y = -dir.y;
const float maxDepth = 20.f;
const float maxSteps = 256;
float t = 0.f;
for (int step = 0; step < maxSteps && t < maxDepth; step++)
{
Point3f p = orig + dir * t;
float d = Scene::map(p, onlySemisphere);
if (d < 0.000001f)
{
float depth = std::sqrt(t * t * xyt);
pix = depth * depthFactor;
break;
}
t += d;
}
frameRow[x] = pix;
}
}
}
Mat_<float>& frame;
Affine3f pose;
Reprojector reproj;
float depthFactor;
bool onlySemisphere;
};
struct Scene
{
virtual ~Scene() {}
static Ptr<Scene> create(Size sz, Matx33f _intr, float _depthFactor, bool onlySemisphere);
virtual Mat depth(Affine3f pose) = 0;
virtual std::vector<Affine3f> getPoses() = 0;
};
struct SemisphereScene : Scene
{
const int framesPerCycle = 72;
const float nCycles = 0.25f;
const Affine3f startPose = Affine3f(Vec3f(0.f, 0.f, 0.f), Vec3f(1.5f, 0.3f, -2.1f));
Size frameSize;
Matx33f intr;
float depthFactor;
bool onlySemisphere;
SemisphereScene(Size sz, Matx33f _intr, float _depthFactor, bool _onlySemisphere) :
frameSize(sz), intr(_intr), depthFactor(_depthFactor), onlySemisphere(_onlySemisphere)
{ }
static float map(Point3f p, bool onlySemisphere)
{
float plane = p.y + 0.5f;
Point3f spherePose = p - Point3f(-0.0f, 0.3f, 1.1f);
float sphereRadius = 0.5f;
float sphere = (float)cv::norm(spherePose) - sphereRadius;
float sphereMinusBox = sphere;
float subSphereRadius = 0.05f;
Point3f subSpherePose = p - Point3f(0.3f, -0.1f, -0.3f);
float subSphere = (float)cv::norm(subSpherePose) - subSphereRadius;
float res;
if (!onlySemisphere)
res = min({ sphereMinusBox, subSphere, plane });
else
res = sphereMinusBox;
return res;
}
Mat depth(Affine3f pose) override
{
Mat_<float> frame(frameSize);
Reprojector reproj(intr);
Range range(0, frame.rows);
parallel_for_(range, RenderInvoker<SemisphereScene>(frame, pose, reproj, depthFactor, onlySemisphere));
return std::move(frame);
}
std::vector<Affine3f> getPoses() override
{
std::vector<Affine3f> poses;
for (int i = 0; i < framesPerCycle * nCycles; i++)
{
float angle = (float)(CV_2PI * i / framesPerCycle);
Affine3f pose;
pose = pose.rotate(startPose.rotation());
pose = pose.rotate(Vec3f(0.f, -0.5f, 0.f) * angle);
pose = pose.translate(Vec3f(startPose.translation()[0] * sin(angle),
startPose.translation()[1],
startPose.translation()[2] * cos(angle)));
poses.push_back(pose);
}
return poses;
}
};
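
The map() function above is a signed-distance field: each primitive returns a signed distance (negative inside, positive outside), and taking the pointwise minimum of several distances gives the distance to their union. A standalone restatement of that idea (illustrative, not part of the removed file):

```cpp
#include <opencv2/core.hpp>
#include <algorithm>

static float sdSphere(const cv::Point3f& p, const cv::Point3f& center, float radius)
{
    return (float)cv::norm(p - center) - radius;
}

static float sdPlaneY(const cv::Point3f& p, float y0)  // horizontal plane y = y0
{
    return p.y - y0;
}

static float sceneSdf(const cv::Point3f& p)
{
    // union of the plane at y = -0.5 and the 0.5-radius sphere at (0, 0.3, 1.1),
    // mirroring the primitives combined in map() above
    return std::min(sdPlaneY(p, -0.5f), sdSphere(p, cv::Point3f(0.f, 0.3f, 1.1f), 0.5f));
}
```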
Ptr<Scene> Scene::create(Size sz, Matx33f _intr, float _depthFactor, bool _onlySemisphere)
{
return makePtr<SemisphereScene>(sz, _intr, _depthFactor, _onlySemisphere);
}
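
Note the trajectory length implied by getPoses() above: framesPerCycle · nCycles = 72 · 0.25 = 18 poses (indices 0 through 17) sweeping a quarter turn around the scene, so the valid-points tests below, which compare renderings from poses[0] and poses[17], look at views 360° · 17 / 72 = 85° apart.
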
// this is a temporary solution
// ----------------------------
typedef cv::Vec4f ptype;
typedef cv::Mat_< ptype > Points;
typedef Points Normals;
typedef Size2i Size;
template<int p>
inline float specPow(float x)
{
if (p % 2 == 0)
{
float v = specPow<p / 2>(x);
return v * v;
}
else
{
float v = specPow<(p - 1) / 2>(x);
return v * v * x;
}
}
template<>
inline float specPow<0>(float /*x*/)
{
return 1.f;
}
template<>
inline float specPow<1>(float x)
{
return x;
}
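
A quick illustrative sanity check of the helper above (assuming the specPow template is in scope; not part of the removed file):

```cpp
#include <cassert>
#include <cmath>

// specPow<p>(x) computes x^p by repeated squaring at compile time;
// specPow<20>, used for the specular term below, matches std::pow(x, 20).
void specPowSanityCheck()
{
    float x = 0.9f;
    assert(std::fabs(specPow<20>(x) - std::pow(x, 20.0)) < 1e-5);
}
```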
inline cv::Vec3f fromPtype(const ptype& x)
{
return cv::Vec3f(x[0], x[1], x[2]);
}
inline Point3f normalize(const Vec3f& v)
{
double nv = sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]);
return v * (nv ? 1. / nv : 0.);
}
void renderPointsNormals(InputArray _points, InputArray _normals, OutputArray image, Affine3f lightPose)
{
Size sz = _points.size();
image.create(sz, CV_8UC4);
Points points = _points.getMat();
Normals normals = _normals.getMat();
Mat_<Vec4b> img = image.getMat();
Range range(0, sz.height);
const int nstripes = -1;
parallel_for_(range, [&](const Range&)
{
for (int y = range.start; y < range.end; y++)
{
Vec4b* imgRow = img[y];
const ptype* ptsRow = points[y];
const ptype* nrmRow = normals[y];
for (int x = 0; x < sz.width; x++)
{
Point3f p = fromPtype(ptsRow[x]);
Point3f n = fromPtype(nrmRow[x]);
Vec4b color;
if (cvIsNaN(p.x) || cvIsNaN(p.y) || cvIsNaN(p.z))
{
color = Vec4b(0, 32, 0, 0);
}
else
{
const float Ka = 0.3f; //ambient coeff
const float Kd = 0.5f; //diffuse coeff
const float Ks = 0.2f; //specular coeff
const int sp = 20; //specular power
const float Ax = 1.f; //ambient color, can be RGB
const float Dx = 1.f; //diffuse color, can be RGB
const float Sx = 1.f; //specular color, can be RGB
const float Lx = 1.f; //light color
Point3f l = normalize(lightPose.translation() - Vec3f(p));
Point3f v = normalize(-Vec3f(p));
Point3f r = normalize(Vec3f(2.f * n * n.dot(l) - l));
uchar ix = (uchar)((Ax * Ka * Dx + Lx * Kd * Dx * max(0.f, n.dot(l)) +
Lx * Ks * Sx * specPow<sp>(max(0.f, r.dot(v)))) * 255.f);
color = Vec4b(ix, ix, ix, 0);
}
imgRow[x] = color;
}
}
}, nstripes);
}
// ----------------------------
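
For reference, the intensity computed in renderPointsNormals() above is a standard Phong model (a restatement of the code, not an addition to it): with n the surface normal, l the unit direction to the light, v the unit direction to the viewer and r the normalized reflection of l about n, the pixel value is (Ka·Ax·Dx + Kd·Lx·Dx·max(0, n·l) + Ks·Lx·Sx·max(0, r·v)^sp) · 255.
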
void displayImage(Mat depth, Mat points, Mat normals, float depthFactor, Vec3f lightPose)
{
Mat image;
patchNaNs(points);
imshow("depth", depth * (1.f / depthFactor / 4.f));
renderPointsNormals(points, normals, image, lightPose);
imshow("render", image);
waitKey(2000);
destroyAllWindows();
}
void normalsCheck(Mat normals)
{
Vec4f vector;
for (auto pvector = normals.begin<Vec4f>(); pvector < normals.end<Vec4f>(); pvector++)
{
vector = *pvector;
if (!cvIsNaN(vector[0]))
{
float length = vector[0] * vector[0] +
vector[1] * vector[1] +
vector[2] * vector[2];
ASSERT_LT(abs(1 - length), 0.0001f) << "There is normal with length != 1";
}
}
}
int counterOfValid(Mat points)
{
Vec4f* v;
int i, j;
int count = 0;
for (i = 0; i < points.rows; ++i)
{
v = (points.ptr<Vec4f>(i));
for (j = 0; j < points.cols; ++j)
{
if ((v[j])[0] != 0 ||
(v[j])[1] != 0 ||
(v[j])[2] != 0)
{
count++;
}
}
}
return count;
}
enum class VolumeTestFunction
{
RAYCAST = 0,
FETCH_NORMALS = 1,
FETCH_POINTS_NORMALS = 2
};
enum class VolumeTestSrcType
{
MAT = 0,
ODOMETRY_FRAME = 1
};
void normal_test_custom_framesize(VolumeType volumeType, VolumeTestFunction testFunction, VolumeTestSrcType testSrcType)
{
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = true;
float depthFactor = vs.getDepthFactor();
Vec3f lightPose = Vec3f::all(0.f);
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
Mat depth = scene->depth(poses[0]);
UMat udepth;
depth.copyTo(udepth);
UMat upoints, unormals, utmpnormals;
Mat points, normals;
AccessFlag af = ACCESS_READ;
OdometryFrame odf(noArray(), udepth);
if (testSrcType == VolumeTestSrcType::MAT)
volume.integrate(depth, poses[0].matrix);
else
volume.integrate(odf, poses[0].matrix);
if (testFunction == VolumeTestFunction::RAYCAST)
{
volume.raycast(poses[0].matrix, frameSize.height, frameSize.width, intrRaycast, upoints, unormals);
}
else if (testFunction == VolumeTestFunction::FETCH_NORMALS)
{
if (testSrcType == VolumeTestSrcType::MAT)
{
// takes only point from raycast for checking fetched normals on the display
volume.raycast(poses[0].matrix, frameSize.height, frameSize.width, intrRaycast, upoints, utmpnormals);
// volume.fetchPointsNormals(upoints, utmpnormals);
volume.fetchNormals(upoints, unormals);
}
}
else if (testFunction == VolumeTestFunction::FETCH_POINTS_NORMALS)
{
if (testSrcType == VolumeTestSrcType::MAT) // Odometry frame or Mats
{
volume.fetchPointsNormals(upoints, unormals);
}
}
normals = unormals.getMat(af);
points = upoints.getMat(af);
if (testFunction == VolumeTestFunction::RAYCAST && cvtest::debugLevel > 0)
displayImage(depth, points, normals, depthFactor, lightPose);
normalsCheck(normals);
}
void normal_test_common_framesize(VolumeType volumeType, VolumeTestFunction testFunction, VolumeTestSrcType testSrcType)
{
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = true;
float depthFactor = vs.getDepthFactor();
Vec3f lightPose = Vec3f::all(0.f);
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
Mat depth = scene->depth(poses[0]);
UMat udepth;
depth.copyTo(udepth);
UMat upoints, unormals, utmpnormals;
Mat points, normals;
AccessFlag af = ACCESS_READ;
OdometryFrame odf(noArray(), udepth);
if (testSrcType == VolumeTestSrcType::MAT)
volume.integrate(depth, poses[0].matrix);
else
volume.integrate(odf, poses[0].matrix);
if (testFunction == VolumeTestFunction::RAYCAST)
{
volume.raycast(poses[0].matrix, upoints, unormals);
}
else if (testFunction == VolumeTestFunction::FETCH_NORMALS)
{
if (testSrcType == VolumeTestSrcType::MAT)
{
// takes only point from raycast for checking fetched normals on the display
volume.raycast(poses[0].matrix, upoints, utmpnormals);
//volume.fetchPointsNormals(upoints, utmpnormals);
volume.fetchNormals(upoints, unormals);
}
}
else if (testFunction == VolumeTestFunction::FETCH_POINTS_NORMALS)
{
if (testSrcType == VolumeTestSrcType::MAT) // Odometry frame or Mats
{
volume.fetchPointsNormals(upoints, unormals);
}
}
normals = unormals.getMat(af);
points = upoints.getMat(af);
if (testFunction == VolumeTestFunction::RAYCAST && cvtest::debugLevel > 0)
displayImage(depth, points, normals, depthFactor, lightPose);
normalsCheck(normals);
}
void valid_points_test_custom_framesize(VolumeType volumeType, VolumeTestSrcType testSrcType)
{
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = true;
float depthFactor = vs.getDepthFactor();
Vec3f lightPose = Vec3f::all(0.f);
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
Mat depth = scene->depth(poses[0]);
UMat udepth;
depth.copyTo(udepth);
UMat upoints, unormals;
UMat upoints1, unormals1;
Mat points, normals;
AccessFlag af = ACCESS_READ;
int anfas, profile;
OdometryFrame odf(noArray(), udepth);
if (testSrcType == VolumeTestSrcType::MAT)
volume.integrate(depth, poses[0].matrix);
else
volume.integrate(odf, poses[0].matrix);
volume.raycast(poses[0].matrix, frameSize.height, frameSize.width, intrRaycast, upoints, unormals);
normals = unormals.getMat(af);
points = upoints.getMat(af);
patchNaNs(points);
anfas = counterOfValid(points);
if (cvtest::debugLevel > 0)
displayImage(depth, points, normals, depthFactor, lightPose);
volume.raycast(poses[17].matrix, frameSize.height, frameSize.width, intrRaycast, upoints1, unormals1);
normals = unormals1.getMat(af);
points = upoints1.getMat(af);
patchNaNs(points);
profile = counterOfValid(points);
if (cvtest::debugLevel > 0)
displayImage(depth, points, normals, depthFactor, lightPose);
// TODO: why profile == 2*anfas ?
float percentValidity = float(anfas) / float(profile);
ASSERT_NE(profile, 0) << "There is no points in profile";
ASSERT_NE(anfas, 0) << "There is no points in anfas";
ASSERT_LT(abs(0.5 - percentValidity), 0.3) << "percentValidity out of [0.3; 0.7] (percentValidity=" << percentValidity << ")";
}
void valid_points_test_common_framesize(VolumeType volumeType, VolumeTestSrcType testSrcType)
{
VolumeSettings vs(volumeType);
Volume volume(volumeType, vs);
Size frameSize(vs.getRaycastWidth(), vs.getRaycastHeight());
Matx33f intrIntegrate, intrRaycast;
vs.getCameraIntegrateIntrinsics(intrIntegrate);
vs.getCameraRaycastIntrinsics(intrRaycast);
bool onlySemisphere = true;
float depthFactor = vs.getDepthFactor();
Vec3f lightPose = Vec3f::all(0.f);
Ptr<Scene> scene = Scene::create(frameSize, intrIntegrate, depthFactor, onlySemisphere);
std::vector<Affine3f> poses = scene->getPoses();
Mat depth = scene->depth(poses[0]);
UMat udepth;
depth.copyTo(udepth);
UMat upoints, unormals;
UMat upoints1, unormals1;
Mat points, normals;
AccessFlag af = ACCESS_READ;
int anfas, profile;
OdometryFrame odf(noArray(), udepth);
if (testSrcType == VolumeTestSrcType::MAT)
volume.integrate(depth, poses[0].matrix);
else
volume.integrate(odf, poses[0].matrix);
volume.raycast(poses[0].matrix, upoints, unormals);
normals = unormals.getMat(af);
points = upoints.getMat(af);
patchNaNs(points);
anfas = counterOfValid(points);
if (cvtest::debugLevel > 0)
displayImage(depth, points, normals, depthFactor, lightPose);
volume.raycast(poses[17].matrix, upoints1, unormals1);
normals = unormals1.getMat(af);
points = upoints1.getMat(af);
patchNaNs(points);
profile = counterOfValid(points);
if (cvtest::debugLevel > 0)
displayImage(depth, points, normals, depthFactor, lightPose);
// TODO: why profile == 2*anfas ?
float percentValidity = float(anfas) / float(profile);
ASSERT_NE(profile, 0) << "There is no points in profile";
ASSERT_NE(anfas, 0) << "There is no points in anfas";
ASSERT_LT(abs(0.5 - percentValidity), 0.3) << "percentValidity out of [0.3; 0.7] (percentValidity=" << percentValidity << ")";
}
TEST(TSDF_GPU, raycast_custom_framesize_normals_mat)
{
normal_test_custom_framesize(VolumeType::TSDF, VolumeTestFunction::RAYCAST, VolumeTestSrcType::MAT);
}
TEST(TSDF_GPU, raycast_custom_framesize_normals_frame)
{
normal_test_custom_framesize(VolumeType::TSDF, VolumeTestFunction::RAYCAST, VolumeTestSrcType::ODOMETRY_FRAME);
}
TEST(TSDF_GPU, raycast_common_framesize_normals_mat)
{
normal_test_common_framesize(VolumeType::TSDF, VolumeTestFunction::RAYCAST, VolumeTestSrcType::MAT);
}
TEST(TSDF_GPU, raycast_common_framesize_normals_frame)
{
normal_test_common_framesize(VolumeType::TSDF, VolumeTestFunction::RAYCAST, VolumeTestSrcType::ODOMETRY_FRAME);
}
TEST(TSDF_GPU, fetch_points_normals)
{
normal_test_custom_framesize(VolumeType::TSDF, VolumeTestFunction::FETCH_POINTS_NORMALS, VolumeTestSrcType::MAT);
}
TEST(TSDF_GPU, fetch_normals)
{
normal_test_custom_framesize(VolumeType::TSDF, VolumeTestFunction::FETCH_NORMALS, VolumeTestSrcType::MAT);
}
TEST(TSDF_GPU, valid_points_custom_framesize_mat)
{
valid_points_test_custom_framesize(VolumeType::TSDF, VolumeTestSrcType::MAT);
}
TEST(TSDF_GPU, valid_points_custom_framesize_frame)
{
valid_points_test_custom_framesize(VolumeType::TSDF, VolumeTestSrcType::ODOMETRY_FRAME);
}
TEST(TSDF_GPU, valid_points_common_framesize_mat)
{
valid_points_test_common_framesize(VolumeType::TSDF, VolumeTestSrcType::MAT);
}
TEST(TSDF_GPU, valid_points_common_framesize_frame)
{
valid_points_test_common_framesize(VolumeType::TSDF, VolumeTestSrcType::ODOMETRY_FRAME);
}
TEST(HashTSDF_GPU, raycast_custom_framesize_normals_mat)
{
normal_test_custom_framesize(VolumeType::HashTSDF, VolumeTestFunction::RAYCAST, VolumeTestSrcType::MAT);
}
TEST(HashTSDF_GPU, raycast_custom_framesize_normals_frame)
{
normal_test_custom_framesize(VolumeType::HashTSDF, VolumeTestFunction::RAYCAST, VolumeTestSrcType::ODOMETRY_FRAME);
}
TEST(HashTSDF_GPU, raycast_common_framesize_normals_mat)
{
normal_test_common_framesize(VolumeType::HashTSDF, VolumeTestFunction::RAYCAST, VolumeTestSrcType::MAT);
}
TEST(HashTSDF_GPU, raycast_common_framesize_normals_frame)
{
normal_test_common_framesize(VolumeType::HashTSDF, VolumeTestFunction::RAYCAST, VolumeTestSrcType::ODOMETRY_FRAME);
}
TEST(HashTSDF_GPU, fetch_points_normals)
{
normal_test_custom_framesize(VolumeType::HashTSDF, VolumeTestFunction::FETCH_POINTS_NORMALS, VolumeTestSrcType::MAT);
}
TEST(HashTSDF_GPU, fetch_normals)
{
normal_test_custom_framesize(VolumeType::HashTSDF, VolumeTestFunction::FETCH_NORMALS, VolumeTestSrcType::MAT);
}
TEST(HashTSDF_GPU, valid_points_custom_framesize_mat)
{
valid_points_test_custom_framesize(VolumeType::HashTSDF, VolumeTestSrcType::MAT);
}
TEST(HashTSDF_GPU, valid_points_custom_framesize_frame)
{
valid_points_test_custom_framesize(VolumeType::HashTSDF, VolumeTestSrcType::ODOMETRY_FRAME);
}
TEST(HashTSDF_GPU, valid_points_common_framesize_mat)
{
valid_points_test_common_framesize(VolumeType::HashTSDF, VolumeTestSrcType::MAT);
}
TEST(HashTSDF_GPU, valid_points_common_framesize_frame)
{
valid_points_test_common_framesize(VolumeType::HashTSDF, VolumeTestSrcType::ODOMETRY_FRAME);
}
}
} // namespace
#endif

File diff suppressed because it is too large.