
Commit

iOS various updates (#1340)
* Added Data Recording Mode. Added an option to filter ARKit localization jumps.

* Implemented max-acceleration relocalization filtering (working on iOS).

* Fixed Android build; added relocalization max-acceleration parameter.

* ARCore Java: Fixed the depth pose not being available at the requested stamp.

* Android log fix.

* Added libLAS support.

* iOS: Added LAS support.

* Updated the dependency install script to skip libraries that are already installed.

* CameraMobile: Fixed origin not being updated when updateOnRender() is used.

* Increased the default max optimization error to 2x to reduce the number of rejected loop closures. OptimizerGTSAM: updated the gravity noise model to use the same sigma for both parameters.

* Updated license.

* Bumped version to 0.21.7.

* Updated license date.
matlabbe authored Oct 7, 2024
1 parent 409ef73 commit 595f200
Showing 41 changed files with 1,794 additions and 1,038 deletions.
21 changes: 20 additions & 1 deletion CMakeLists.txt
@@ -20,7 +20,7 @@ SET(CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake_modules")
#######################
SET(RTABMAP_MAJOR_VERSION 0)
SET(RTABMAP_MINOR_VERSION 21)
SET(RTABMAP_PATCH_VERSION 6)
SET(RTABMAP_PATCH_VERSION 7)
SET(RTABMAP_VERSION
${RTABMAP_MAJOR_VERSION}.${RTABMAP_MINOR_VERSION}.${RTABMAP_PATCH_VERSION})

@@ -177,6 +177,7 @@ option(WITH_TORCH "Include Torch support (SuperPoint)" OFF)
option(WITH_PYTHON "Include Python3 support (PyMatcher, PyDetector)" OFF)
option(WITH_PYTHON_THREADING "Use more than one Python interpreter." OFF)
option(WITH_PDAL "Include PDAL support" ON)
option(WITH_LIBLAS "Include libLAS support" OFF)
option(WITH_CUDASIFT "Include CudaSift support (this fork https://github.com/matlabbe/CudaSift)" OFF)
option(WITH_FREENECT "Include Freenect support" ON)
option(WITH_FREENECT2 "Include Freenect2 support" ON)
@@ -416,6 +417,13 @@ IF(WITH_PDAL)
ENDIF(PDAL_FOUND)
ENDIF(WITH_PDAL)

IF(WITH_LIBLAS)
FIND_PACKAGE(libLAS QUIET)
IF(libLAS_FOUND)
MESSAGE(STATUS "Found libLAS ${libLAS_VERSION}: ${libLAS_INCLUDE_DIRS}")
ENDIF(libLAS_FOUND)
ENDIF(WITH_LIBLAS)

IF(WITH_CUDASIFT)
FIND_PACKAGE(CudaSift 3 QUIET)
IF(CudaSift_FOUND)
@@ -950,6 +958,9 @@ ENDIF(NOT opengv_FOUND OR NOT WITH_OPENGV)
IF(NOT PDAL_FOUND)
SET(PDAL "//")
ENDIF(NOT PDAL_FOUND)
IF(NOT libLAS_FOUND)
SET(LIBLAS "//")
ENDIF(NOT libLAS_FOUND)
IF(NOT CudaSift_FOUND)
SET(CUDASIFT "//")
ENDIF(NOT CudaSift_FOUND)
@@ -1427,6 +1438,14 @@ ELSE()
MESSAGE(STATUS " With PDAL = NO (PDAL not found)")
ENDIF()

IF(libLAS_FOUND)
MESSAGE(STATUS " With libLAS ${libLAS_VERSION} = YES (License: BSD)")
ELSEIF(NOT WITH_LIBLAS)
MESSAGE(STATUS " With libLAS = NO (WITH_LIBLAS=OFF)")
ELSE()
MESSAGE(STATUS " With libLAS = NO (libLAS not found)")
ENDIF()

IF(CudaSift_FOUND)
MESSAGE(STATUS " With CudaSift = YES (License: MIT)")
ELSEIF(NOT WITH_CUDASIFT)
1 change: 1 addition & 0 deletions Version.h.in
@@ -58,6 +58,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@FASTCV@#define RTABMAP_FASTCV
@OPENGV@#define RTABMAP_OPENGV
@PDAL@#define RTABMAP_PDAL
@LIBLAS@#define RTABMAP_LIBLAS
@CUDASIFT@#define RTABMAP_CUDASIFT
@LOAM@#define RTABMAP_LOAM
@FLOAM@#define RTABMAP_FLOAM
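Given the CMakeLists.txt change above, the @LIBLAS@ placeholder is substituted with an empty string when libLAS is found and with "//" otherwise, so the generated Version.h either defines or comments out RTABMAP_LIBLAS. Below is a minimal sketch of how code can be guarded on this macro; the function name and include path are illustrative assumptions, not taken from the diff:

#include <rtabmap/core/Version.h>  // generated from Version.h.in (path assumed)

// Returns true only when libLAS support was enabled at configure time.
bool lasExportAvailable()
{
#ifdef RTABMAP_LIBLAS
	return true;
#else
	return false;
#endif
}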
79 changes: 13 additions & 66 deletions app/android/jni/CameraARCore.cpp
@@ -37,8 +37,8 @@ namespace rtabmap {
//////////////////////////////
// CameraARCore
//////////////////////////////
CameraARCore::CameraARCore(void* env, void* context, void* activity, bool depthFromMotion, bool smoothing):
CameraMobile(smoothing),
CameraARCore::CameraARCore(void* env, void* context, void* activity, bool depthFromMotion, bool smoothing, float upstreamRelocalizationAccThr):
CameraMobile(smoothing, upstreamRelocalizationAccThr),
env_(env),
context_(context),
activity_(activity),
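The new upstreamRelocalizationAccThr argument is forwarded to CameraMobile and, per the commit message, filters ARKit/ARCore relocalization jumps by a maximum-acceleration criterion. The following is a hedged, self-contained sketch of one way such a check can be computed from three consecutive upstream positions; it illustrates the idea only and is not the actual CameraMobile implementation (the helper name and the m/s^2 unit are assumptions):

#include <cmath>

// Accept a new pose only if the linear acceleration implied by the last three
// upstream positions stays below the threshold (0 disables the filter).
bool acceptUpstreamPose(const float p0[3], const float p1[3], const float p2[3],
                        double t0, double t1, double t2, float accThr)
{
	if(accThr <= 0.0f || t1 <= t0 || t2 <= t1)
	{
		return true;
	}
	auto dist = [](const float * a, const float * b)
	{
		return std::sqrt((a[0]-b[0])*(a[0]-b[0]) +
		                 (a[1]-b[1])*(a[1]-b[1]) +
		                 (a[2]-b[2])*(a[2]-b[2]));
	};
	float v01 = dist(p0, p1) / float(t1 - t0);          // speed over first interval
	float v12 = dist(p1, p2) / float(t2 - t1);          // speed over second interval
	float acc = std::fabs(v12 - v01) / float(t2 - t1);  // implied acceleration
	return acc <= accThr;                               // reject large relocalization jumps
}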
@@ -125,6 +125,8 @@ bool CameraARCore::init(const std::string & calibrationFolder, const std::string
{
close();

CameraMobile::init(calibrationFolder, cameraName);

UScopeMutex lock(arSessionMutex_);

ArInstallStatus install_status;
@@ -272,54 +274,6 @@ void CameraARCore::close()
CameraMobile::close();
}

LaserScan CameraARCore::scanFromPointCloudData(
const float * pointCloudData,
int points,
const Transform & pose,
const CameraModel & model,
const cv::Mat & rgb,
std::vector<cv::KeyPoint> * kpts,
std::vector<cv::Point3f> * kpts3D)
{
if(pointCloudData && points>0)
{
cv::Mat scanData(1, points, CV_32FC4);
float * ptr = scanData.ptr<float>();
for(unsigned int i=0;i<points; ++i)
{
cv::Point3f pt(pointCloudData[i*4], pointCloudData[i*4 + 1], pointCloudData[i*4 + 2]);
pt = util3d::transformPoint(pt, pose.inverse()*rtabmap_world_T_opengl_world);
ptr[i*4] = pt.x;
ptr[i*4 + 1] = pt.y;
ptr[i*4 + 2] = pt.z;

//get color from rgb image
cv::Point3f org= pt;
pt = util3d::transformPoint(pt, opticalRotationInv);
int u,v;
model.reproject(pt.x, pt.y, pt.z, u, v);
unsigned char r=255,g=255,b=255;
if(model.inFrame(u, v))
{
b=rgb.at<cv::Vec3b>(v,u).val[0];
g=rgb.at<cv::Vec3b>(v,u).val[1];
r=rgb.at<cv::Vec3b>(v,u).val[2];
if(kpts)
kpts->push_back(cv::KeyPoint(u,v,3));
if(kpts3D)
kpts3D->push_back(org);
}
*(int*)&ptr[i*4 + 3] = int(b) | (int(g) << 8) | (int(r) << 16);

//confidence
//*(int*)&ptr[i*4 + 3] = (int(pointCloudData[i*4 + 3] * 255.0f) << 8) | (int(255) << 16);

}
return LaserScan::backwardCompatibility(scanData, 0, 10, rtabmap::Transform::getIdentity());
}
return LaserScan();
}

void CameraARCore::setScreenRotationAndSize(ScreenRotation colorCameraToDisplayRotation, int width, int height)
{
CameraMobile::setScreenRotationAndSize(colorCameraToDisplayRotation, width, height);
@@ -385,12 +339,6 @@ SensorData CameraARCore::updateDataOnRender(Transform & pose)
/*near=*/0.1f, /*far=*/100.f,
glm::value_ptr(projectionMatrix_));

// adjust origin
if(!getOriginOffset().isNull())
{
viewMatrix_ = glm::inverse(rtabmap::glmFromTransform(rtabmap::opengl_world_T_rtabmap_world * getOriginOffset() *rtabmap::rtabmap_world_T_opengl_world)*glm::inverse(viewMatrix_));
}

ArTrackingState camera_tracking_state;
ArCamera_getTrackingState(arSession_, ar_camera, &camera_tracking_state);

@@ -402,11 +350,12 @@ SensorData CameraARCore::updateDataOnRender(Transform & pose)
ArCamera_getPose(arSession_, ar_camera, arPose_);
ArPose_getPoseRaw(arSession_, arPose_, pose_raw);
Transform poseArCore = Transform(pose_raw[4], pose_raw[5], pose_raw[6], pose_raw[0], pose_raw[1], pose_raw[2], pose_raw[3]);
poseArCore = rtabmap::rtabmap_world_T_opengl_world * poseArCore * rtabmap::opengl_world_T_rtabmap_world;
pose = rtabmap::rtabmap_world_T_opengl_world * poseArCore * rtabmap::opengl_world_T_rtabmap_world;

if(poseArCore.isNull())
if(pose.isNull())
{
LOGE("CameraARCore: Pose is null");
return data;
}

// Get calibration parameters
@@ -530,7 +479,11 @@ SensorData CameraARCore::updateDataOnRender(Transform & pose)
#endif
if(pointCloudData && points>0)
{
scan = scanFromPointCloudData(pointCloudData, points, poseArCore, model, rgb, &kpts, &kpts3);
cv::Mat pointCloudDataMat(1, points, CV_32FC4, (void *)pointCloudData);
scan = scanFromPointCloudData(pointCloudDataMat, pose, model, rgb, &kpts, &kpts3);
#ifndef DISABLE_LOG
LOGI("valid scan points = %d", scan.size());
#endif
}
}
else
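Instead of the removed per-point copy in scanFromPointCloudData(), the refactor above wraps the raw ARCore point-cloud buffer in a cv::Mat header and passes it to the shared scanFromPointCloudData() overload that takes a cv::Mat. A small, standalone illustration of that OpenCV constructor's zero-copy semantics (not RTAB-Map code):

#include <opencv2/core.hpp>

void wrapExample()
{
	float buffer[2 * 4] = {0};                        // 2 points x (x, y, z, confidence)
	cv::Mat wrapped(1, 2, CV_32FC4, (void *)buffer);  // header only, no data copy
	// 'wrapped' references 'buffer', so the buffer must outlive any use of it.
	cv::Mat owned = wrapped.clone();                  // deep copy when ownership is needed
}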
@@ -541,15 +494,9 @@ SensorData CameraARCore::updateDataOnRender(Transform & pose)
data = SensorData(scan, rgb, depthFromMotion_?getOcclusionImage():cv::Mat(), model, 0, stamp);
data.setFeatures(kpts, kpts3, cv::Mat());

if(!poseArCore.isNull())
if(!pose.isNull())
{
pose = poseArCore;
this->poseReceived(pose, stamp);
// adjust origin
if(!getOriginOffset().isNull())
{
pose = getOriginOffset() * pose;
}
}
}
}
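The "adjust origin" blocks removed from updateDataOnRender() match the commit-message item "CameraMobile: Fixed origin not being updated when updateOnRender() is used": the per-backend correction is dropped so the offset can be applied in one place once the pose is received. A hedged sketch of that centralized pattern with hypothetical names; it is not the verbatim CameraMobile code:

#include <rtabmap/core/Transform.h>

using rtabmap::Transform;

// Hypothetical central handler: apply the origin offset once for every backend
// (ARCore, AREngine, ARKit) instead of duplicating it in each updateDataOnRender().
void applyOriginOffset(Transform & pose, const Transform & originOffset)
{
	if(!originOffset.isNull())
	{
		pose = originOffset * pose;  // same correction the subclasses used to apply
	}
}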
12 changes: 1 addition & 11 deletions app/android/jni/CameraARCore.h
@@ -50,17 +50,7 @@ namespace rtabmap {

class CameraARCore : public CameraMobile {
public:
static LaserScan scanFromPointCloudData(
const float * pointCloudData,
int points,
const Transform & pose,
const CameraModel & model,
const cv::Mat & rgb,
std::vector<cv::KeyPoint> * kpts = 0,
std::vector<cv::Point3f> * kpts3D = 0);

public:
CameraARCore(void* env, void* context, void* activity, bool depthFromMotion = false, bool smoothing = false);
CameraARCore(void* env, void* context, void* activity, bool depthFromMotion = false, bool smoothing = false, float upstreamRelocalizationAccThr = 0.0f);
virtual ~CameraARCore();

virtual void setScreenRotationAndSize(ScreenRotation colorCameraToDisplayRotation, int width, int height);
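For completeness, a hypothetical construction call using the extended signature above; the env/context/activity handles come from JNI as usual, and the threshold value and its unit are illustrative assumptions (the default 0.0f leaves the filter disabled):

// Hypothetical usage of the new parameter (value and unit are assumptions).
CameraARCore camera(env, context, activity,
                    /*depthFromMotion=*/false,
                    /*smoothing=*/false,
                    /*upstreamRelocalizationAccThr=*/1.0f);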
17 changes: 4 additions & 13 deletions app/android/jni/CameraAREngine.cpp
@@ -40,8 +40,8 @@ namespace rtabmap {
//////////////////////////////
// CameraAREngine
//////////////////////////////
CameraAREngine::CameraAREngine(void* env, void* context, void* activity, bool smoothing):
CameraMobile(smoothing),
CameraAREngine::CameraAREngine(void* env, void* context, void* activity, bool smoothing, float upstreamRelocalizationAccThr):
CameraMobile(smoothing, upstreamRelocalizationAccThr),
env_(env),
context_(context),
activity_(activity),
@@ -66,6 +66,8 @@ bool CameraAREngine::init(const std::string & calibrationFolder, const std::stri
{
close();

CameraMobile::init(calibrationFolder, cameraName);

UScopeMutex lock(arSessionMutex_);

HwArInstallStatus install_status;
@@ -236,12 +238,6 @@ SensorData CameraAREngine::updateDataOnRender(Transform & pose)
/*near=*/0.1f, /*far=*/100.f,
glm::value_ptr(projectionMatrix_));

// adjust origin
if(!getOriginOffset().isNull())
{
viewMatrix_ = glm::inverse(rtabmap::glmFromTransform(rtabmap::opengl_world_T_rtabmap_world * getOriginOffset() *rtabmap::rtabmap_world_T_opengl_world)*glm::inverse(viewMatrix_));
}

HwArTrackingState camera_tracking_state;
HwArCamera_getTrackingState(arSession_, ar_camera, &camera_tracking_state);

@@ -334,11 +330,6 @@ SensorData CameraAREngine::updateDataOnRender(Transform & pose)
{
pose = rtabmap::rtabmap_world_T_opengl_world * pose * rtabmap::opengl_world_T_rtabmap_world;
this->poseReceived(pose, stamp);
// adjust origin
if(!getOriginOffset().isNull())
{
pose = getOriginOffset() * pose;
}
}
}
}
2 changes: 1 addition & 1 deletion app/android/jni/CameraAREngine.h
@@ -46,7 +46,7 @@ namespace rtabmap {

class CameraAREngine : public CameraMobile {
public:
CameraAREngine(void* env, void* context, void* activity, bool smoothing = false);
CameraAREngine(void* env, void* context, void* activity, bool smoothing = false, float upstreamRelocalizationAccThr = 0.0f);
virtual ~CameraAREngine();

virtual void setScreenRotationAndSize(ScreenRotation colorCameraToDisplayRotation, int width, int height);
