diff --git a/cfg/domi_cam0.yaml b/cfg/domi_cam0.yaml new file mode 100644 index 00000000..d2698318 --- /dev/null +++ b/cfg/domi_cam0.yaml @@ -0,0 +1,13 @@ +image_width: 752 +image_height: 480 +camera_name: cam0 +camera_matrix: + rows: 3 + cols: 3 + data: [393.016201519, 0.0, 360.133022189, 0.0, 392.357540103, 239.827503562, 0.0, 0.0, 1.0] +distortion_model: equidistant +distortion_coefficients: + rows: 1 + cols: 4 + data: [-0.0524237801608, -0.00545225097947, -0.00275082473186, 0.00117687962781] + diff --git a/cfg/domi_cam1.yaml b/cfg/domi_cam1.yaml new file mode 100644 index 00000000..8be9b5cb --- /dev/null +++ b/cfg/domi_cam1.yaml @@ -0,0 +1,13 @@ +image_width: 752 +image_height: 480 +camera_name: cam1 +camera_matrix: + rows: 3 + cols: 3 + data: [399.348057011, 0.0, 386.463445149, 0.0, 398.349131629, 249.821188712, 0.0, 0.0, 1.0] +distortion_model: equidistant +distortion_coefficients: + rows: 1 + cols: 4 + data: [-0.0577566077926, -0.0163210986871, 0.0108510337124, -0.00372778210309] + diff --git a/cfg/domi_cam2.yaml b/cfg/domi_cam2.yaml new file mode 100644 index 00000000..f5d5f166 --- /dev/null +++ b/cfg/domi_cam2.yaml @@ -0,0 +1,13 @@ +image_width: 752 +image_height: 480 +camera_name: cam2 +camera_matrix: + rows: 3 + cols: 3 + data: [401.647215983, 0.0, 364.57630651, 0.0, 400.20768313, 237.330914431, 0.0, 0.0, 1.0] +distortion_model: equidistant +distortion_coefficients: + rows: 1 + cols: 4 + data: [-0.0645622778177, -0.0139883244162, 0.0160581204973, -0.00758677666188] + diff --git a/cfg/domi_cam3.yaml b/cfg/domi_cam3.yaml new file mode 100644 index 00000000..104dfaa6 --- /dev/null +++ b/cfg/domi_cam3.yaml @@ -0,0 +1,13 @@ +image_width: 752 +image_height: 480 +camera_name: cam3 +camera_matrix: + rows: 3 + cols: 3 + data: [398.9944782, 0.0, 364.41599021, 0.0, 398.080578743, 275.477777432, 0.0, 0.0, 1.0] +distortion_model: equidistant +distortion_coefficients: + rows: 1 + cols: 4 + data: [-0.0560625387897, -0.014037695708, 0.00638405850267, -0.00151939229656] 
+ diff --git a/cfg/domi_cam4.yaml b/cfg/domi_cam4.yaml new file mode 100644 index 00000000..a084e054 --- /dev/null +++ b/cfg/domi_cam4.yaml @@ -0,0 +1,13 @@ +image_width: 752 +image_height: 480 +camera_name: cam4 +camera_matrix: + rows: 3 + cols: 3 + data: [400.103070384, 0.0, 371.400642253, 0.0, 399.068157312, 248.047178007, 0.0, 0.0, 1.0] +distortion_model: equidistant +distortion_coefficients: + rows: 1 + cols: 4 + data: [-0.0586905828624, -0.00419400201291, -0.00142056808843, 0.000320459725526] + diff --git a/cfg/euroc_cam0.yaml b/cfg/euroc_cam0.yaml index 193db731..a29033fa 100644 --- a/cfg/euroc_cam0.yaml +++ b/cfg/euroc_cam0.yaml @@ -12,26 +12,3 @@ distortion_coefficients: cols: 5 data: [-0.28340811, 0.07395907, 0.00019359, 1.76187114e-05, 0.0] -###### Original Calibration File ###### -## General sensor definitions. -#sensor_type: camera -#comment: VI-Sensor cam0 (MT9M034) -# -## Sensor extrinsics wrt. the body-frame. -#T_BS: -# cols: 4 -# rows: 4 -# data: [0.0148655429818, -0.999880929698, 0.00414029679422, -0.0216401454975, -# 0.999557249008, 0.0149672133247, 0.025715529948, -0.064676986768, -# -0.0257744366974, 0.00375618835797, 0.999660727178, 0.00981073058949, -# 0.0, 0.0, 0.0, 1.0] -# -## Camera specific definitions. 
-#rate_hz: 20 -#resolution: [752, 480] -#camera_model: pinhole -#intrinsics: [458.654, 457.296, 367.215, 248.375] #fu, fv, cu, cv -#distortion_model: radial-tangential -#distortion_coefficients: [-0.28340811, 0.07395907, 0.00019359, 1.76187114e-05] - - diff --git a/cfg/euroc_cam0_new.yaml b/cfg/euroc_cam0_new.yaml new file mode 100644 index 00000000..5eff8c8f --- /dev/null +++ b/cfg/euroc_cam0_new.yaml @@ -0,0 +1,13 @@ +###### Camera Calibration File For Cam 0 of Euroc Datasets ###### +image_width: 752 +image_height: 480 +camera_name: cam0 +camera_matrix: + rows: 3 + cols: 3 + data: [458.654, 0.0, 367.215, 0.0, 457.296, 248.375, 0.0, 0.0, 1.0] +distortion_model: plumb_bob +distortion_coefficients: + rows: 1 + cols: 5 +data: [-0.28340811, 0.07395907, 0.00019359, 1.76187114e-05, 0.0] diff --git a/cfg/imu.yaml b/cfg/imu.yaml new file mode 100644 index 00000000..2cfed98f --- /dev/null +++ b/cfg/imu.yaml @@ -0,0 +1,10 @@ +#Accelerometers +accelerometer_noise_density: 1.86e-03 #Noise density (continuous-time) +accelerometer_random_walk: 4.33e-04 #Bias random walk + +#Gyroscopes +gyroscope_noise_density: 1.87e-04 #Noise density (continuous-time) +gyroscope_random_walk: 2.66e-05 #Bias random walk + +rostopic: /mavros/imu/data #the IMU ROS topic +update_rate: 200.0 #Hz (for discretization of the values above) \ No newline at end of file diff --git a/cfg/rovio.info b/cfg/rovio.info index f237fe5d..ec11a378 100644 --- a/cfg/rovio.info +++ b/cfg/rovio.info @@ -8,14 +8,14 @@ Common } Camera0 { - CalibrationFile ; Camera-Calibration file for intrinsics - qCM_x 0.00666398307551; X-entry of IMU to Camera quaterion (Hamilton) - qCM_y -0.0079168224269; Y-entry of IMU to Camera quaterion (Hamilton) - qCM_z -0.701985972528; Z-entry of IMU to Camera quaterion (Hamilton) - qCM_w 0.712115587266; W-entry of IMU to Camera quaterion (Hamilton) - MrMC_x -0.0111674199187; X-entry of IMU to Camera vector (expressed in IMU CF) [m] - MrMC_y -0.0574640920022; Y-entry of IMU to Camera vector 
(expressed in IMU CF) [m] - MrMC_z 0.0207586947896; Z-entry of IMU to Camera vector (expressed in IMU CF) [m] + CalibrationFile ; Camera-Calibration file for intrinsics + qCM_x 0.498529507038; X-entry of IMU to Camera quaterion (Hamilton) + qCM_y -0.496012173235; Y-entry of IMU to Camera quaterion (Hamilton) + qCM_z 0.511762063883; Z-entry of IMU to Camera quaterion (Hamilton) + qCM_w 0.493497562897; W-entry of IMU to Camera quaterion (Hamilton) + MrMC_x 0.000218693879012; X-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_y -0.00011610537335; Y-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_z -0.000499672135095; Z-entry of IMU to Camera vector (expressed in IMU CF) [m] } Camera1 { @@ -146,31 +146,33 @@ Prediction { PredictionNoise { - pos_0 1e-4; X-covariance parameter of position prediction [m^2/s] - pos_1 1e-4; Y-covariance parameter of position prediction [m^2/s] - pos_2 1e-4; Z-covariance parameter of position prediction [m^2/s] - vel_0 4e-6; X-covariance parameter of velocity prediction [m^2/s^3] - vel_1 4e-6; Y-covariance parameter of velocity prediction [m^2/s^3] - vel_2 4e-6; Z-covariance parameter of velocity prediction [m^2/s^3] - acb_0 1e-8; X-covariance parameter of accelerometer bias prediction [m^2/s^5] - acb_1 1e-8; Y-covariance parameter of accelerometer bias prediction [m^2/s^5] - acb_2 1e-8; Z-covariance parameter of accelerometer bias prediction [m^2/s^5] - gyb_0 3.8e-7; X-covariance parameter of gyroscope bias prediction [rad^2/s^3] - gyb_1 3.8e-7; Y-covariance parameter of gyroscope bias prediction [rad^2/s^3] - gyb_2 3.8e-7; Z-covariance parameter of gyroscope bias prediction [rad^2/s^3] - vep 1e-8; Covariance parameter of linear extrinsics prediction [m^2/s] - att_0 7.6e-7; X-Covariance parameter of attitude prediction [rad^2/s] - att_1 7.6e-7; Y-Covariance parameter of attitude prediction [rad^2/s] - att_2 7.6e-7; Z-Covariance parameter of attitude prediction [rad^2/s] - vea 1e-8; Covariance parameter of 
rotational extrinsics prediction [rad^2/s] - dep 0.0001; Covariance parameter of distance prediction [m^2/s] - nor 0.00001; Covariance parameter of bearing vector prediction [rad^2/s] + pos_0 1e-4; X-covariance parameter of position prediction [m^2/s] + pos_1 1e-4; Y-covariance parameter of position prediction [m^2/s] + pos_2 1e-4; Z-covariance parameter of position prediction [m^2/s] + vel_0 4e-5; 4e-6 X-covariance parameter of velocity prediction [m^2/s^3] + vel_1 4e-5; Y-covariance parameter of velocity prediction [m^2/s^3] + vel_2 4e-5; Z-covariance parameter of velocity prediction [m^2/s^3] + acb_0 9e-6; X-covariance parameter of accelerometer bias prediction [m^2/s^5] + acb_1 9e-6; Y-covariance parameter of accelerometer bias prediction [m^2/s^5] + acb_2 9e-6; Z-covariance parameter of accelerometer bias prediction [m^2/s^5] + gyb_0 2e-8; X-covariance parameter of gyroscope bias prediction [rad^2/s^3] + gyb_1 2e-8; Y-covariance parameter of gyroscope bias prediction [rad^2/s^3] + gyb_2 2e-8; Z-covariance parameter of gyroscope bias prediction [rad^2/s^3] + vep 1e-6; Covariance parameter of linear extrinsics prediction [m^2/s] + att_0 7.6e-6; 7.6e-7 X-Covariance parameter of attitude prediction [rad^2/s] + att_1 7.6e-6; Y-Covariance parameter of attitude prediction [rad^2/s] + att_2 7.6e-6; Z-Covariance parameter of attitude prediction [rad^2/s] + vea 1e-6; Covariance parameter of rotational extrinsics prediction [rad^2/s] + dep 0.0001; Covariance parameter of distance prediction [m^2/s] + nor 0.00001; Covariance parameter of bearing vector prediction [rad^2/s] + + } MotionDetection { - inertialMotionRorTh 0.1; Treshold on rotational rate for motion detection [rad/s] - inertialMotionAccTh 0.5; Treshold on acceleration for motion detection [m/s^2] - } + inertialMotionRorTh 0.1; Treshold on rotational rate for motion detection [rad/s] + inertialMotionAccTh 0.5; Treshold on acceleration for motion detection [m/s^2] + } } PoseUpdate { @@ -202,14 +204,14 @@ 
PoseUpdate qVM_x 0; X-entry of quaterion representing IMU to reference body coordinate frame of pose measurement (Hamilton) qVM_y 0; Y-entry of quaterion representing IMU to reference body coordinate frame of pose measurement (Hamilton) qVM_z 0; Z-entry of quaterion representing IMU to reference body coordinate frame of pose measurement (Hamilton) - qVM_w 1; W-entry of quaterion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + qVM_w -1; W-entry of quaterion representing IMU to reference body coordinate frame of pose measurement (Hamilton) MrMV_x 0; X-entry of vector representing IMU to reference body coordinate frame of pose measurement MrMV_y 0; Y-entry of vector representing IMU to reference body coordinate frame of pose measurement MrMV_z 0; Z-entry of vector representing IMU to reference body coordinate frame of pose measurement qWI_x 0; X-entry of quaterion representing World to reference inertial coordinate frame of pose measurement (Hamilton) qWI_y 0; Y-entry of quaterion representing World to reference inertial coordinate frame of pose measurement (Hamilton) qWI_z 0; Z-entry of quaterion representing World to reference inertial coordinate frame of pose measurement (Hamilton) - qWI_w 1; W-entry of quaterion representing World to reference inertial coordinate frame of pose measurement (Hamilton) + qWI_w -1; W-entry of quaterion representing World to reference inertial coordinate frame of pose measurement (Hamilton) IrIW_x 0; X-entry of vector representing World to reference inertial coordinate frame of pose measurement IrIW_y 0; Y-entry of vector representing World to reference inertial coordinate frame of pose measurement IrIW_z 0; Z-entry of vector representing World to reference inertial coordinate frame of pose measurement @@ -227,4 +229,4 @@ VelocityUpdate qAM_y 0 qAM_z 0 qAM_w 1 -} +} \ No newline at end of file diff --git a/cfg/rovio_domi.info b/cfg/rovio_domi.info new file mode 100644 index 00000000..c1911b41 --- 
/dev/null +++ b/cfg/rovio_domi.info @@ -0,0 +1,269 @@ +; You can partially override parameter set in this file by creating your own subset of parameter in a separate info-file and include it with: +; #include "/home/user/workspace/rovio/cfg/rovio_custom.info" +Common +{ + doVECalibration false; Should the camera-IMU extrinsics be calibrated online + depthType 1; Type of depth parametrization (0: normal, 1: inverse depth, 2: log, 3: hyperbolic) + verbose false; Is the verbose active + healthCheck false; +} +Camera0 +{ + CalibrationFile ; Camera-Calibration file for intrinsics + qCM_x -0.0141053692505; X-entry of IMU to Camera quaterion (Hamilton) + qCM_y -0.00445598914788; Y-entry of IMU to Camera quaterion (Hamilton) + qCM_z 0.736220553648; Z-entry of IMU to Camera quaterion (Hamilton) + qCM_w 0.676579987219; W-entry of IMU to Camera quaterion (Hamilton) + MrMC_x -0.013725561541; X-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_y 0.0458722860308; Y-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_z 0.00949435362177; Z-entry of IMU to Camera vector (expressed in IMU CF) [m] +} +Camera2 +{ + CalibrationFile ; Camera-Calibration file for intrinsics + qCM_x -0.717540995787; X-entry of IMU to Camera quaterion (Hamilton) + qCM_y -0.00571832829522; Y-entry of IMU to Camera quaterion (Hamilton) + qCM_z 0.0123040970923; Z-entry of IMU to Camera quaterion (Hamilton) + qCM_w 0.696384110446; W-entry of IMU to Camera quaterion (Hamilton) + MrMC_x 0.0323515084872; X-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_y -0.0480190261945; Y-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_z -0.0324906232515; Z-entry of IMU to Camera vector (expressed in IMU CF) [m] +} +Camera3 +{ + CalibrationFile ; Camera-Calibration file for intrinsics + qCM_x 0.498284808832; X-entry of IMU to Camera quaterion (Hamilton) + qCM_y -0.500933590992; Y-entry of IMU to Camera quaterion (Hamilton) + qCM_z 0.506194778687; Z-entry of IMU to Camera 
quaterion (Hamilton) + qCM_w 0.494514542489; W-entry of IMU to Camera quaterion (Hamilton) + MrMC_x 0.0266940973582; X-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_y 0.0446798994558; Y-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_z -0.00861710075576; Z-entry of IMU to Camera vector (expressed in IMU CF) [m] +} +Camera4 +{ + CalibrationFile ; Camera-Calibration file for intrinsics + qCM_x 0.490198388917; X-entry of IMU to Camera quaterion (Hamilton) + qCM_y 0.5116843721; Y-entry of IMU to Camera quaterion (Hamilton) + qCM_z -0.490599785917; Z-entry of IMU to Camera quaterion (Hamilton) + qCM_w 0.50714543566; W-entry of IMU to Camera quaterion (Hamilton) + MrMC_x -0.064799845427; X-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_y 0.042329395001; Y-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_z -0.00623084134628; Z-entry of IMU to Camera vector (expressed in IMU CF) [m] +} +Camera1 +{ + CalibrationFile ; Camera-Calibration file for intrinsics + qCM_x -0.00953751922809; X-entry of IMU to Camera quaterion (Hamilton) + qCM_y -0.703628160853; Y-entry of IMU to Camera quaterion (Hamilton) + qCM_z -0.710497111443; Z-entry of IMU to Camera quaterion (Hamilton) + qCM_w 0.00320961269892; W-entry of IMU to Camera quaterion (Hamilton) + MrMC_x 0.0341624938784; X-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_y 0.0489333125579; Y-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_z -0.0250547932613; Z-entry of IMU to Camera vector (expressed in IMU CF) [m] +} + +Init +{ + State + { + pos_0 0; X-entry of initial position (world to IMU in world) [m] + pos_1 0; Y-entry of initial position (world to IMU in world) [m] + pos_2 0; Z-entry of initial position (world to IMU in world) [m] + vel_0 0; X-entry of initial velocity (robocentric, IMU) [m/s] + vel_1 0; Y-entry of initial velocity (robocentric, IMU) [m/s] + vel_2 0; Z-entry of initial velocity (robocentric, IMU) [m/s] + acb_0 0; 
X-entry of accelerometer bias [m/s^2] + acb_1 0; Y-entry of accelerometer bias [m/s^2] + acb_2 0; Z-entry of accelerometer bias [m/s^2] + gyb_0 0; X-entry of gyroscope bias [rad/s] + gyb_1 0; Y-entry of gyroscope bias [rad/s] + gyb_2 0; Z-entry of gyroscope bias [rad/s] + att_x 0; X-entry of initial attitude (IMU to world, Hamilton) + att_y 0; Y-entry of initial attitude (IMU to world, Hamilton) + att_z 0; Z-entry of initial attitude (IMU to world, Hamilton) + att_w 1; W-entry of initial attitude (IMU to world, Hamilton) + } + Covariance + { + pos_0 0.0001; X-Covariance of initial position [m^2] + pos_1 0.0001; Y-Covariance of initial position [m^2] + pos_2 0.0001; Z-Covariance of initial position [m^2] + vel_0 1.0; X-Covariance of initial velocity [m^2/s^2] + vel_1 1.0; Y-Covariance of initial velocity [m^2/s^2] + vel_2 1.0; Z-Covariance of initial velocity [m^2/s^2] + acb_0 4e-4; X-Covariance of initial accelerometer bias [m^2/s^4] + acb_1 4e-4; Y-Covariance of initial accelerometer bias [m^2/s^4] + acb_2 4e-4; Z-Covariance of initial accelerometer bias [m^2/s^4] + gyb_0 3e-4; X-Covariance of initial gyroscope bias [rad^2/s^2] + gyb_1 3e-4; Y-Covariance of initial gyroscope bias [rad^2/s^2] + gyb_2 3e-4; Z-Covariance of initial gyroscope bias [rad^2/s^2] + vep 0.0001; Covariance of initial linear camera-IMU extrinsics, same for all entries [m^2] + att_0 0.1; X-Covariance of initial attitude [rad^2] + att_1 0.1; Y-Covariance of initial attitude [rad^2] + att_2 0.1; Z-Covariance of initial attitude [rad^2] + vea 0.01; Covariance of initial rotational camera-IMU extrinsics, same for all entries [rad^2] + } +} +ImgUpdate +{ + updateVecNormTermination 1e-4; + maxNumIteration 20; + doPatchWarping true; Should the patches be warped + doFrameVisualisation true; Should the frames be visualized + showCandidates false; Should the possible feature candidates be displayed + visualizePatches false; Should the patches be visualized + useDirectMethod true; Should the 
EKF-innovation be based on direct intensity error (o.w. reprojection error) + startLevel 2; Highest patch level which is being employed (must be smaller than the hardcoded template parameter) + endLevel 1; Lowest patch level which is being employed + nDetectionBuckets 100; Number of discretization buckets used during the candidates selection + MahalanobisTh 9.21; Mahalanobis treshold for the update, 5.8858356 + UpdateNoise + { + pix 2; Covariance used for the reprojection error, 1/focal_length is roughly 1 pixel std [rad] (~1/f ~ 1/400^2 = 1/160000) + int 400; Covariance used for the photometric error [intensity^2] + } + initCovFeature_0 0.5; Covariance for the initial distance (Relative to initialization depth [m^2/m^2]) + initCovFeature_1 1e-5; Covariance for the initial bearing vector, x-component [rad^2] + initCovFeature_2 1e-5; Covariance for the initial bearing vector, y-component [rad^2] + initDepth 0.5; Initial value for the initial distance parameter + startDetectionTh 0.8; Threshold for detecting new features (min: 0, max: 1) + scoreDetectionExponent 0.25; Exponent used for weighting the distance between candidates + penaltyDistance 100; Maximal distance which gets penalized during bucketing [pix] + zeroDistancePenalty 100; Penalty for zero distance (max: nDetectionBuckets) + statLocalQualityRange 10; Number of frames for local quality evaluation + statLocalVisibilityRange 100; Number of frames for local visibility evaluation + statMinGlobalQualityRange 100; Minimum number of frames for obtaining maximal global quality + trackingUpperBound 0.9; Threshold for local quality for min overall global quality + trackingLowerBound 0.8; Threshold for local quality for max overall global quality + minTrackedAndFreeFeatures 0.75; Minimum of amount of feature which are either tracked or free + removalFactor 1.1; Factor for enforcing feature removal if not enough free + minRelativeSTScore 0.75; Minimum relative ST-score for extracting new feature patch + 
minAbsoluteSTScore 5.0; Minimum absolute ST-score for extracting new feature patch + minTimeBetweenPatchUpdate 1.0; Minimum time between new multilevel patch extrection [s] + fastDetectionThreshold 5; Fast corner detector treshold while adding new feature + alignConvergencePixelRange 10; Assumed convergence range for image alignment (gets scaled with the level) [pixels] + alignCoverageRatio 2; How many sigma of the uncertainty should be covered in the adaptive alignement + alignMaxUniSample 1; Maximal number of alignment seeds on one side -> total number of sample = 2n+1. Carefull can get very costly if diverging! + patchRejectionTh 50.0; If the average itensity error is larger than this the feauture is rejected [intensity], if smaller 0 the no check is performed + alignmentHuberNormThreshold 10; Intensity error threshold for Huber norm (enabled if > 0) + alignmentGaussianWeightingSigma -1; Width of Gaussian which is used for pixel error weighting (enabled if > 0) + alignmentGradientExponent 0.0; Exponent used for gradient based weighting of residuals + useIntensityOffsetForAlignment true; Should an intensity offset between the patches be considered + useIntensitySqewForAlignment true; Should an intensity sqew between the patches be considered + removeNegativeFeatureAfterUpdate true; Should feature with negative distance get removed + maxUncertaintyToDepthRatioForDepthInitialization 0.3; If set to 0.0 the depth is initialized with the standard value provided above, otherwise ROVIO attempts to figure out a median depth in each frame + useCrossCameraMeasurements false; Should cross measurements between frame be used. Might be turned of in calibration phase. + doStereoInitialization false; Should a stereo match be used for feature initialization. + noiseGainForOffCamera 10.0; Factor added on update noise if not main camera + discriminativeSamplingDistance 0.02; Sampling distance for checking discriminativity of patch (if <= 0.0 no check is performed). 
+ discriminativeSamplingGain 1.1; Gain for threshold above which the samples must lie (if <= 1.0 the patchRejectionTh is used). + MotionDetection + { + isEnabled 0; Is the motion detection enabled + rateOfMovingFeaturesTh 0.5; Amount of feature with motion for overall motion detection + pixelCoordinateMotionTh 1.0; Threshold for motion detection for patched [pixels] + minFeatureCountForNoMotionDetection 5; Min feature count in frame for motion detection + } + ZeroVelocityUpdate + { + UpdateNoise + { + vel_0 0.01; X-Covariance of zero velocity update [m^2/s^2] + vel_1 0.01; Y-Covariance of zero velocity update [m^2/s^2] + vel_2 0.01; Z-Covariance of zero velocity update [m^2/s^2] + } + MahalanobisTh0 7.689997599999999; Mahalanobid distance for zero velocity updates + minNoMotionTime 1.0; Min duration of no-motion + isEnabled 0; Should zero velocity update be applied, only works if MotionDetection.isEnabled is true + } + + addGlobalBest false; Should the best features of all cameras be added (true) or the best of each camera (false) + histogramEqualize false; +} +Prediction +{ + PredictionNoise + { + pos_0 1e-4; X-covariance parameter of position prediction [m^2/s] + pos_1 1e-4; Y-covariance parameter of position prediction [m^2/s] + pos_2 1e-4; Z-covariance parameter of position prediction [m^2/s] + vel_0 4e-6; X-covariance parameter of velocity prediction [m^2/s^3] + vel_1 4e-6; Y-covariance parameter of velocity prediction [m^2/s^3] + vel_2 4e-6; Z-covariance parameter of velocity prediction [m^2/s^3] + acb_0 1e-8; X-covariance parameter of accelerometer bias prediction [m^2/s^5] + acb_1 1e-8; Y-covariance parameter of accelerometer bias prediction [m^2/s^5] + acb_2 1e-8; Z-covariance parameter of accelerometer bias prediction [m^2/s^5] + gyb_0 3.8e-7; X-covariance parameter of gyroscope bias prediction [rad^2/s^3] + gyb_1 3.8e-7; Y-covariance parameter of gyroscope bias prediction [rad^2/s^3] + gyb_2 3.8e-7; Z-covariance parameter of gyroscope bias prediction 
[rad^2/s^3] + vep 1e-8; Covariance parameter of linear extrinsics prediction [m^2/s] + att_0 7.6e-7; X-Covariance parameter of attitude prediction [rad^2/s] + att_1 7.6e-7; Y-Covariance parameter of attitude prediction [rad^2/s] + att_2 7.6e-7; Z-Covariance parameter of attitude prediction [rad^2/s] + vea 1e-8; Covariance parameter of rotational extrinsics prediction [rad^2/s] + dep 0.0001; Covariance parameter of distance prediction [m^2/s] + nor 0.00001; Covariance parameter of bearing vector prediction [rad^2/s] + } + MotionDetection + { + inertialMotionRorTh 0.1; Treshold on rotational rate for motion detection [rad/s] + inertialMotionAccTh 0.5; Treshold on acceleration for motion detection [m/s^2] + } +} +PoseUpdate +{ + UpdateNoise + { + pos_0 0.01; X-Covariance of linear pose measurement update [m^2] + pos_1 0.01; Y-Covariance of linear pose measurement update [m^2] + pos_2 0.01; Z-Covariance of linear pose measurement update [m^2] + att_0 0.01; X-Covariance of rotational pose measurement update [rad^2] + att_1 0.01; Y-Covariance of rotational pose measurement update [rad^2] + att_2 0.01; Z-Covariance of rotational pose measurement update [rad^2] + } + init_cov_IrIW 1; Covariance of initial pose between inertial frames, linear part [m^2] + init_cov_qWI 1; Covariance of initial pose between inertial frames, rotational part [rad^2] + init_cov_MrMV 1; Covariance of initial pose between inertial frames, linear part [m^2] + init_cov_qVM 1; Covariance of initial pose between inertial frames, rotational part [rad^2] + pre_cov_IrIW 1e-4; Covariance parameter of pose between inertial frames, linear part [m^2/s] + pre_cov_qWI 1e-4; Covariance parameter of pose between inertial frames, rotational part [rad^2/s] + pre_cov_MrMV 1e-4; Covariance parameter of pose between inertial frames, linear part [m^2/s] + pre_cov_qVM 1e-4; Covariance parameter of pose between inertial frames, rotational part [rad^2/s] + MahalanobisTh0 12.6511204; Mahalonobis distance treshold of pose 
measurement + doVisualization false; Should the measured poses be vizualized + enablePosition true; Should the linear part be used during the update + enableAttitude true; Should the rotation part be used during the update (e.g. set to fals eif feeding GPS-measurement) + noFeedbackToRovio true; By default the pose update is only used for aligning coordinate frame. Set to false if ROVIO should benefit frome the posed measurements. + doInertialAlignmentAtStart true; Should the transformation between I and W be explicitly computed and set with the first pose measurement. + timeOffset 0.0; Time offset added to the pose measurement timestamps + useOdometryCov false; Should the UpdateNoise position covariance be scaled using the covariance in the Odometry message + qVM_x 0; X-entry of quaterion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + qVM_y 0; Y-entry of quaterion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + qVM_z 0; Z-entry of quaterion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + qVM_w 1; W-entry of quaterion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + MrMV_x 0; X-entry of vector representing IMU to reference body coordinate frame of pose measurement + MrMV_y 0; Y-entry of vector representing IMU to reference body coordinate frame of pose measurement + MrMV_z 0; Z-entry of vector representing IMU to reference body coordinate frame of pose measurement + qWI_x 0; X-entry of quaterion representing World to reference inertial coordinate frame of pose measurement (Hamilton) + qWI_y 0; Y-entry of quaterion representing World to reference inertial coordinate frame of pose measurement (Hamilton) + qWI_z 0; Z-entry of quaterion representing World to reference inertial coordinate frame of pose measurement (Hamilton) + qWI_w 1; W-entry of quaterion representing World to reference inertial coordinate frame of pose 
measurement (Hamilton) + IrIW_x 0; X-entry of vector representing World to reference inertial coordinate frame of pose measurement + IrIW_y 0; Y-entry of vector representing World to reference inertial coordinate frame of pose measurement + IrIW_z 0; Z-entry of vector representing World to reference inertial coordinate frame of pose measurement +} +VelocityUpdate +{ + UpdateNoise + { + vel_0 0.0001 + vel_1 0.0001 + vel_2 0.0001 + } + MahalanobisTh0 7.689997599999999 + qAM_x 0 + qAM_y 0 + qAM_z 0 + qAM_w 1 +} diff --git a/cfg/rovio_domi.info_new b/cfg/rovio_domi.info_new new file mode 100644 index 00000000..b06399be --- /dev/null +++ b/cfg/rovio_domi.info_new @@ -0,0 +1,234 @@ +Common +{ + doVECalibration true + verbose false + depthType 1 +} +PoseUpdate +{ + doVisualization true +} +Init +{ + Covariance + { + pos_0 1 + pos_1 1 + pos_2 1 + vel_0 1 + vel_1 1 + vel_2 1 + acb_0 1 + acb_1 1 + acb_2 1 + gyb_0 1 + gyb_1 1 + gyb_2 1 + att_0 1 + att_1 1 + att_2 1 + vep 1 + vea 1 + } + State + { + att_x 0 + att_y 0 + att_z 0 + att_w 1 + gyb_0 0 + gyb_1 0 + gyb_2 0 + acb_0 0 + acb_1 0 + acb_2 0 + vel_0 0 + vel_1 0 + vel_2 0 + pos_0 0 + pos_1 0 + pos_2 0 + } +} +Camera0 +{ + qCM_x 0 + qCM_y 0 + qCM_z 0 + qCM_w 1 + MrMC_x 0 + MrMC_y 0 + MrMC_z 0 + CalibrationFile "" +} +Camera1 +{ + qCM_x 0 + qCM_y 0 + qCM_z 0 + qCM_w 1 + MrMC_x 0 + MrMC_y 0 + MrMC_z 0 + CalibrationFile "" +} +Camera2 +{ + qCM_x 0 + qCM_y 0 + qCM_z 0 + qCM_w 1 + MrMC_x 0 + MrMC_y 0 + MrMC_z 0 + CalibrationFile "" +} +VelocityUpdate +{ + UpdateNoise + { + vel_0 0.0001 + vel_1 0.0001 + vel_2 0.0001 + } + MahalanobisTh0 7.689997599999999 + qAM_x 0 + qAM_y 0 + qAM_z 0 + qAM_w 1 +} +PoseUpdate +{ + enablePosition true + enableAttitude true + noFeedbackToRovio true + doInertialAlignmentAtStart true + useOdometryCov false + UpdateNoise + { + pos_0 0 + pos_1 0 + pos_2 0 + att_0 0 + att_1 0 + att_2 0 + } + MahalanobisTh0 12.6511204 + qVM_x 0 + qVM_y 0 + qVM_z 0 + qVM_w 1 + MrMV_x 0 + MrMV_y 0 + MrMV_z 0 + qWI_x 0 + 
qWI_y 0 + qWI_z 0 + qWI_w 1 + IrIW_x 0 + IrIW_y 0 + IrIW_z 0 + timeOffset 0 +} +ImgUpdate +{ + doPatchWarping false + MotionDetection + { + isEnabled false + minFeatureCountForNoMotionDetection 5 + rateOfMovingFeaturesTh 0.5 + pixelCoordinateMotionTh 1 + } + useDirectMethod true + doFrameVisualisation true + visualizePatches false + removeNegativeFeatureAfterUpdate true + useCrossCameraMeasurements true + doStereoInitialization true + useIntensityOffsetForAlignment true + useIntensitySqewForAlignment true + statLocalQualityRange 10 + statLocalVisibilityRange 100 + statMinGlobalQualityRange 100 + maxNumIteration 10 + startLevel 3 + endLevel 1 + nDetectionBuckets 100 + fastDetectionThreshold 10 + alignMaxUniSample 5 + updateVecNormTermination 9.9999999999999995e-07 + MahalanobisTh 5.8858356000000001 + initCovFeature_0 1 + initCovFeature_1 1 + initCovFeature_2 1 + initDepth 0.5 + startDetectionTh 0.90000000000000002 + scoreDetectionExponent 0.5 + penaltyDistance 20 + zeroDistancePenalty 100 + trackingUpperBound 0.90000000000000002 + trackingLowerBound 0.10000000000000001 + minTrackedAndFreeFeatures 0.5 + minRelativeSTScore 0.20000000000000001 + minAbsoluteSTScore 0.20000000000000001 + minTimeBetweenPatchUpdate 1 + removalFactor 1.1000000000000001 + patchRejectionTh 10 + maxUncertaintyToDepthRatioForDepthInitialization 0.29999999999999999 + UpdateNoise + { + pix 2 + int 10000 + } + noiseGainForOffCamera 4 + alignConvergencePixelRange 1 + alignCoverageRatio 2 + alignmentHuberNormThreshold 0 + alignmentGaussianWeightingSigma 2 + alignmentGradientExponent 0 + discriminativeSamplingDistance 0 + discriminativeSamplingGain 0 + ZeroVelocityUpdate + { + isEnabled false + UpdateNoise + { + vel_0 0.0001 + vel_1 0.0001 + vel_2 0.0001 + } + minNoMotionTime 1 + MahalanobisTh0 7.689997599999999 + } +} +Prediction +{ + PredictionNoise + { + pos_0 0.0001 + pos_1 0.0001 + pos_2 0.0001 + vel_0 0.0001 + vel_1 0.0001 + vel_2 0.0001 + acb_0 0.0001 + acb_1 0.0001 + acb_2 0.0001 + gyb_0 
0.0001 + gyb_1 0.0001 + gyb_2 0.0001 + att_0 0.0001 + att_1 0.0001 + att_2 0.0001 + vep 0.0001 + vea 0.0001 + nor 0.0001 + dep 0.0001 + } + MotionDetection + { + inertialMotionRorTh 0.10000000000000001 + inertialMotionAccTh 0.10000000000000001 + } +} diff --git a/cfg/rovio_down.info b/cfg/rovio_down.info new file mode 100644 index 00000000..c97b141d --- /dev/null +++ b/cfg/rovio_down.info @@ -0,0 +1,232 @@ +; You can partially override parameter set in this file by creating your own subset of parameter in a separate info-file and include it with: +; #include "/home/user/workspace/rovio/cfg/rovio_custom.info" +Common +{ + doVECalibration true; Should the camera-IMU extrinsics be calibrated online + depthType 1; Type of depth parametrization (0: normal, 1: inverse depth, 2: log, 3: hyperbolic) + verbose false; Is the verbose active +} +Camera0 +{ + CalibrationFile ; Camera-Calibration file for intrinsics + qCM_x 0.708709126204; X-entry of IMU to Camera quaterion (Hamilton) + qCM_y -0.704791804906; Y-entry of IMU to Camera quaterion (Hamilton) + qCM_z 0.00387651358845; Z-entry of IMU to Camera quaterion (Hamilton) + qCM_w 0.031382460299; W-entry of IMU to Camera quaterion (Hamilton) + MrMC_x -0.054286735468; X-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_y 0.0755568214257; Y-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_z 0.0951657050294; Z-entry of IMU to Camera vector (expressed in IMU CF) [m] +} +Camera1 +{ + CalibrationFile ; Camera-Calibration file for intrinsics + qCM_x 0.00151329637706; X-entry of IMU to Camera quaterion (Hamilton) + qCM_y -0.0123329249764; Y-entry of IMU to Camera quaterion (Hamilton) + qCM_z -0.702657352863; Z-entry of IMU to Camera quaterion (Hamilton) + qCM_w 0.711419885414; W-entry of IMU to Camera quaterion (Hamilton) + MrMC_x -0.0091929160801; X-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_y 0.0540646643676; Y-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_z 
0.0190387660614; Z-entry of IMU to Camera vector (expressed in IMU CF) [m] +} +Init +{ + State + { + pos_0 0; X-entry of initial position (world to IMU in world) [m] + pos_1 0; Y-entry of initial position (world to IMU in world) [m] + pos_2 0; Z-entry of initial position (world to IMU in world) [m] + vel_0 0; X-entry of initial velocity (robocentric, IMU) [m/s] + vel_1 0; Y-entry of initial velocity (robocentric, IMU) [m/s] + vel_2 0; Z-entry of initial velocity (robocentric, IMU) [m/s] + acb_0 0; X-entry of accelerometer bias [m/s^2] + acb_1 0; Y-entry of accelerometer bias [m/s^2] + acb_2 0; Z-entry of accelerometer bias [m/s^2] + gyb_0 0; X-entry of gyroscope bias [rad/s] + gyb_1 0; Y-entry of gyroscope bias [rad/s] + gyb_2 0; Z-entry of gyroscope bias [rad/s] + att_x 0; X-entry of initial attitude (IMU to world, Hamilton) + att_y 0; Y-entry of initial attitude (IMU to world, Hamilton) + att_z 0; Z-entry of initial attitude (IMU to world, Hamilton) + att_w 1; W-entry of initial attitude (IMU to world, Hamilton) + } + Covariance + { + pos_0 0.0001; X-Covariance of initial position [m^2] + pos_1 0.0001; Y-Covariance of initial position [m^2] + pos_2 0.0001; Z-Covariance of initial position [m^2] + vel_0 1.0; X-Covariance of initial velocity [m^2/s^2] + vel_1 1.0; Y-Covariance of initial velocity [m^2/s^2] + vel_2 1.0; Z-Covariance of initial velocity [m^2/s^2] + acb_0 4e-4; X-Covariance of initial accelerometer bias [m^2/s^4] + acb_1 4e-4; Y-Covariance of initial accelerometer bias [m^2/s^4] + acb_2 4e-4; Z-Covariance of initial accelerometer bias [m^2/s^4] + gyb_0 3e-4; X-Covariance of initial gyroscope bias [rad^2/s^2] + gyb_1 3e-4; Y-Covariance of initial gyroscope bias [rad^2/s^2] + gyb_2 3e-4; Z-Covariance of initial gyroscope bias [rad^2/s^2] + vep 0.0001; Covariance of initial linear camera-IMU extrinsics, same for all entries [m^2] + att_0 0.1; X-Covariance of initial attitude [rad^2] + att_1 0.1; Y-Covariance of initial attitude [rad^2] + att_2 0.1; 
Z-Covariance of initial attitude [rad^2] + vea 0.01; Covariance of initial rotational camera-IMU extrinsics, same for all entries [rad^2] + } +} +ImgUpdate +{ + updateVecNormTermination 1e-4; + maxNumIteration 20; + doPatchWarping true; Should the patches be warped + doFrameVisualisation true; Should the frames be visualized + visualizePatches false; Should the patches be visualized + useDirectMethod true; Should the EKF-innovation be based on direct intensity error (o.w. reprojection error) + startLevel 2; Highest patch level which is being employed (must be smaller than the hardcoded template parameter) + endLevel 1; Lowest patch level which is being employed + nDetectionBuckets 100; Number of discretization buckets used during the candidates selection + MahalanobisTh 9.21; Mahalanobis treshold for the update, 5.8858356 + UpdateNoise + { + pix 2; Covariance used for the reprojection error, 1/focal_length is roughly 1 pixel std [rad] (~1/f ~ 1/400^2 = 1/160000) + int 400; Covariance used for the photometric error [intensity^2] + } + initCovFeature_0 0.5; Covariance for the initial distance (Relative to initialization depth [m^2/m^2]) + initCovFeature_1 1e-5; Covariance for the initial bearing vector, x-component [rad^2] + initCovFeature_2 1e-5; Covariance for the initial bearing vector, y-component [rad^2] + initDepth 0.5; Initial value for the initial distance parameter + startDetectionTh 0.8; Threshold for detecting new features (min: 0, max: 1) + scoreDetectionExponent 0.25; Exponent used for weighting the distance between candidates + penaltyDistance 100; Maximal distance which gets penalized during bucketing [pix] + zeroDistancePenalty 100; Penalty for zero distance (max: nDetectionBuckets) + statLocalQualityRange 10; Number of frames for local quality evaluation + statLocalVisibilityRange 100; Number of frames for local visibility evaluation + statMinGlobalQualityRange 100; Minimum number of frames for obtaining maximal global quality + trackingUpperBound 
0.9; Threshold for local quality for min overall global quality + trackingLowerBound 0.8; Threshold for local quality for max overall global quality + minTrackedAndFreeFeatures 0.75; Minimum of amount of feature which are either tracked or free + removalFactor 1.1; Factor for enforcing feature removal if not enough free + minRelativeSTScore 0.75; Minimum relative ST-score for extracting new feature patch + minAbsoluteSTScore 5.0; Minimum absolute ST-score for extracting new feature patch + minTimeBetweenPatchUpdate 1.0; Minimum time between new multilevel patch extrection [s] + fastDetectionThreshold 5; Fast corner detector treshold while adding new feature + alignConvergencePixelRange 10; Assumed convergence range for image alignment (gets scaled with the level) [pixels] + alignCoverageRatio 2; How many sigma of the uncertainty should be covered in the adaptive alignement + alignMaxUniSample 1; Maximal number of alignment seeds on one side -> total number of sample = 2n+1. Carefull can get very costly if diverging! 
+ patchRejectionTh 50.0; If the average intensity error is larger than this the feature is rejected [intensity], if smaller than 0 then no check is performed + alignmentHuberNormThreshold 10; Intensity error threshold for Huber norm (enabled if > 0) + alignmentGaussianWeightingSigma -1; Width of Gaussian which is used for pixel error weighting (enabled if > 0) + alignmentGradientExponent 0.0; Exponent used for gradient based weighting of residuals + useIntensityOffsetForAlignment true; Should an intensity offset between the patches be considered + useIntensitySqewForAlignment true; Should an intensity skew between the patches be considered + removeNegativeFeatureAfterUpdate true; Should feature with negative distance get removed + maxUncertaintyToDepthRatioForDepthInitialization 0.3; If set to 0.0 the depth is initialized with the standard value provided above, otherwise ROVIO attempts to figure out a median depth in each frame + useCrossCameraMeasurements true; Should cross measurements between frame be used. Might be turned off in calibration phase. + doStereoInitialization true; Should a stereo match be used for feature initialization. + noiseGainForOffCamera 10.0; Factor added on update noise if not main camera + discriminativeSamplingDistance 0.02; Sampling distance for checking discriminativity of patch (if <= 0.0 no check is performed). + discriminativeSamplingGain 1.1; Gain for threshold above which the samples must lie (if <= 1.0 the patchRejectionTh is used). 
+ MotionDetection + { + isEnabled 0; Is the motion detection enabled + rateOfMovingFeaturesTh 0.5; Amount of feature with motion for overall motion detection + pixelCoordinateMotionTh 1.0; Threshold for motion detection for patched [pixels] + minFeatureCountForNoMotionDetection 5; Min feature count in frame for motion detection + } + ZeroVelocityUpdate + { + UpdateNoise + { + vel_0 0.01; X-Covariance of zero velocity update [m^2/s^2] + vel_1 0.01; Y-Covariance of zero velocity update [m^2/s^2] + vel_2 0.01; Z-Covariance of zero velocity update [m^2/s^2] + } + MahalanobisTh0 7.689997599999999; Mahalanobid distance for zero velocity updates + minNoMotionTime 1.0; Min duration of no-motion + isEnabled 0; Should zero velocity update be applied, only works if MotionDetection.isEnabled is true + } +} +Prediction +{ + PredictionNoise + { + pos_0 1e-4; X-covariance parameter of position prediction [m^2/s] + pos_1 1e-4; Y-covariance parameter of position prediction [m^2/s] + pos_2 1e-4; Z-covariance parameter of position prediction [m^2/s] + vel_0 4e-5; 4e-6 X-covariance parameter of velocity prediction [m^2/s^3] + vel_1 4e-5; Y-covariance parameter of velocity prediction [m^2/s^3] + vel_2 4e-5; Z-covariance parameter of velocity prediction [m^2/s^3] + acb_0 9e-6; X-covariance parameter of accelerometer bias prediction [m^2/s^5] + acb_1 9e-6; Y-covariance parameter of accelerometer bias prediction [m^2/s^5] + acb_2 9e-6; Z-covariance parameter of accelerometer bias prediction [m^2/s^5] + gyb_0 2e-8; X-covariance parameter of gyroscope bias prediction [rad^2/s^3] + gyb_1 2e-8; Y-covariance parameter of gyroscope bias prediction [rad^2/s^3] + gyb_2 2e-8; Z-covariance parameter of gyroscope bias prediction [rad^2/s^3] + vep 1e-6; Covariance parameter of linear extrinsics prediction [m^2/s] + att_0 7.6e-6; 7.6e-7 X-Covariance parameter of attitude prediction [rad^2/s] + att_1 7.6e-6; Y-Covariance parameter of attitude prediction [rad^2/s] + att_2 7.6e-6; Z-Covariance parameter 
of attitude prediction [rad^2/s] + vea 1e-6; Covariance parameter of rotational extrinsics prediction [rad^2/s] + dep 0.0001; Covariance parameter of distance prediction [m^2/s] + nor 0.00001; Covariance parameter of bearing vector prediction [rad^2/s] + + + } + MotionDetection + { + inertialMotionRorTh 0.1; Treshold on rotational rate for motion detection [rad/s] + inertialMotionAccTh 0.5; Treshold on acceleration for motion detection [m/s^2] + } +} +PoseUpdate +{ + UpdateNoise + { + pos_0 0.01; X-Covariance of linear pose measurement update [m^2] + pos_1 0.01; Y-Covariance of linear pose measurement update [m^2] + pos_2 0.01; Z-Covariance of linear pose measurement update [m^2] + att_0 0.01; X-Covariance of rotational pose measurement update [rad^2] + att_1 0.01; Y-Covariance of rotational pose measurement update [rad^2] + att_2 0.01; Z-Covariance of rotational pose measurement update [rad^2] + } + init_cov_IrIW 1; Covariance of initial pose between inertial frames, linear part [m^2] + init_cov_qWI 1; Covariance of initial pose between inertial frames, rotational part [rad^2] + init_cov_MrMV 1; Covariance of initial pose between inertial frames, linear part [m^2] + init_cov_qVM 1; Covariance of initial pose between inertial frames, rotational part [rad^2] + pre_cov_IrIW 1e-4; Covariance parameter of pose between inertial frames, linear part [m^2/s] + pre_cov_qWI 1e-4; Covariance parameter of pose between inertial frames, rotational part [rad^2/s] + pre_cov_MrMV 1e-4; Covariance parameter of pose between inertial frames, linear part [m^2/s] + pre_cov_qVM 1e-4; Covariance parameter of pose between inertial frames, rotational part [rad^2/s] + MahalanobisTh0 12.6511204; Mahalonobis distance treshold of pose measurement + doVisualization false; Should the measured poses be vizualized + enablePosition true; Should the linear part be used during the update + enableAttitude true; Should the rotation part be used during the update (e.g. 
set to false if feeding GPS-measurement) + noFeedbackToRovio true; By default the pose update is only used for aligning coordinate frame. Set to false if ROVIO should benefit from the pose measurements. + doInertialAlignmentAtStart true; Should the transformation between I and W be explicitly computed and set with the first pose measurement. + timeOffset 0.0; Time offset added to the pose measurement timestamps + useOdometryCov false; Should the UpdateNoise position covariance be scaled using the covariance in the Odometry message + qVM_x 0; X-entry of quaternion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + qVM_y 0; Y-entry of quaternion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + qVM_z 0; Z-entry of quaternion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + qVM_w -1; W-entry of quaternion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + MrMV_x 0; X-entry of vector representing IMU to reference body coordinate frame of pose measurement + MrMV_y 0; Y-entry of vector representing IMU to reference body coordinate frame of pose measurement + MrMV_z 0; Z-entry of vector representing IMU to reference body coordinate frame of pose measurement + qWI_x 0; X-entry of quaternion representing World to reference inertial coordinate frame of pose measurement (Hamilton) + qWI_y 0; Y-entry of quaternion representing World to reference inertial coordinate frame of pose measurement (Hamilton) + qWI_z 0; Z-entry of quaternion representing World to reference inertial coordinate frame of pose measurement (Hamilton) + qWI_w -1; W-entry of quaternion representing World to reference inertial coordinate frame of pose measurement (Hamilton) + IrIW_x 0; X-entry of vector representing World to reference inertial coordinate frame of pose measurement + IrIW_y 0; Y-entry of vector representing World to reference inertial coordinate frame of pose 
measurement + IrIW_z 0; Z-entry of vector representing World to reference inertial coordinate frame of pose measurement +} +VelocityUpdate +{ + UpdateNoise + { + vel_0 0.0001 + vel_1 0.0001 + vel_2 0.0001 + } + MahalanobisTh0 7.689997599999999 + qAM_x 0 + qAM_y 0 + qAM_z 0 + qAM_w 1 +} \ No newline at end of file diff --git a/cfg/rovio_orig.info b/cfg/rovio_orig.info new file mode 100644 index 00000000..f237fe5d --- /dev/null +++ b/cfg/rovio_orig.info @@ -0,0 +1,230 @@ +; You can partially override parameter set in this file by creating your own subset of parameter in a separate info-file and include it with: +; #include "/home/user/workspace/rovio/cfg/rovio_custom.info" +Common +{ + doVECalibration true; Should the camera-IMU extrinsics be calibrated online + depthType 1; Type of depth parametrization (0: normal, 1: inverse depth, 2: log, 3: hyperbolic) + verbose false; Is the verbose active +} +Camera0 +{ + CalibrationFile ; Camera-Calibration file for intrinsics + qCM_x 0.00666398307551; X-entry of IMU to Camera quaterion (Hamilton) + qCM_y -0.0079168224269; Y-entry of IMU to Camera quaterion (Hamilton) + qCM_z -0.701985972528; Z-entry of IMU to Camera quaterion (Hamilton) + qCM_w 0.712115587266; W-entry of IMU to Camera quaterion (Hamilton) + MrMC_x -0.0111674199187; X-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_y -0.0574640920022; Y-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_z 0.0207586947896; Z-entry of IMU to Camera vector (expressed in IMU CF) [m] +} +Camera1 +{ + CalibrationFile ; Camera-Calibration file for intrinsics + qCM_x 0.00151329637706; X-entry of IMU to Camera quaterion (Hamilton) + qCM_y -0.0123329249764; Y-entry of IMU to Camera quaterion (Hamilton) + qCM_z -0.702657352863; Z-entry of IMU to Camera quaterion (Hamilton) + qCM_w 0.711419885414; W-entry of IMU to Camera quaterion (Hamilton) + MrMC_x -0.0091929160801; X-entry of IMU to Camera vector (expressed in IMU CF) [m] + MrMC_y 0.0540646643676; Y-entry of 
IMU to Camera vector (expressed in IMU CF) [m] + MrMC_z 0.0190387660614; Z-entry of IMU to Camera vector (expressed in IMU CF) [m] +} +Init +{ + State + { + pos_0 0; X-entry of initial position (world to IMU in world) [m] + pos_1 0; Y-entry of initial position (world to IMU in world) [m] + pos_2 0; Z-entry of initial position (world to IMU in world) [m] + vel_0 0; X-entry of initial velocity (robocentric, IMU) [m/s] + vel_1 0; Y-entry of initial velocity (robocentric, IMU) [m/s] + vel_2 0; Z-entry of initial velocity (robocentric, IMU) [m/s] + acb_0 0; X-entry of accelerometer bias [m/s^2] + acb_1 0; Y-entry of accelerometer bias [m/s^2] + acb_2 0; Z-entry of accelerometer bias [m/s^2] + gyb_0 0; X-entry of gyroscope bias [rad/s] + gyb_1 0; Y-entry of gyroscope bias [rad/s] + gyb_2 0; Z-entry of gyroscope bias [rad/s] + att_x 0; X-entry of initial attitude (IMU to world, Hamilton) + att_y 0; Y-entry of initial attitude (IMU to world, Hamilton) + att_z 0; Z-entry of initial attitude (IMU to world, Hamilton) + att_w 1; W-entry of initial attitude (IMU to world, Hamilton) + } + Covariance + { + pos_0 0.0001; X-Covariance of initial position [m^2] + pos_1 0.0001; Y-Covariance of initial position [m^2] + pos_2 0.0001; Z-Covariance of initial position [m^2] + vel_0 1.0; X-Covariance of initial velocity [m^2/s^2] + vel_1 1.0; Y-Covariance of initial velocity [m^2/s^2] + vel_2 1.0; Z-Covariance of initial velocity [m^2/s^2] + acb_0 4e-4; X-Covariance of initial accelerometer bias [m^2/s^4] + acb_1 4e-4; Y-Covariance of initial accelerometer bias [m^2/s^4] + acb_2 4e-4; Z-Covariance of initial accelerometer bias [m^2/s^4] + gyb_0 3e-4; X-Covariance of initial gyroscope bias [rad^2/s^2] + gyb_1 3e-4; Y-Covariance of initial gyroscope bias [rad^2/s^2] + gyb_2 3e-4; Z-Covariance of initial gyroscope bias [rad^2/s^2] + vep 0.0001; Covariance of initial linear camera-IMU extrinsics, same for all entries [m^2] + att_0 0.1; X-Covariance of initial attitude [rad^2] + att_1 0.1; 
Y-Covariance of initial attitude [rad^2] + att_2 0.1; Z-Covariance of initial attitude [rad^2] + vea 0.01; Covariance of initial rotational camera-IMU extrinsics, same for all entries [rad^2] + } +} +ImgUpdate +{ + updateVecNormTermination 1e-4; + maxNumIteration 20; + doPatchWarping true; Should the patches be warped + doFrameVisualisation true; Should the frames be visualized + visualizePatches false; Should the patches be visualized + useDirectMethod true; Should the EKF-innovation be based on direct intensity error (o.w. reprojection error) + startLevel 2; Highest patch level which is being employed (must be smaller than the hardcoded template parameter) + endLevel 1; Lowest patch level which is being employed + nDetectionBuckets 100; Number of discretization buckets used during the candidates selection + MahalanobisTh 9.21; Mahalanobis treshold for the update, 5.8858356 + UpdateNoise + { + pix 2; Covariance used for the reprojection error, 1/focal_length is roughly 1 pixel std [rad] (~1/f ~ 1/400^2 = 1/160000) + int 400; Covariance used for the photometric error [intensity^2] + } + initCovFeature_0 0.5; Covariance for the initial distance (Relative to initialization depth [m^2/m^2]) + initCovFeature_1 1e-5; Covariance for the initial bearing vector, x-component [rad^2] + initCovFeature_2 1e-5; Covariance for the initial bearing vector, y-component [rad^2] + initDepth 0.5; Initial value for the initial distance parameter + startDetectionTh 0.8; Threshold for detecting new features (min: 0, max: 1) + scoreDetectionExponent 0.25; Exponent used for weighting the distance between candidates + penaltyDistance 100; Maximal distance which gets penalized during bucketing [pix] + zeroDistancePenalty 100; Penalty for zero distance (max: nDetectionBuckets) + statLocalQualityRange 10; Number of frames for local quality evaluation + statLocalVisibilityRange 100; Number of frames for local visibility evaluation + statMinGlobalQualityRange 100; Minimum number of frames for 
obtaining maximal global quality + trackingUpperBound 0.9; Threshold for local quality for min overall global quality + trackingLowerBound 0.8; Threshold for local quality for max overall global quality + minTrackedAndFreeFeatures 0.75; Minimum of amount of feature which are either tracked or free + removalFactor 1.1; Factor for enforcing feature removal if not enough free + minRelativeSTScore 0.75; Minimum relative ST-score for extracting new feature patch + minAbsoluteSTScore 5.0; Minimum absolute ST-score for extracting new feature patch + minTimeBetweenPatchUpdate 1.0; Minimum time between new multilevel patch extrection [s] + fastDetectionThreshold 5; Fast corner detector treshold while adding new feature + alignConvergencePixelRange 10; Assumed convergence range for image alignment (gets scaled with the level) [pixels] + alignCoverageRatio 2; How many sigma of the uncertainty should be covered in the adaptive alignement + alignMaxUniSample 1; Maximal number of alignment seeds on one side -> total number of sample = 2n+1. Carefull can get very costly if diverging! 
+ patchRejectionTh 50.0; If the average intensity error is larger than this the feature is rejected [intensity], if smaller than 0 then no check is performed + alignmentHuberNormThreshold 10; Intensity error threshold for Huber norm (enabled if > 0) + alignmentGaussianWeightingSigma -1; Width of Gaussian which is used for pixel error weighting (enabled if > 0) + alignmentGradientExponent 0.0; Exponent used for gradient based weighting of residuals + useIntensityOffsetForAlignment true; Should an intensity offset between the patches be considered + useIntensitySqewForAlignment true; Should an intensity skew between the patches be considered + removeNegativeFeatureAfterUpdate true; Should feature with negative distance get removed + maxUncertaintyToDepthRatioForDepthInitialization 0.3; If set to 0.0 the depth is initialized with the standard value provided above, otherwise ROVIO attempts to figure out a median depth in each frame + useCrossCameraMeasurements true; Should cross measurements between frame be used. Might be turned off in calibration phase. + doStereoInitialization true; Should a stereo match be used for feature initialization. + noiseGainForOffCamera 10.0; Factor added on update noise if not main camera + discriminativeSamplingDistance 0.02; Sampling distance for checking discriminativity of patch (if <= 0.0 no check is performed). + discriminativeSamplingGain 1.1; Gain for threshold above which the samples must lie (if <= 1.0 the patchRejectionTh is used). 
+ MotionDetection + { + isEnabled 0; Is the motion detection enabled + rateOfMovingFeaturesTh 0.5; Amount of feature with motion for overall motion detection + pixelCoordinateMotionTh 1.0; Threshold for motion detection for patched [pixels] + minFeatureCountForNoMotionDetection 5; Min feature count in frame for motion detection + } + ZeroVelocityUpdate + { + UpdateNoise + { + vel_0 0.01; X-Covariance of zero velocity update [m^2/s^2] + vel_1 0.01; Y-Covariance of zero velocity update [m^2/s^2] + vel_2 0.01; Z-Covariance of zero velocity update [m^2/s^2] + } + MahalanobisTh0 7.689997599999999; Mahalanobid distance for zero velocity updates + minNoMotionTime 1.0; Min duration of no-motion + isEnabled 0; Should zero velocity update be applied, only works if MotionDetection.isEnabled is true + } +} +Prediction +{ + PredictionNoise + { + pos_0 1e-4; X-covariance parameter of position prediction [m^2/s] + pos_1 1e-4; Y-covariance parameter of position prediction [m^2/s] + pos_2 1e-4; Z-covariance parameter of position prediction [m^2/s] + vel_0 4e-6; X-covariance parameter of velocity prediction [m^2/s^3] + vel_1 4e-6; Y-covariance parameter of velocity prediction [m^2/s^3] + vel_2 4e-6; Z-covariance parameter of velocity prediction [m^2/s^3] + acb_0 1e-8; X-covariance parameter of accelerometer bias prediction [m^2/s^5] + acb_1 1e-8; Y-covariance parameter of accelerometer bias prediction [m^2/s^5] + acb_2 1e-8; Z-covariance parameter of accelerometer bias prediction [m^2/s^5] + gyb_0 3.8e-7; X-covariance parameter of gyroscope bias prediction [rad^2/s^3] + gyb_1 3.8e-7; Y-covariance parameter of gyroscope bias prediction [rad^2/s^3] + gyb_2 3.8e-7; Z-covariance parameter of gyroscope bias prediction [rad^2/s^3] + vep 1e-8; Covariance parameter of linear extrinsics prediction [m^2/s] + att_0 7.6e-7; X-Covariance parameter of attitude prediction [rad^2/s] + att_1 7.6e-7; Y-Covariance parameter of attitude prediction [rad^2/s] + att_2 7.6e-7; Z-Covariance parameter of 
attitude prediction [rad^2/s] + vea 1e-8; Covariance parameter of rotational extrinsics prediction [rad^2/s] + dep 0.0001; Covariance parameter of distance prediction [m^2/s] + nor 0.00001; Covariance parameter of bearing vector prediction [rad^2/s] + } + MotionDetection + { + inertialMotionRorTh 0.1; Treshold on rotational rate for motion detection [rad/s] + inertialMotionAccTh 0.5; Treshold on acceleration for motion detection [m/s^2] + } +} +PoseUpdate +{ + UpdateNoise + { + pos_0 0.01; X-Covariance of linear pose measurement update [m^2] + pos_1 0.01; Y-Covariance of linear pose measurement update [m^2] + pos_2 0.01; Z-Covariance of linear pose measurement update [m^2] + att_0 0.01; X-Covariance of rotational pose measurement update [rad^2] + att_1 0.01; Y-Covariance of rotational pose measurement update [rad^2] + att_2 0.01; Z-Covariance of rotational pose measurement update [rad^2] + } + init_cov_IrIW 1; Covariance of initial pose between inertial frames, linear part [m^2] + init_cov_qWI 1; Covariance of initial pose between inertial frames, rotational part [rad^2] + init_cov_MrMV 1; Covariance of initial pose between inertial frames, linear part [m^2] + init_cov_qVM 1; Covariance of initial pose between inertial frames, rotational part [rad^2] + pre_cov_IrIW 1e-4; Covariance parameter of pose between inertial frames, linear part [m^2/s] + pre_cov_qWI 1e-4; Covariance parameter of pose between inertial frames, rotational part [rad^2/s] + pre_cov_MrMV 1e-4; Covariance parameter of pose between inertial frames, linear part [m^2/s] + pre_cov_qVM 1e-4; Covariance parameter of pose between inertial frames, rotational part [rad^2/s] + MahalanobisTh0 12.6511204; Mahalonobis distance treshold of pose measurement + doVisualization false; Should the measured poses be vizualized + enablePosition true; Should the linear part be used during the update + enableAttitude true; Should the rotation part be used during the update (e.g. 
set to false if feeding GPS-measurement) + noFeedbackToRovio true; By default the pose update is only used for aligning coordinate frame. Set to false if ROVIO should benefit from the pose measurements. + doInertialAlignmentAtStart true; Should the transformation between I and W be explicitly computed and set with the first pose measurement. + timeOffset 0.0; Time offset added to the pose measurement timestamps + useOdometryCov false; Should the UpdateNoise position covariance be scaled using the covariance in the Odometry message + qVM_x 0; X-entry of quaternion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + qVM_y 0; Y-entry of quaternion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + qVM_z 0; Z-entry of quaternion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + qVM_w 1; W-entry of quaternion representing IMU to reference body coordinate frame of pose measurement (Hamilton) + MrMV_x 0; X-entry of vector representing IMU to reference body coordinate frame of pose measurement + MrMV_y 0; Y-entry of vector representing IMU to reference body coordinate frame of pose measurement + MrMV_z 0; Z-entry of vector representing IMU to reference body coordinate frame of pose measurement + qWI_x 0; X-entry of quaternion representing World to reference inertial coordinate frame of pose measurement (Hamilton) + qWI_y 0; Y-entry of quaternion representing World to reference inertial coordinate frame of pose measurement (Hamilton) + qWI_z 0; Z-entry of quaternion representing World to reference inertial coordinate frame of pose measurement (Hamilton) + qWI_w 1; W-entry of quaternion representing World to reference inertial coordinate frame of pose measurement (Hamilton) + IrIW_x 0; X-entry of vector representing World to reference inertial coordinate frame of pose measurement + IrIW_y 0; Y-entry of vector representing World to reference inertial coordinate frame of pose 
measurement + IrIW_z 0; Z-entry of vector representing World to reference inertial coordinate frame of pose measurement +} +VelocityUpdate +{ + UpdateNoise + { + vel_0 0.0001 + vel_1 0.0001 + vel_2 0.0001 + } + MahalanobisTh0 7.689997599999999 + qAM_x 0 + qAM_y 0 + qAM_z 0 + qAM_w 1 +} diff --git a/include/rovio/FeatureManager.hpp b/include/rovio/FeatureManager.hpp index 2fe0917b..4c50b5fe 100644 --- a/include/rovio/FeatureManager.hpp +++ b/include/rovio/FeatureManager.hpp @@ -265,7 +265,7 @@ class FeatureSetManager{ * @param initTime - Current time (time at which the MultilevelPatchFeature%s are created from the candidates list). * @param l1 - Start pyramid level for the Shi-Tomasi Score computation of MultilevelPatchFeature%s extracted from the candidates list. * @param l2 - End pyramid level for the Shi-Tomasi Score computation of MultilevelPatchFeature%s extracted from the candidates list. - * @param maxAddedFeature - Maximal number of features which should be added to the mlpSet. + * @param maxAddedFeature - Maximal number of features which should be added to the mlpSet. * @param nDetectionBuckets - Number of buckets. * @param scoreDetectionExponent - Choose it between [0 1]. 1 : Candidate features are sorted linearly into the buckets, depending on their Shi-Tomasi score. * 0 : All candidate features are filled into the highest bucket. @@ -389,6 +389,175 @@ class FeatureSetManager{ return newFeatureIDs; } +//////////////////////////////////////////////////////////////////////////// FUNCTION FOR ADDING BEST GLOBAL FEATURES +//////////////////////////////////////////////////////////////////////////// added by Leonie Traffelet + + /** \brief Adds the best MultilevelPatchFeature%s from a candidates list to an existing MultilevelPatchSet of ALL CAMERAS. + * + * This function takes a given feature candidate list of all cameras and builds, in a first step, + * MultilevelPatchFeature%s out of it using a given image pyramid. 
For each MultilevelPatchFeature the + * corresponding Shi-Tomasi Score is computed. + * In a second step, the candidate MultilevelPatchFeature%s are sorted and + * placed into buckets of their detection camera, depending on their individual Shi-Tomasi Score. MultilevelPatchFeature%s in a high bucket + * (high bucket index) have higher Shi-Tomasi Scores than MultilevelPatchFeature%s which have been placed into a + * low bucket. + * In a third step, the MultilevelPatchFeature%s in the buckets are reordered, depending on their distance + * to already existing features in the given MultilevelPatchSet. A small distance to an existing feature is punished, + * by moving the concerned candidate MultilevelPatchFeature into a lower bucket. + * Finally the best MultilevelPatchFeatures of each camera bucket are compared to each other and the best ones are added to the existing MultilevelPatchSet. + * + * @param candidates[nCam] - Array containing the list of candidate feature coordinates per camera. + * @param pyr[nCam] - Image pyramids of all the cameras used to extract the MultilevelPatchFeature%s from the candidates list. + * @param initTime - Current time (time at which the MultilevelPatchFeature%s are created from the candidates list). + * @param l1 - Start pyramid level for the Shi-Tomasi Score computation of MultilevelPatchFeature%s extracted from the candidates list. + * @param l2 - End pyramid level for the Shi-Tomasi Score computation of MultilevelPatchFeature%s extracted from the candidates list. + * @param maxAddedFeature - Maximal number of features which should be added to the mlpSet. + * @param nDetectionBuckets - Number of buckets. + * @param scoreDetectionExponent - Choose it between [0 1]. 1 : Candidate features are sorted linearly into the buckets, depending on their Shi-Tomasi score. + * 0 : All candidate features are filled into the highest bucket. + * A small scoreDetectionExponent forces more candidate features into high buckets. 
+ * @param penaltyDistance - If a candidate feature has a smaller distance to an existing feature in the mlpSet, it is punished (shifted in an lower bucket) dependent of its actual distance to the existing feature. + * @param zeroDistancePenalty - A candidate feature in a specific bucket is shifted zeroDistancePenalty-buckets back to a lower bucket if it has zero distance to an existing feature in the mlpSet. + * @param requireMax - Should the adding of maxAddedFeature be enforced? + * @param minScore - Shi-Tomasi Score threshold for the best (highest Shi-Tomasi Score) MultilevelPatchFeature extracted from the candidates list. + * If the best MultilevelPatchFeature has a Shi-Tomasi Score less than or equal this threshold, the function aborts and returns an empty map. + * + * @return an unordered_set, holding the indizes of the MultilevelPatchSet, at which the new MultilevelPatchFeature%s have been added (from the candidates list). + */ + + std::unordered_set addBestGlobalCandidates(const FeatureCoordinatesVec (&candidates)[nCam], const ImagePyramid (&pyr)[nCam], const double initTime, + const int l1, const int l2, const int maxAddedFeature, const int nDetectionBuckets, const double scoreDetectionExponent, + const double penaltyDistance, const double zeroDistancePenalty, const bool requireMax, const float minScore){ + std::unordered_set newFeatureIDs; + std::vector> multilevelPatches[nCam]; + std::vector>> buckets(nCam, std::vector>(nDetectionBuckets)); + + double d2; + double t2 = pow(penaltyDistance,2); + bool doDelete; + unsigned int newBucketID; + + for(int camID = 0;camID maxScore) maxScore = multilevelPatches[camID].back().s_; + } else { + multilevelPatches[camID].back().s_ = -1; + } + } + if(maxScore <= minScore){ + break; + } + + // Make buckets and fill based on score + float relScore; + for(int i=0;i 0.0){ + newBucketID = std::ceil((nDetectionBuckets-1)*(pow(relScore,static_cast(scoreDetectionExponent)))); + if(newBucketID>nDetectionBuckets-1) newBucketID 
= nDetectionBuckets-1; + buckets[camID][newBucketID].insert(i); + } + } + + // Move buckets based on current features + FeatureCoordinates featureCoordinates; + FeatureDistance featureDistance; + for(unsigned int i=0;itransformFeature(camID,*(features_[i].mpCoordinates_),*(features_[i].mpDistance_),featureCoordinates,featureDistance); + if(featureCoordinates.isInFront()){ + for (unsigned int bucketID = 1;bucketID < nDetectionBuckets;bucketID++) { + for (auto it_cand = buckets[camID][bucketID].begin();it_cand != buckets[camID][bucketID].end();) { + doDelete = false; + d2 = std::pow(featureCoordinates.get_c().x - candidates[camID][*it_cand].get_c().x,2) + std::pow(featureCoordinates.get_c().y - candidates[camID][*it_cand].get_c().y,2); // Squared distance between the existing feature and the candidate feature. + if(d2 > nfCandidates(2, std::vector(nCam)); + std::vector bestBucketID(nCam, nDetectionBuckets-1); + + + while(addedCount < maxAddedFeature && getValidCount() != nMax) { + // get best features in each camera + for (int camID = 0; camID < nCam; camID++){ + for (int bucketID = bestBucketID[camID];bucketID >= 0+static_cast(!requireMax);bucketID--) { + //std::cout << "in bucket for loop. 
camera: " << camID << " Bucket: " << bucketID << std::endl; + if(!buckets[camID][bucketID].empty()){ + nfCandidates[0][camID] = bucketID; + nfCandidates[1][camID] = *(buckets[camID][bucketID].begin()); + bestBucketID[camID] = bucketID; + break; + } + } + } + + // get best of these + const int nfCam = std::max_element(nfCandidates[1].begin(), nfCandidates[1].end()) - nfCandidates[1].begin(); + const int nfBucket = nfCandidates[0][nfCam]; + const int nf = nfCandidates[1][nfCam]; + + buckets[nfCam][nfBucket].erase(nf); + const int ind = makeNewFeature(nfCam); + features_[ind].mpCoordinates_->set_c(candidates[nfCam][nf].get_c()); + features_[ind].mpCoordinates_->camID_ = nfCam; + features_[ind].mpCoordinates_->set_warp_identity(); + features_[ind].mpCoordinates_->mpCamera_ = &mpMultiCamera_->cameras_[nfCam]; + *(features_[ind].mpMultilevelPatch_) = multilevelPatches[nfCam][nf]; + if(ind >= 0){ + newFeatureIDs.insert(ind); + } + addedCount++; + for (unsigned int bucketID2 = 1;bucketID2 <= nfBucket;bucketID2++) { + for (auto it_cand = buckets[nfCam][bucketID2].begin();it_cand != buckets[nfCam][bucketID2].end();) { + doDelete = false; + d2 = std::pow(candidates[nfCam][nf].get_c().x - candidates[nfCam][*it_cand].get_c().x,2) + std::pow(candidates[nfCam][nf].get_c().y - candidates[nfCam][*it_cand].get_c().y,2); + if(d2,int>& mp1, const std::tuple,int>&mp2){ return std::get<1>(mp1).s_ > std::get<1>(mp2).s_; } diff --git a/include/rovio/HealthMonitor.hpp b/include/rovio/HealthMonitor.hpp new file mode 100644 index 00000000..8b6f245f --- /dev/null +++ b/include/rovio/HealthMonitor.hpp @@ -0,0 +1,90 @@ +// This file was stolen straight out of maplab + +#ifndef ROVIO_HEALTH_MONITOR_H_ +#define ROVIO_HEALTH_MONITOR_H_ + +#include +#include + +#include "rovio/CoordinateTransform/RovioOutput.hpp" +#include "rovio/RovioFilter.hpp" + +namespace rovio { +class RovioHealthMonitor { + public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW + + RovioHealthMonitor() : 
num_subsequent_unhealthy_updates_(0) {} + + // Returns true if healthy; false if unhealthy and reset was triggered. + bool shouldResetEstimator(const std::vector& distance_covs_in, const StandardOutput& imu_output) { + float feature_distance_covariance_median = 0; + std::vector distance_covs = distance_covs_in; + if (!distance_covs.empty()) { + const size_t middle_index = distance_covs.size() / 2; + std::nth_element(distance_covs.begin(), + distance_covs.begin() + middle_index, + distance_covs.end()); + feature_distance_covariance_median = distance_covs[middle_index]; + } + const float BvB_norm = imu_output.BvB().norm(); + if ((BvB_norm > kVelocityToConsiderStatic) && ((BvB_norm > kUnhealthyVelocity) || + (feature_distance_covariance_median > kUnhealthyFeatureDistanceCov))) { + ++num_subsequent_unhealthy_updates_; + std::cout << "Estimator fault counter: " + << num_subsequent_unhealthy_updates_ << "/" + << kMaxSubsequentUnhealthyUpdates << ". Might reset soon."; + if (num_subsequent_unhealthy_updates_ > kMaxSubsequentUnhealthyUpdates) { + std::cout << "Will reset ROVIOLI. 
Velocity norm: " << BvB_norm + << " (limit: " << kUnhealthyVelocity + << "), median of feature distance covariances: " + << feature_distance_covariance_median + << " (limit: " << kUnhealthyFeatureDistanceCov << ")."; + return true; + } + } else { + if (feature_distance_covariance_median < kHealthyFeatureDistanceCov) { + if (std::abs(feature_distance_covariance_median - + last_safe_pose_.feature_distance_covariance_median) < + kHealthyFeatureDistanceCovIncrement) { + last_safe_pose_.failsafe_WrWB = imu_output.WrWB(); + last_safe_pose_.failsafe_qBW = imu_output.qBW(); + last_safe_pose_.feature_distance_covariance_median = feature_distance_covariance_median; + } + } + num_subsequent_unhealthy_updates_ = 0; + } + return false; + } + + Eigen::Vector3d failsafe_WrWB() { return last_safe_pose_.failsafe_WrWB; } + + kindr::RotationQuaternionPD failsafe_qBW() { + return last_safe_pose_.failsafe_qBW; + } + + private: + struct RovioFailsafePose { + RovioFailsafePose() + : failsafe_WrWB(Eigen::Vector3d::Zero()), feature_distance_covariance_median(0.0) { + failsafe_qBW.setIdentity(); + } + Eigen::Vector3d failsafe_WrWB; + kindr::RotationQuaternionPD failsafe_qBW; + float feature_distance_covariance_median; + }; + + RovioFailsafePose last_safe_pose_; + int num_subsequent_unhealthy_updates_; + + // The landmark covariance is not a good measure for divergence if we are static. 
+ static constexpr float kVelocityToConsiderStatic = 0.1f; + static constexpr int kMaxSubsequentUnhealthyUpdates = 1; + static constexpr float kHealthyFeatureDistanceCov = 0.5f; + static constexpr float kHealthyFeatureDistanceCovIncrement = 0.3f; + static constexpr float kUnhealthyFeatureDistanceCov = 1.0f; + static constexpr float kUnhealthyVelocity = 5.0f; +}; + +} // namespace rovio + #endif // ROVIO_HEALTH_MONITOR_H_ \ No newline at end of file diff --git a/include/rovio/ImgUpdate.hpp b/include/rovio/ImgUpdate.hpp index 0f440925..76e2800b 100644 --- a/include/rovio/ImgUpdate.hpp +++ b/include/rovio/ImgUpdate.hpp @@ -209,8 +209,10 @@ ImgOutlierDetection,false>{ bool useDirectMethod_; /**,false>{ int alignMaxUniSample_; bool useCrossCameraMeasurements_; /**,false>{ mutable MultilevelPatch mlpTemp2_; mutable FeatureCoordinates alignedCoordinates_; mutable FeatureCoordinates tempCoordinates_; - mutable FeatureCoordinatesVec candidates_; + mutable FeatureCoordinatesVec candidates_[mtState::nCam_]; + //mutable std::vector candidates_; mutable cv::Point2f c_temp_; mutable Eigen::Matrix2d c_J_; mutable Eigen::Matrix2d A_red_; @@ -281,8 +286,10 @@ ImgOutlierDetection,false>{ zeroDistancePenalty_ = nDetectionBuckets_*1.0; useDirectMethod_ = true; doFrameVisualisation_ = true; + showCandidates_ = false; visualizePatches_ = false; verbose_ = false; + healthCheck_ = false; trackingUpperBound_ = 0.9; trackingLowerBound_ = 0.1; minTrackedAndFreeFeatures_ = 0.5; @@ -305,6 +312,8 @@ ImgOutlierDetection,false>{ alignMaxUniSample_ = 5; useCrossCameraMeasurements_ = true; doStereoInitialization_ = true; + addGlobalBest_ = false; + histogramEqualize_ = false; removalFactor_ = 1.1; alignmentGaussianWeightingSigma_ = 2.0; discriminativeSamplingDistance_ = 0.0; @@ -339,10 +348,13 @@ ImgOutlierDetection,false>{ boolRegister_.registerScalar("MotionDetection.isEnabled",doVisualMotionDetection_); boolRegister_.registerScalar("useDirectMethod",useDirectMethod_); 
boolRegister_.registerScalar("doFrameVisualisation",doFrameVisualisation_); + boolRegister_.registerScalar("showCandidates",showCandidates_); boolRegister_.registerScalar("visualizePatches",visualizePatches_); boolRegister_.registerScalar("removeNegativeFeatureAfterUpdate",removeNegativeFeatureAfterUpdate_); boolRegister_.registerScalar("useCrossCameraMeasurements",useCrossCameraMeasurements_); boolRegister_.registerScalar("doStereoInitialization",doStereoInitialization_); + boolRegister_.registerScalar("addGlobalBest",addGlobalBest_); + boolRegister_.registerScalar("histogramEqualize",histogramEqualize_); boolRegister_.registerScalar("useIntensityOffsetForAlignment",alignment_.useIntensityOffset_); boolRegister_.registerScalar("useIntensitySqewForAlignment",alignment_.useIntensitySqew_); doubleRegister_.removeScalarByVar(updnoiP_(0,0)); @@ -979,70 +991,109 @@ ImgOutlierDetection,false>{ } else { medianDepthParameters.fill(initDepth_); } + + if(verbose_) std::cout << "Adding keypoints" << std::endl; + + // Get Candidates for(int camID = 0;camID newSet = filterState.fsm_.addBestCandidates(candidates_,meas.aux().pyr_[camID],camID,filterState.t_, - endLevel_,startLevel_,(mtState::nMax_-filterState.fsm_.getValidCount())/(mtState::nCam_-camID),nDetectionBuckets_, scoreDetectionExponent_, - penaltyDistance_, zeroDistancePenalty_,false,minAbsoluteSTScore_); + std::unordered_set newSet = filterState.fsm_.addBestGlobalCandidates(candidates_,meas.aux().pyr_,filterState.t_, + endLevel_,startLevel_,(mtState::nMax_-filterState.fsm_.getValidCount()),nDetectionBuckets_, scoreDetectionExponent_, + penaltyDistance_, zeroDistancePenalty_,false,minAbsoluteSTScore_); const double t3 = (double) cv::getTickCount(); - if(verbose_) std::cout << "== Got " << filterState.fsm_.getValidCount() << " after adding " << newSet.size() << " features in camera " << camID << " (" << (t3-t2)/cv::getTickFrequency()*1000 << " ms)" << std::endl; + if(verbose_) std::cout << "== Got " << 
filterState.fsm_.getValidCount() << " after adding " << newSet.size() << " features in" << " " << (t3-t2)/cv::getTickFrequency()*1000 << " ms" << std::endl; for(auto it = newSet.begin();it != newSet.end();++it){ FeatureManager& f = filterState.fsm_.features_[*it]; f.mpStatistics_->resetStatistics(filterState.t_); - f.mpStatistics_->status_[camID] = TRACKED; + f.mpStatistics_->status_[f.mpCoordinates_->camID_] = TRACKED; f.mpStatistics_->lastPatchUpdate_ = filterState.t_; - f.mpDistance_->p_ = medianDepthParameters[camID]; + f.mpDistance_->p_ = medianDepthParameters[f.mpCoordinates_->camID_]; const float initRelDepthCovTemp_ = initCovFeature_(0,0); initCovFeature_(0,0) = initRelDepthCovTemp_*pow(f.mpDistance_->getParameterDerivative()*f.mpDistance_->getDistance(),2); filterState.resetFeatureCovariance(*it,initCovFeature_); initCovFeature_(0,0) = initRelDepthCovTemp_; if(doFrameVisualisation_){ - f.mpCoordinates_->drawPoint(filterState.img_[camID], cv::Scalar(255,0,0)); - f.mpCoordinates_->drawText(filterState.img_[camID],std::to_string(f.idx_),cv::Scalar(255,0,0)); + f.mpCoordinates_->drawPoint(filterState.img_[f.mpCoordinates_->camID_], cv::Scalar(255,255,0)); //changed color from blue to aquamarin + f.mpCoordinates_->drawText(filterState.img_[f.mpCoordinates_->camID_],std::to_string(f.idx_),cv::Scalar(255,255,0)); //changed color from blue to aquamarin } + } + } else { + for(int camID = 0;camID newSet = filterState.fsm_.addBestCandidates(candidates_[camID],meas.aux().pyr_[camID],camID,filterState.t_, + endLevel_,startLevel_,(mtState::nMax_-filterState.fsm_.getValidCount())/(mtState::nCam_-camID),nDetectionBuckets_, scoreDetectionExponent_, + penaltyDistance_, zeroDistancePenalty_,false,minAbsoluteSTScore_); + const double t3 = (double) cv::getTickCount(); + if(verbose_) std::cout << "== Got " << filterState.fsm_.getValidCount() << " after adding " << newSet.size() << " features in camera " << camID << " (" << (t3-t2)/cv::getTickFrequency()*1000 << " ms)" << 
std::endl; + for(auto it = newSet.begin();it != newSet.end();++it){ + FeatureManager& f = filterState.fsm_.features_[*it]; + f.mpStatistics_->resetStatistics(filterState.t_); + f.mpStatistics_->status_[camID] = TRACKED; + f.mpStatistics_->lastPatchUpdate_ = filterState.t_; + f.mpDistance_->p_ = medianDepthParameters[camID]; + const float initRelDepthCovTemp_ = initCovFeature_(0,0); + initCovFeature_(0,0) = initRelDepthCovTemp_*pow(f.mpDistance_->getParameterDerivative()*f.mpDistance_->getDistance(),2); + filterState.resetFeatureCovariance(*it,initCovFeature_); + initCovFeature_(0,0) = initRelDepthCovTemp_; + if(doFrameVisualisation_){ + f.mpCoordinates_->drawPoint(filterState.img_[camID], cv::Scalar(255,255,0)); //changed color from blue to aquamarin + f.mpCoordinates_->drawText(filterState.img_[camID],std::to_string(f.idx_),cv::Scalar(255,255,0)); //changed color from blue to aquamarin + } - if(mtState::nCam_>1 && doStereoInitialization_){ - const int otherCam = (camID+1)%mtState::nCam_; - transformFeatureOutputCT_.setFeatureID(*it); - transformFeatureOutputCT_.setOutputCameraID(otherCam); - transformFeatureOutputCT_.transformState(filterState.state_,featureOutput_); - if(alignment_.align2DAdaptive(alignedCoordinates_,meas.aux().pyr_[otherCam],*f.mpMultilevelPatch_,featureOutput_.c(),startLevel_,endLevel_, - alignConvergencePixelRange_,alignCoverageRatio_,alignMaxUniSample_)){ - bool valid = mlpTemp1_.isMultilevelPatchInFrame(meas.aux().pyr_[otherCam],alignedCoordinates_,startLevel_,false); - if(valid && patchRejectionTh_ >= 0){ - mlpTemp1_.extractMultilevelPatchFromImage(meas.aux().pyr_[otherCam],alignedCoordinates_,startLevel_,false); - const float avgError = mlpTemp1_.computeAverageDifference(*f.mpMultilevelPatch_,endLevel_,startLevel_); - if(avgError > patchRejectionTh_){ - valid = false; + if(mtState::nCam_>1 && doStereoInitialization_){ + const int otherCam = (camID+1)%mtState::nCam_; + transformFeatureOutputCT_.setFeatureID(*it); + 
transformFeatureOutputCT_.setOutputCameraID(otherCam); + transformFeatureOutputCT_.transformState(filterState.state_,featureOutput_); + if(alignment_.align2DAdaptive(alignedCoordinates_,meas.aux().pyr_[otherCam],*f.mpMultilevelPatch_,featureOutput_.c(),startLevel_,endLevel_, + alignConvergencePixelRange_,alignCoverageRatio_,alignMaxUniSample_)){ + bool valid = mlpTemp1_.isMultilevelPatchInFrame(meas.aux().pyr_[otherCam],alignedCoordinates_,startLevel_,false); + if(valid && patchRejectionTh_ >= 0){ + mlpTemp1_.extractMultilevelPatchFromImage(meas.aux().pyr_[otherCam],alignedCoordinates_,startLevel_,false); + const float avgError = mlpTemp1_.computeAverageDifference(*f.mpMultilevelPatch_,endLevel_,startLevel_); + if(avgError > patchRejectionTh_){ + valid = false; + } } - } - if(valid == true){ - if(doFrameVisualisation_){ - alignedCoordinates_.drawPoint(filterState.img_[otherCam], cv::Scalar(150,0,0)); - alignedCoordinates_.drawText(filterState.img_[otherCam],std::to_string(f.idx_),cv::Scalar(150,0,0)); - } - if(f.mpCoordinates_->getDepthFromTriangulation(alignedCoordinates_,state.qCM(otherCam).rotate(V3D(state.MrMC(camID)-state.MrMC(otherCam))),state.qCM(otherCam)*state.qCM(camID).inverted(), *f.mpDistance_, 0.01)){ - filterState.resetFeatureCovariance(*it,initCovFeature_); // TODO: improve + if(valid == true){ + if(doFrameVisualisation_){ + alignedCoordinates_.drawPoint(filterState.img_[otherCam], cv::Scalar(150,0,0)); + alignedCoordinates_.drawText(filterState.img_[otherCam],std::to_string(f.idx_),cv::Scalar(150,0,0)); + } + if(f.mpCoordinates_->getDepthFromTriangulation(alignedCoordinates_,state.qCM(otherCam).rotate(V3D(state.MrMC(camID)-state.MrMC(otherCam))),state.qCM(otherCam)*state.qCM(camID).inverted(), *f.mpDistance_, 0.01)){ + filterState.resetFeatureCovariance(*it,initCovFeature_); // TODO: improve + } + } else { + if(doFrameVisualisation_){ + alignedCoordinates_.drawPoint(filterState.img_[otherCam], cv::Scalar(0,0,150)); + 
alignedCoordinates_.drawText(filterState.img_[otherCam],std::to_string(f.idx_),cv::Scalar(0,0,150)); + } } } else { if(doFrameVisualisation_){ - alignedCoordinates_.drawPoint(filterState.img_[otherCam], cv::Scalar(0,0,150)); - alignedCoordinates_.drawText(filterState.img_[otherCam],std::to_string(f.idx_),cv::Scalar(0,0,150)); + alignedCoordinates_.drawPoint(filterState.img_[otherCam], cv::Scalar(0,150,0)); + alignedCoordinates_.drawText(filterState.img_[otherCam],std::to_string(f.idx_),cv::Scalar(0,150,0)); } } - } else { - if(doFrameVisualisation_){ - alignedCoordinates_.drawPoint(filterState.img_[otherCam], cv::Scalar(0,150,0)); - alignedCoordinates_.drawText(filterState.img_[otherCam],std::to_string(f.idx_),cv::Scalar(0,150,0)); - } } } } diff --git a/include/rovio/RovioFilter.hpp b/include/rovio/RovioFilter.hpp index d09d9241..74f1fec0 100644 --- a/include/rovio/RovioFilter.hpp +++ b/include/rovio/RovioFilter.hpp @@ -165,6 +165,7 @@ class RovioFilter:public LWF::FilterBase, std::get<0>(mUpdates_).outlierDetection_.setEnabledAll(true); std::get<1>(mUpdates_).outlierDetection_.setEnabledAll(true); boolRegister_.registerScalar("Common.verbose",std::get<0>(mUpdates_).verbose_); + boolRegister_.registerScalar("Common.healthCheck",std::get<0>(mUpdates_).healthCheck_); mPrediction_.doubleRegister_.removeScalarByStr("alpha"); mPrediction_.doubleRegister_.removeScalarByStr("beta"); mPrediction_.doubleRegister_.removeScalarByStr("kappa"); diff --git a/include/rovio/RovioNode.hpp b/include/rovio/RovioNode.hpp index 92c0030f..66f2571c 100644 --- a/include/rovio/RovioNode.hpp +++ b/include/rovio/RovioNode.hpp @@ -33,6 +33,8 @@ #include #include +#include + #include #include #include @@ -50,6 +52,7 @@ #include #include "rovio/RovioFilter.hpp" +#include "rovio/HealthMonitor.hpp" #include "rovio/CoordinateTransform/RovioOutput.hpp" #include "rovio/CoordinateTransform/FeatureOutput.hpp" #include "rovio/CoordinateTransform/FeatureOutputReadable.hpp" @@ -84,6 +87,8 @@ class 
RovioNode{ typedef typename mtVelocityUpdate::mtMeas mtVelocityMeas; mtVelocityMeas velocityUpdateMeas_; + RovioHealthMonitor healthMonitor_; + struct FilterInitializationState { FilterInitializationState() : WrWM_(V3D::Zero()), @@ -123,6 +128,7 @@ class RovioNode{ bool forceMarkersPublishing_; bool forcePatchPublishing_; bool gotFirstMessages_; + std::mutex m_filter_; // Nodes, Subscriber, Publishers @@ -131,6 +137,9 @@ class RovioNode{ ros::Subscriber subImu_; ros::Subscriber subImg0_; ros::Subscriber subImg1_; + ros::Subscriber subImg2_; + ros::Subscriber subImg3_; + ros::Subscriber subImg4_; ros::Subscriber subGroundtruth_; ros::Subscriber subGroundtruthOdometry_; ros::Subscriber subVelocity_; @@ -206,8 +215,12 @@ class RovioNode{ // Subscribe topics subImu_ = nh_.subscribe("imu0", 1000, &RovioNode::imuCallback,this); + // subImg0_ = nh_.subscribe("cam0/image_raw", 1000, std::bind(&RovioNode::imgCallback, std::placeholders::_1, 0),this); subImg0_ = nh_.subscribe("cam0/image_raw", 1000, &RovioNode::imgCallback0,this); subImg1_ = nh_.subscribe("cam1/image_raw", 1000, &RovioNode::imgCallback1,this); + subImg2_ = nh_.subscribe("cam2/image_raw", 1000, &RovioNode::imgCallback2,this); + subImg3_ = nh_.subscribe("cam3/image_raw", 1000, &RovioNode::imgCallback3,this); + subImg4_ = nh_.subscribe("cam4/image_raw", 1000, &RovioNode::imgCallback4,this); subGroundtruth_ = nh_.subscribe("pose", 1000, &RovioNode::groundtruthCallback,this); subGroundtruthOdometry_ = nh_.subscribe("odometry", 1000, &RovioNode::groundtruthOdometryCallback, this); subVelocity_ = nh_.subscribe("abss/twist", 1000, &RovioNode::velocityCallback,this); @@ -490,6 +503,36 @@ class RovioNode{ if(mtState::nCam_ > 1) imgCallback(img,1); } + /** \brief Image callback for the camera with ID 2 + * + * @param img - Image message. 
+ * @todo generalize + */ + void imgCallback2(const sensor_msgs::ImageConstPtr & img) { + std::lock_guard lock(m_filter_); + if(mtState::nCam_ > 2) imgCallback(img,2); + } + + /** \brief Image callback for the camera with ID 3 + * + * @param img - Image message. + * @todo generalize + */ + void imgCallback3(const sensor_msgs::ImageConstPtr & img) { + std::lock_guard lock(m_filter_); + if(mtState::nCam_ > 3) imgCallback(img,3); + } + + /** \brief Image callback for the camera with ID 4 + * + * @param img - Image message. + * @todo generalize + */ + void imgCallback4(const sensor_msgs::ImageConstPtr & img) { + std::lock_guard lock(m_filter_); + if(mtState::nCam_ > 4) imgCallback(img,4); + } + /** \brief Image callback. Adds images (as update measurements) to the filter. * * @param img - Image message. @@ -506,12 +549,42 @@ class RovioNode{ } cv::Mat cv_img; cv_ptr->image.copyTo(cv_img); + + // Histogram equalization + if(mpImgUpdate_->histogramEqualize_){ + constexpr size_t hist_bins = 256; + cv::Mat hist; + std::vector img_vec = {cv_img}; + cv::calcHist(img_vec, {0}, cv::Mat(), hist, {hist_bins}, + {0, hist_bins-1}, false); + cv::Mat lut(1, hist_bins, CV_8UC1); + double sum = 0.0; + //prevents an image full of noise if in total darkness + float max_per_bin = cv_img.cols * cv_img.rows * 0.02; + float min_per_bin = cv_img.cols * cv_img.rows * 0.002; + float total_pixels = 0; + for(size_t i = 0; i < hist_bins; ++i){ + float& bin = hist.at(i); + if(bin > max_per_bin){ + bin = max_per_bin; + } else if(bin < min_per_bin){ + bin = min_per_bin; + } + total_pixels += bin; + } + for(size_t i = 0; i < hist_bins; ++i){ + sum += hist.at(i) / total_pixels; + lut.at(i) = (hist_bins-1)*sum; + } + cv::LUT(cv_img, lut, cv_img); + } + if(init_state_.isInitialized() && !cv_img.empty()){ double msgTime = img->header.stamp.toSec(); if(msgTime != imgUpdateMeas_.template get().imgTime_){ for(int i=0;i().isValidPyr_[i]){ - std::cout << " \033[31mFailed Synchronization of Camera Frames, t = 
" << msgTime << "\033[0m" << std::endl; + std::cout << " \033[31mFailed Synchronization of Camera Frames, t = " << msgTime << " " << camID << "\033[0m" << std::endl; } } imgUpdateMeas_.template get().reset(msgTime); @@ -664,7 +737,7 @@ class RovioNode{ // Obtain the save filter state. mtFilterState& filterState = mpFilter_->safe_; - mtState& state = mpFilter_->safe_.state_; + mtState& state = mpFilter_->safe_.state_; state.updateMultiCameraExtrinsics(&mpFilter_->multiCamera_); MXD& cov = mpFilter_->safe_.cov_; imuOutputCT_.transformState(state,imuOutput_); @@ -864,6 +937,8 @@ class RovioNode{ pubImuBias_.publish(imuBiasMsg_); } + std::vector featureDistanceCov; + // PointCloud message. if(pubPcl_.getNumSubscribers() > 0 || pubMarkers_.getNumSubscribers() > 0 || forcePclPublishing_ || forceMarkersPublishing_){ pclMsg_.header.seq = msgSeq_; @@ -906,6 +981,8 @@ class RovioNode{ featureOutputReadableCT_.transformState(featureOutput_,featureOutputReadable_); featureOutputReadableCT_.transformCovMat(featureOutput_,featureOutputCov_,featureOutputReadableCov_); + featureDistanceCov.push_back(static_cast(featureOutputReadableCov_(3, 3))); // for health checker + // Get landmark output landmarkOutputImuCT_.setFeatureID(i); landmarkOutputImuCT_.transformState(state,landmarkOutput_); @@ -1001,6 +1078,19 @@ class RovioNode{ pubPatch_.publish(patchMsg_); } gotFirstMessages_ = true; + + if(mpImgUpdate_->healthCheck_){ + if(healthMonitor_.shouldResetEstimator(featureDistanceCov, imuOutput_)) { + if(!init_state_.isInitialized()) { + std::cout << "Reinitioalization already triggered. 
Ignoring request..."; + return; + } + + init_state_.WrWM_ = healthMonitor_.failsafe_WrWB(); + init_state_.qMW_ = healthMonitor_.failsafe_qBW(); + init_state_.state_ = FilterInitializationState::State::WaitForInitExternalPose; + } + } } } } diff --git a/launch/rovio_domi.launch b/launch/rovio_domi.launch new file mode 100644 index 00000000..1f8636a6 --- /dev/null +++ b/launch/rovio_domi.launch @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/launch/rovio_node.launch b/launch/rovio_node.launch index c4bdd653..b7ecb5ed 100644 --- a/launch/rovio_node.launch +++ b/launch/rovio_node.launch @@ -1,8 +1,16 @@ + + + + + + - - - + + + + + diff --git a/launch/rovio_rosbag_node.launch b/launch/rovio_rosbag_node.launch deleted file mode 100644 index d5556c5f..00000000 --- a/launch/rovio_rosbag_node.launch +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - \ No newline at end of file diff --git a/src/FeatureCoordinates.cpp b/src/FeatureCoordinates.cpp index bf5238e5..5d963ae4 100644 --- a/src/FeatureCoordinates.cpp +++ b/src/FeatureCoordinates.cpp @@ -50,7 +50,7 @@ namespace rovio{ const cv::Point2f& FeatureCoordinates::get_c() const{ if(!com_c()){ - std::cout << " \033[31mERROR: No valid coordinate data!\033[0m" << std::endl; + std::cout << " \033[31mERROR: No valid coordinate data [1]!\033[0m" << std::endl; } return c_; } @@ -67,7 +67,7 @@ namespace rovio{ const LWF::NormalVectorElement& FeatureCoordinates::get_nor() const{ if(!com_nor()){ - std::cout << " \033[31mERROR: No valid coordinate data!\033[0m" << std::endl; + std::cout << " \033[31mERROR: No valid coordinate data [2]!\033[0m" << std::endl; } return nor_; } @@ -76,7 +76,7 @@ namespace rovio{ assert(mpCamera_ != nullptr); if(!mpCamera_->bearingToPixel(get_nor(),c_,matrix2dTemp_)){ matrix2dTemp_.setZero(); - std::cout << " \033[31mERROR: No valid coordinate data!\033[0m" << std::endl; + std::cout << " \033[31mERROR: No valid coordinate data [3]!\033[0m" << std::endl; 
} return matrix2dTemp_; }