
Change evaluation examples to draw the aligned trajectory in a PDF file

Richard Elvira 4 years ago
parent
commit
c87a094e01
5 files changed, 366 insertions and 229 deletions
  1. euroc_eval_examples.sh (+6, -9)
  2. evaluation/associate.py (+130, -0)
  3. evaluation/evaluate_ate.py (+0, -219)
  4. evaluation/evaluate_ate_scale.py (+229, -0)
  5. tum_vi_eval_examples.sh (+1, -1)

+ 6 - 9
euroc_eval_examples.sh

@@ -1,14 +1,12 @@
 #!/bin/bash
 pathDatasetEuroc='/Datasets/EuRoC' #Example, it is necessary to change it to the dataset path
 
-#------------------------------------
 # Single Session Example (Pure visual)
-
 echo "Launching MH01 with Stereo sensor"
 ./Examples/Stereo/stereo_euroc ./Vocabulary/ORBvoc.txt ./Examples/Stereo/EuRoC.yaml "$pathDatasetEuroc"/MH01 ./Examples/Stereo/EuRoC_TimeStamps/MH01.txt dataset-MH01_stereo
 echo "------------------------------------"
 echo "Evaluation of MH01 trajectory with Stereo sensor"
-python3 evaluation/evaluate_ate.py f_dataset-MH01_stereo.txt evaluation/Ground_truth/EuRoC_left_cam/MH01_GT.txt 
+python evaluation/evaluate_ate_scale.py evaluation/Ground_truth/EuRoC_left_cam/MH01_GT.txt f_dataset-MH01_stereo.txt --plot MH01_stereo.pdf
 
 
 
@@ -17,17 +15,16 @@ echo "Launching Machine Hall with Stereo sensor"
 ./Examples/Stereo/stereo_euroc ./Vocabulary/ORBvoc.txt ./Examples/Stereo/EuRoC.yaml "$pathDatasetEuroc"/MH01 ./Examples/Stereo/EuRoC_TimeStamps/MH01.txt "$pathDatasetEuroc"/MH02 ./Examples/Stereo/EuRoC_TimeStamps/MH02.txt "$pathDatasetEuroc"/MH03 ./Examples/Stereo/EuRoC_TimeStamps/MH03.txt "$pathDatasetEuroc"/MH04 ./Examples/Stereo/EuRoC_TimeStamps/MH04.txt "$pathDatasetEuroc"/MH05 ./Examples/Stereo/EuRoC_TimeStamps/MH05.txt dataset-MH01_to_MH05_stereo
 echo "------------------------------------"
 echo "Evaluation of MAchine Hall trajectory with Stereo sensor"
-python3 evaluation/evaluate_ate.py f_dataset-MH01_to_MH05_stereo.txt evaluation/Ground_truth/EuRoC_left_cam/MH_GT.txt 
-
-#------------------------------------
-# Single Session Example (Visual-Inertial)
+python evaluation/evaluate_ate_scale.py evaluation/Ground_truth/EuRoC_left_cam/MH_GT.txt f_dataset-MH01_to_MH05_stereo.txt --plot MH01_to_MH05_stereo.pdf
 
 
+# Single Session Example (Visual-Inertial)
 echo "Launching V102 with Monocular-Inertial sensor"
 ./Examples/Monocular-Inertial/mono_inertial_euroc ./Vocabulary/ORBvoc.txt ./Examples/Monocular-Inertial/EuRoC.yaml "$pathDatasetEuroc"/V102 ./Examples/Monocular-Inertial/EuRoC_TimeStamps/V102.txt dataset-V102_monoi
 echo "------------------------------------"
 echo "Evaluation of V102 trajectory with Monocular-Inertial sensor"
-python3 evaluation/evaluate_ate.py f_dataset-V102_monoi.txt "$pathDatasetEuroc"/V102/mav0/state_groundtruth_estimate0/data.csv
+python evaluation/evaluate_ate_scale.py "$pathDatasetEuroc"/V102/mav0/state_groundtruth_estimate0/data.csv f_dataset-V102_monoi.txt --plot V102_monoi.pdf
+
 
 # MultiSession Monocular Examples
 
@@ -35,5 +32,5 @@ echo "Launching Vicon Room 2 with Monocular-Inertial sensor"
 ./Examples/Monocular-Inertial/mono_inertial_euroc ./Vocabulary/ORBvoc.txt ./Examples/Monocular-Inertial/EuRoC.yaml "$pathDatasetEuroc"/V201 ./Examples/Monocular-Inertial/EuRoC_TimeStamps/V201.txt "$pathDatasetEuroc"/V202 ./Examples/Monocular-Inertial/EuRoC_TimeStamps/V202.txt "$pathDatasetEuroc"/V203 ./Examples/Monocular-Inertial/EuRoC_TimeStamps/V203.txt dataset-V201_to_V203_monoi
 echo "------------------------------------"
 echo "Evaluation of Vicon Room 2 trajectory with Stereo sensor"
-python3 evaluation/evaluate_ate.py f_dataset-V201_to_V203_monoi.txt evaluation/Ground_truth/EuRoC_imu/V2_GT.txt
+python evaluation/evaluate_ate_scale.py evaluation/Ground_truth/EuRoC_imu/V2_GT.txt f_dataset-V201_to_V203_monoi.txt --plot V201_to_V203_monoi.pdf
 

+ 130 - 0
evaluation/associate.py

@@ -0,0 +1,130 @@
+#!/usr/bin/python
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2013, Juergen Sturm, TUM
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above
+#    copyright notice, this list of conditions and the following
+#    disclaimer in the documentation and/or other materials provided
+#    with the distribution.
+#  * Neither the name of TUM nor the names of its
+#    contributors may be used to endorse or promote products derived
+#    from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+#
+# Requirements: 
+# sudo apt-get install python-argparse
+
+"""
+The Kinect provides the color and depth images in an un-synchronized way. This means that the set of time stamps from the color images do not intersect with those of the depth images. Therefore, we need some way of associating color images to depth images.
+
+For this purpose, you can use the ''associate.py'' script. It reads the time stamps from the rgb.txt file and the depth.txt file, and joins them by finding the best matches.
+"""
+
+import argparse
+import sys
+import os
+import numpy
+
+
+def read_file_list(filename,remove_bounds):
+    """
+    Reads a trajectory from a text file. 
+    
+    File format:
+    The file format is "stamp d1 d2 d3 ...", where stamp denotes the time stamp (to be matched)
+    and "d1 d2 d3.." is arbitary data (e.g., a 3D position and 3D orientation) associated to this timestamp. 
+    
+    Input:
+    filename -- File name
+    
+    Output:
+    dict -- dictionary of (stamp,data) tuples
+    
+    """
+    file = open(filename)
+    data = file.read()
+    lines = data.replace(","," ").replace("\t"," ").split("\n")
+    if remove_bounds:
+        lines = lines[100:-100]
+    list = [[v.strip() for v in line.split(" ") if v.strip()!=""] for line in lines if len(line)>0 and line[0]!="#"]
+    list = [(float(l[0]),l[1:]) for l in list if len(l)>1]
+    return dict(list)
+
+def associate(first_list, second_list,offset,max_difference):
+    """
+    Associate two dictionaries of (stamp,data). As the time stamps never match exactly, we aim 
+    to find the closest match for every input tuple.
+    
+    Input:
+    first_list -- first dictionary of (stamp,data) tuples
+    second_list -- second dictionary of (stamp,data) tuples
+    offset -- time offset between both dictionaries (e.g., to model the delay between the sensors)
+    max_difference -- search radius for candidate generation
+
+    Output:
+    matches -- list of matched tuples ((stamp1,data1),(stamp2,data2))
+    
+    """
+    first_keys = first_list.keys()
+    second_keys = second_list.keys()
+    potential_matches = [(abs(a - (b + offset)), a, b) 
+                         for a in first_keys 
+                         for b in second_keys 
+                         if abs(a - (b + offset)) < max_difference]
+    potential_matches.sort()
+    matches = []
+    for diff, a, b in potential_matches:
+        if a in first_keys and b in second_keys:
+            first_keys.remove(a)
+            second_keys.remove(b)
+            matches.append((a, b))
+    
+    matches.sort()
+    return matches
+
+if __name__ == '__main__':
+    
+    # parse command line
+    parser = argparse.ArgumentParser(description='''
+    This script takes two data files with timestamps and associates them   
+    ''')
+    parser.add_argument('first_file', help='first text file (format: timestamp data)')
+    parser.add_argument('second_file', help='second text file (format: timestamp data)')
+    parser.add_argument('--first_only', help='only output associated lines from first file', action='store_true')
+    parser.add_argument('--offset', help='time offset added to the timestamps of the second file (default: 0.0)',default=0.0)
+    parser.add_argument('--max_difference', help='maximally allowed time difference for matching entries (default: 0.02)',default=0.02)
+    args = parser.parse_args()
+
+    first_list = read_file_list(args.first_file, False)
+    second_list = read_file_list(args.second_file, False)
+
+    matches = associate(first_list, second_list,float(args.offset),float(args.max_difference))    
+
+    if args.first_only:
+        for a,b in matches:
+            print("%f %s"%(a," ".join(first_list[a])))
+    else:
+        for a,b in matches:
+            print("%f %s %f %s"%(a," ".join(first_list[a]),b-float(args.offset)," ".join(second_list[b])))
+            
+        

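Note: associate() pairs the two files greedily. It builds every (estimate, ground-truth) timestamp pair whose difference is below --max_difference, sorts the pairs by that difference, and accepts them in order as long as both timestamps are still unused. The sketch below is not part of the commit; it reproduces the same logic on made-up timestamps to show which pairs survive.

est_stamps = [0.00, 0.05, 0.11]        # made-up estimated-trajectory timestamps (seconds)
gt_stamps = [0.01, 0.06, 0.30]         # made-up ground-truth timestamps (seconds)
offset, max_difference = 0.0, 0.02

candidates = sorted(
    (abs(a - (b + offset)), a, b)
    for a in est_stamps
    for b in gt_stamps
    if abs(a - (b + offset)) < max_difference
)
unused_est, unused_gt = set(est_stamps), set(gt_stamps)
matches = []
for _, a, b in candidates:                       # smallest time differences are claimed first
    if a in unused_est and b in unused_gt:       # every timestamp is matched at most once
        unused_est.remove(a)
        unused_gt.remove(b)
        matches.append((a, b))
print(sorted(matches))                           # [(0.0, 0.01), (0.05, 0.06)]; 0.11 stays unmatched
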
+ 0 - 219
evaluation/evaluate_ate.py

@@ -1,219 +0,0 @@
-import numpy as np
-import csv
-import re
-import argparse
-import sys
-
-comment_pattern = re.compile(r'\s*#.*$')
-
-
-class Position:
-    def __init__(self, timestamp, pos=None, rot_quat=None):
-        self.timestamp = timestamp
-        self.is_kf = False
-        if pos is None and rot_quat is None:
-            self.exist = False
-            self.pos = pos  # x,y,z
-            self.rot_quat = rot_quat  # w,x,y,z
-        else:
-            self.exist = True
-            self.pos = pos  # x,y,z
-            self.rot_quat = rot_quat  # w,x,y,z
-
-
-class Trajectory:
-    def __init__(self, lst_poses=None):
-        if lst_poses is not None:
-            self.num_poses = len(lst_poses)
-            self.timestamps = np.zeros((self.num_poses, 1))
-            self.pos = np.zeros((self.num_poses, 3))
-            self.q = np.zeros((self.num_poses, 4))
-            for i in range(0, self.num_poses):
-                self.timestamps[i] = lst_poses[i].timestamp
-                if lst_poses[i].rot_quat is not None:
-                    for j in range(0, 4):
-                        self.q[i][j] = lst_poses[i].rot_quat[j]
-                for j in range(0, 3):
-                    self.pos[i][j] = lst_poses[i].pos[j]
-            # self.pos = np.transpose(self.pos)
-            self.pos = np.transpose(self.pos)
-            self.q = np.transpose(self.q)
-
-
-def load_file(path_file_name: str, delimiter=" "):
-    lst_poses = []
-    with open(path_file_name) as file:
-        file = skip_comments(file)
-        for line in file:
-            vector = line.split(delimiter)
-            # print(len(vector))
-            if len(vector) == 8:
-                # Correct position
-
-                timestamp = float(vector[0])
-                pos = [float(vector[1]), float(vector[2]), float(vector[3])]
-                rot_quat = [float(vector[7]), float(vector[4]), float(vector[5]), float(vector[6])]
-                pose = Position(timestamp, pos, rot_quat)
-                lst_poses.append(pose)
-            else:
-                # We haven't got a position in this
-                timestamp = float(vector[0])
-                lst_poses.append(Position(timestamp))
-
-    return lst_poses
-
-
-def associate(poses_test, poses_gt):
-    num_match = 0
-    i_test = 0
-    i_gt = 0
-    lst_check_test = []
-    lst_check_gt = []
-    while i_test < len(poses_test):
-        if i_gt == len(poses_gt):
-            break
-            lst_check_test.append(poses_test[i_test])
-            lst_check_gt.append(poses_gt[i_gt-1])
-            num_match = num_match + 1
-            i_test = i_test + 1
-        elif np.abs(poses_test[i_test].timestamp - poses_gt[i_gt].timestamp)/1e3 < 1:
-            lst_check_test.append(poses_test[i_test])
-            lst_check_gt.append(poses_gt[i_gt])
-            num_match = num_match + 1
-            i_test = i_test + 1
-            i_gt = i_gt + 1
-        elif poses_test[i_test].timestamp < poses_gt[i_gt].timestamp:
-            i_test = i_test + 1
-            if i_test >= len(poses_test):
-                break
-        elif poses_test[i_test].timestamp > poses_gt[i_gt].timestamp:
-            i_gt = i_gt + 1
-            if i_gt >= len(poses_gt):
-                break
-
-    return lst_check_test, lst_check_gt
-
-
-def create_trajectory(lst_poses_test, lst_poses_gt):
-    lst_exist_poses_test = []
-    lst_exist_poses_gt = []
-    for i in range(len(lst_poses_test)):
-        if lst_poses_test[i].exist:
-            lst_exist_poses_test.append(lst_poses_test[i])
-            pose_gt = lst_poses_gt[i]
-
-            lst_exist_poses_gt.append(pose_gt)
-
-    traj_test = Trajectory(lst_exist_poses_test)
-    traj_gt = Trajectory(lst_exist_poses_gt)
-    return traj_test, traj_gt
-
-
-def skip_comments(lines):
-    """
-    A filter which skip/strip the comments and yield the
-    rest of the lines
-
-    :param lines: any object which we can iterate through such as a file
-        object, list, tuple, or generator
-    """
-    global comment_pattern
-
-    for line in lines:
-        line = re.sub(comment_pattern, '', line).strip()
-        if line:
-            yield line
-
-
-def align(traj_test, traj_gt):
-    mean_test = traj_test.pos.mean(1)
-    mean_test = mean_test.reshape(mean_test.size, 1)
-    mean_gt = traj_gt.pos.mean(1)
-    mean_gt = mean_gt.reshape(mean_gt.size, 1)
-
-    traj_centered_test = Trajectory()
-    traj_centered_gt = Trajectory()
-    traj_centered_test.pos = traj_test.pos - mean_test
-    traj_centered_gt.pos = traj_gt.pos - mean_gt
-
-    L = traj_centered_test.pos.shape[1]
-
-    W = np.zeros((3, 3))
-    for col in range(traj_centered_test.pos.shape[1]):
-        W += np.outer(traj_centered_test.pos[:, col], traj_centered_gt.pos[:, col])
-    U, d, Vh = np.linalg.linalg.svd(W.transpose())
-
-    S = np.matrix(np.identity(3))
-    if np.linalg.det(U) * np.linalg.det(Vh) < 0:
-        # print("Negative")
-        S[2, 2] = -1
-
-    Rot = U * S * Vh
-
-    # pose_rot_cent_test = np.matmul(Rot, traj_centered_test.pos)
-    pose_rot_cent_test = Rot * np.matrix(traj_centered_test.pos)
-    # print('Size center: {}'.format(pose_rot_cent_test.shape))
-    # print('Size center2: {}'.format((Rot * np.matrix(traj_centered_test.pos)).shape))
-
-    s = 1.0
-    dots = 0.0
-    norms = 0.0
-    for column in range(traj_centered_gt.pos.shape[1]):
-        dots += np.dot(traj_centered_gt.pos[:, column].transpose(), pose_rot_cent_test[:, column])
-        normi = np.linalg.norm(traj_centered_test.pos[:, column])
-        norms += normi * normi
-
-    s = float(dots / norms)
-
-    traj_length = 0
-    for i in range(1, traj_centered_gt.pos.shape[1]):
-        # print("Pos = ", _trajData.pos[:, i])
-        traj_length = traj_length + np.linalg.norm(traj_centered_gt.pos[:, i] - traj_centered_gt.pos[:, i - 1])
-
-    # print("Rotation matrix between model and GT: \n", Rot)
-
-    trans_scale = mean_gt - s * np.matmul(Rot, mean_test)
-    traj_aligned_scale = Trajectory()
-    traj_aligned_scale.pos = s * Rot * np.matrix(traj_test.pos) + trans_scale
-
-    trans = mean_gt - np.matmul(Rot, mean_test)
-    traj_aligned = Trajectory()
-    traj_aligned.pos = Rot * np.matrix(traj_test.pos) + trans
-
-    # np.reshape(_traj_test.pos, (3, L))
-    # print(_traj_test.pos.shape)
-    error_traj_scale = traj_aligned_scale.pos - traj_gt.pos
-    error_traj = traj_aligned.pos - traj_gt.pos
-
-    ate_scale = np.squeeze(np.asarray(np.sqrt(np.sum(np.multiply(error_traj_scale, error_traj_scale), 0))))
-    ateRMSE_scale = np.sqrt(np.dot(ate_scale, ate_scale) / len(ate_scale))
-
-    ate = np.squeeze(np.asarray(np.sqrt(np.sum(np.multiply(error_traj, error_traj), 0))))
-    ateRMSE = np.sqrt(np.dot(ate, ate) / len(ate))
-
-    # Print the error
-    print("ATE RMSE(7DoF): " + str(ateRMSE_scale))
-    print("scale: %f " % s)
-    print("ATE RMSE(6DoF): " + str(ateRMSE))
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(description='''
-        This script computes the absolute trajectory error from the ground truth trajectory and the estimated trajectory. 
-        ''')
-
-    parser.add_argument('SLAM', help='estimated trajectory (format: timestamp tx ty tz qx qy qz qw)')
-    parser.add_argument('GT', help='ground truth trajectory (format: timestamp tx ty tz qx qy qz qw)')
-    args = parser.parse_args()
-
-    str_file_gt = args.GT
-    str_file_traj = args.SLAM
-
-    lst_gt_poses = load_file(str_file_gt, ",")
-    lst_traj_poses = load_file(str_file_traj, " ")
-
-    lst_traj_poses2, lst_gt_poses2 = associate(lst_traj_poses, lst_gt_poses)
-    traj_test, traj_gt = create_trajectory(lst_traj_poses2, lst_gt_poses2)
-
-    align(traj_test, traj_gt)
-

+ 229 - 0
evaluation/evaluate_ate_scale.py

@@ -0,0 +1,229 @@
+# Modified by Raul Mur-Artal
+# Automatically compute the optimal scale factor for monocular VO/SLAM.
+
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2013, Juergen Sturm, TUM
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above
+#    copyright notice, this list of conditions and the following
+#    disclaimer in the documentation and/or other materials provided
+#    with the distribution.
+#  * Neither the name of TUM nor the names of its
+#    contributors may be used to endorse or promote products derived
+#    from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+#
+# Requirements: 
+# sudo apt-get install python-argparse
+
+"""
+This script computes the absolute trajectory error from the ground truth
+trajectory and the estimated trajectory.
+"""
+
+import sys
+import numpy
+import argparse
+import associate
+
+def align(model,data):
+    """Align two trajectories using the method of Horn (closed-form).
+    
+    Input:
+    model -- first trajectory (3xn)
+    data -- second trajectory (3xn)
+    
+    Output:
+    rot -- rotation matrix (3x3)
+    trans -- translation vector (3x1)
+    trans_error -- translational error per point (1xn)
+    """
+
+
+    numpy.set_printoptions(precision=3,suppress=True)
+    model_zerocentered = model - model.mean(1)
+    data_zerocentered = data - data.mean(1)
+    
+    W = numpy.zeros( (3,3) )
+    for column in range(model.shape[1]):
+        W += numpy.outer(model_zerocentered[:,column],data_zerocentered[:,column])
+    U,d,Vh = numpy.linalg.linalg.svd(W.transpose())
+    S = numpy.matrix(numpy.identity( 3 ))
+    if(numpy.linalg.det(U) * numpy.linalg.det(Vh)<0):
+        S[2,2] = -1
+    rot = U*S*Vh
+
+    rotmodel = rot*model_zerocentered
+    dots = 0.0
+    norms = 0.0
+
+    for column in range(data_zerocentered.shape[1]):
+        dots += numpy.dot(data_zerocentered[:,column].transpose(),rotmodel[:,column])
+        normi = numpy.linalg.norm(model_zerocentered[:,column])
+        norms += normi*normi
+
+    s = float(dots/norms)    
+    
+    transGT = data.mean(1) - s*rot * model.mean(1)
+    trans = data.mean(1) - rot * model.mean(1)
+
+    model_alignedGT = s*rot * model + transGT
+    model_aligned = rot * model + trans
+
+    alignment_errorGT = model_alignedGT - data
+    alignment_error = model_aligned - data
+
+    trans_errorGT = numpy.sqrt(numpy.sum(numpy.multiply(alignment_errorGT,alignment_errorGT),0)).A[0]
+    trans_error = numpy.sqrt(numpy.sum(numpy.multiply(alignment_error,alignment_error),0)).A[0]
+        
+    return rot,transGT,trans_errorGT,trans,trans_error, s
+
+def plot_traj(ax,stamps,traj,style,color,label):
+    """
+    Plot a trajectory using matplotlib. 
+    
+    Input:
+    ax -- the plot
+    stamps -- time stamps (1xn)
+    traj -- trajectory (3xn)
+    style -- line style
+    color -- line color
+    label -- plot legend
+    
+    """
+    stamps.sort()
+    interval = numpy.median([s-t for s,t in zip(stamps[1:],stamps[:-1])])
+    x = []
+    y = []
+    last = stamps[0]
+    for i in range(len(stamps)):
+        if stamps[i]-last < 2*interval:
+            x.append(traj[i][0])
+            y.append(traj[i][1])
+        elif len(x)>0:
+            ax.plot(x,y,style,color=color,label=label)
+            label=""
+            x=[]
+            y=[]
+        last= stamps[i]
+    if len(x)>0:
+        ax.plot(x,y,style,color=color,label=label)
+            
+
+if __name__=="__main__":
+    # parse command line
+    parser = argparse.ArgumentParser(description='''
+    This script computes the absolute trajectory error from the ground truth trajectory and the estimated trajectory. 
+    ''')
+    parser.add_argument('first_file', help='ground truth trajectory (format: timestamp tx ty tz qx qy qz qw)')
+    parser.add_argument('second_file', help='estimated trajectory (format: timestamp tx ty tz qx qy qz qw)')
+    parser.add_argument('--offset', help='time offset added to the timestamps of the second file (default: 0.0)',default=0.0)
+    parser.add_argument('--scale', help='scaling factor for the second trajectory (default: 1.0)',default=1.0)
+    parser.add_argument('--max_difference', help='maximally allowed time difference for matching entries (default: 20000000 ns)',default=20000000)
+    parser.add_argument('--save', help='save aligned second trajectory to disk (format: stamp2 x2 y2 z2)')
+    parser.add_argument('--save_associations', help='save associated first and aligned second trajectory to disk (format: stamp1 x1 y1 z1 stamp2 x2 y2 z2)')
+    parser.add_argument('--plot', help='plot the first and the aligned second trajectory to an image (format: pdf)')
+    parser.add_argument('--verbose', help='print all evaluation data (otherwise, only the RMSE absolute translational error in meters after alignment will be printed)', action='store_true')
+    parser.add_argument('--verbose2', help='print scale error and RMSE absolute translational error in meters after alignment with and without scale correction', action='store_true')
+    args = parser.parse_args()
+
+    first_list = associate.read_file_list(args.first_file, False)
+    second_list = associate.read_file_list(args.second_file, False)
+
+    matches = associate.associate(first_list, second_list,float(args.offset),float(args.max_difference))    
+    if len(matches)<2:
+        sys.exit("Couldn't find matching timestamp pairs between groundtruth and estimated trajectory! Did you choose the correct sequence?")
+    first_xyz = numpy.matrix([[float(value) for value in first_list[a][0:3]] for a,b in matches]).transpose()
+    second_xyz = numpy.matrix([[float(value)*float(args.scale) for value in second_list[b][0:3]] for a,b in matches]).transpose()
+    dictionary_items = second_list.items()
+    sorted_second_list = sorted(dictionary_items)
+
+    second_xyz_full = numpy.matrix([[float(value)*float(args.scale) for value in sorted_second_list[i][1][0:3]] for i in range(len(sorted_second_list))]).transpose() # sorted_second_list.keys()]).transpose()
+    rot,transGT,trans_errorGT,trans,trans_error, scale = align(second_xyz,first_xyz)
+    
+    second_xyz_aligned = scale * rot * second_xyz + trans
+    second_xyz_notscaled = rot * second_xyz + trans
+    second_xyz_notscaled_full = rot * second_xyz_full + trans
+    first_stamps = first_list.keys()
+    first_stamps.sort()
+    first_xyz_full = numpy.matrix([[float(value) for value in first_list[b][0:3]] for b in first_stamps]).transpose()
+    
+    second_stamps = second_list.keys()
+    second_stamps.sort()
+    second_xyz_full = numpy.matrix([[float(value)*float(args.scale) for value in second_list[b][0:3]] for b in second_stamps]).transpose()
+    second_xyz_full_aligned = scale * rot * second_xyz_full + trans
+    
+    if args.verbose:
+        print "compared_pose_pairs %d pairs"%(len(trans_error))
+
+        print "absolute_translational_error.rmse %f m"%numpy.sqrt(numpy.dot(trans_error,trans_error) / len(trans_error))
+        print "absolute_translational_error.mean %f m"%numpy.mean(trans_error)
+        print "absolute_translational_error.median %f m"%numpy.median(trans_error)
+        print "absolute_translational_error.std %f m"%numpy.std(trans_error)
+        print "absolute_translational_error.min %f m"%numpy.min(trans_error)
+        print "absolute_translational_error.max %f m"%numpy.max(trans_error)
+        print "max idx: %i" %numpy.argmax(trans_error)
+    else:
+        # print "%f, %f " % (numpy.sqrt(numpy.dot(trans_error,trans_error) / len(trans_error)),  scale)
+        # print "%f,%f" % (numpy.sqrt(numpy.dot(trans_error,trans_error) / len(trans_error)),  scale)
+        print "%f,%f,%f" % (numpy.sqrt(numpy.dot(trans_error,trans_error) / len(trans_error)), scale, numpy.sqrt(numpy.dot(trans_errorGT,trans_errorGT) / len(trans_errorGT)))
+        # print "%f" % len(trans_error)
+    if args.verbose2:
+        print "compared_pose_pairs %d pairs"%(len(trans_error))
+        print "absolute_translational_error.rmse %f m"%numpy.sqrt(numpy.dot(trans_error,trans_error) / len(trans_error))
+        print "absolute_translational_errorGT.rmse %f m"%numpy.sqrt(numpy.dot(trans_errorGT,trans_errorGT) / len(trans_errorGT))
+
+    if args.save_associations:
+        file = open(args.save_associations,"w")
+        file.write("\n".join(["%f %f %f %f %f %f %f %f"%(a,x1,y1,z1,b,x2,y2,z2) for (a,b),(x1,y1,z1),(x2,y2,z2) in zip(matches,first_xyz.transpose().A,second_xyz_aligned.transpose().A)]))
+        file.close()
+        
+    if args.save:
+        file = open(args.save,"w")
+        file.write("\n".join(["%f "%stamp+" ".join(["%f"%d for d in line]) for stamp,line in zip(second_stamps,second_xyz_notscaled_full.transpose().A)]))
+        file.close()
+
+    if args.plot:
+        import matplotlib
+        matplotlib.use('Agg')
+        import matplotlib.pyplot as plt
+        import matplotlib.pylab as pylab
+        from matplotlib.patches import Ellipse
+        fig = plt.figure()
+        ax = fig.add_subplot(111)
+        plot_traj(ax,first_stamps,first_xyz_full.transpose().A,'-',"black","ground truth")
+        plot_traj(ax,second_stamps,second_xyz_full_aligned.transpose().A,'-',"blue","estimated")
+        label="difference"
+        for (a,b),(x1,y1,z1),(x2,y2,z2) in zip(matches,first_xyz.transpose().A,second_xyz_aligned.transpose().A):
+            ax.plot([x1,x2],[y1,y2],'-',color="red",label=label)
+            label=""
+            
+        ax.legend()
+            
+        ax.set_xlabel('x [m]')
+        ax.set_ylabel('y [m]')
+        plt.axis('equal')
+        plt.savefig(args.plot,format="pdf")
+
+
+        

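Note: align() in evaluate_ate_scale.py recovers the alignment with Horn's closed-form method plus an optimal scale: a rotation from the SVD of the cross-covariance of the zero-centered trajectories, then scale and translation in closed form; the script reports the ATE RMSE both with and without that scale correction. The NumPy sketch below is not part of the commit; it repeats the computation on a synthetic trajectory whose true scale is 2.5, so the recovered scale should come back as 2.5 and the aligned RMSE as (numerically) zero.

import numpy as np

rng = np.random.default_rng(0)
est = rng.normal(size=(3, 50))                         # synthetic estimated trajectory, 3 x n
theta = 0.3
true_R = np.array([[np.cos(theta), -np.sin(theta), 0.0],
                   [np.sin(theta),  np.cos(theta), 0.0],
                   [0.0,            0.0,           1.0]])
true_s, true_t = 2.5, np.array([[1.0], [-2.0], [0.5]])
gt = true_s * true_R @ est + true_t                    # ground truth = similarity transform of est

# Rotation from the SVD of the cross-covariance (same role as W.transpose() in align())
est_zc = est - est.mean(axis=1, keepdims=True)
gt_zc = gt - gt.mean(axis=1, keepdims=True)
U, _, Vt = np.linalg.svd(gt_zc @ est_zc.T)
S = np.eye(3)
if np.linalg.det(U) * np.linalg.det(Vt) < 0:           # guard against a reflection
    S[2, 2] = -1.0
R = U @ S @ Vt

# Closed-form scale and translation, then the scale-corrected ATE RMSE
s = np.sum(gt_zc * (R @ est_zc)) / np.sum(est_zc ** 2)
t = gt.mean(axis=1, keepdims=True) - s * R @ est.mean(axis=1, keepdims=True)
ate_rmse = np.sqrt(np.mean(np.sum((s * R @ est + t - gt) ** 2, axis=0)))
print(round(s, 3), round(ate_rmse, 9))                 # -> 2.5 and ~0
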
+ 1 - 1
tum_vi_eval_examples.sh

@@ -8,4 +8,4 @@ echo "Launching Magistrale 1 with Stereo-Inertial sensor"
 ./Examples/Stereo-Inertial/stereo_inertial_tum_vi Vocabulary/ORBvoc.txt Examples/Stereo-Inertial/TUM_512.yaml "$pathDatasetTUM_VI"/dataset-magistrale1_512_16/mav0/cam0/data "$pathDatasetTUM_VI"/dataset-magistrale1_512_16/mav0/cam1/data Examples/Stereo-Inertial/TUM_TimeStamps/dataset-magistrale1_512.txt Examples/Stereo-Inertial/TUM_IMU/dataset-magistrale1_512.txt dataset-magistrale1_512_stereoi
 echo "------------------------------------"
 echo "Evaluation of Magistrale 1 trajectory with Stereo-Inertial sensor"
-python3 evaluation/evaluate_ate.py f_dataset-magistrale1_512_stereoi.txt "$pathDatasetTUM_VI"/magistrale1_512_16/mav0/mocap0/data.csv
+python evaluation/evaluate_ate_scale.py "$pathDatasetTUM_VI"/magistrale1_512_16/mav0/mocap0/data.csv f_dataset-magistrale1_512_stereoi.txt --plot magistrale1_512_stereoi.pdf