Fault-Free Analysis

As a continuation of the work conducted yesterday, several additional robust noise models were added to processG2O.cpp. With these additional noise models in place, an analysis similar to yesterday's was conducted on the four pose-graphs housed in the pose-graph directory.
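processG2O.cpp itself is not reproduced in this post. As a rough illustration of what attaching a robust noise model looks like, the sketch below uses GTSAM's Python bindings (the C++ calls in processG2O.cpp are analogous); the sigmas and the Huber kernel width are placeholder values, not the ones used in the experiments.

import numpy as np
import gtsam

# Gaussian noise model for a 2D relative-pose measurement (x, y, theta)
base = gtsam.noiseModel.Diagonal.Sigmas(np.array([0.1, 0.1, 0.05]))

# wrap the Gaussian model with a Huber M-estimator; the kernel width
# (1.345 here) is the user-specified parameter swept in the figures below
huber = gtsam.noiseModel.mEstimator.Huber.Create(1.345)
robust = gtsam.noiseModel.Robust.Create(huber, base)

# the robust model is then attached to each loop-closure factor, e.g.
# graph.add(gtsam.BetweenFactorPose2(i, j, measurement, robust))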

Manhattan 3500


Fig 1 :: Sensitivity of M-Estimator on Manhattan3500 dataset


City 10000


Fig 2 :: Sensitivity of M-Estimator on City10000 dataset


CSAIL


Fig 3 :: Sensitivity of M-Estimator on CSAIL dataset



As can be seen from the analysis above, the M-Estimator is sensitive to both the dataset and the user-specified kernel width.
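To make the role of the kernel width concrete, the following plain-Python sketch (illustrative only, not part of processG2O.cpp) evaluates the standard IRLS weights of the Huber and Cauchy kernels. The same residual is down-weighted very differently as the width changes, which is one source of the sensitivity seen above.

def huber_weight(e, k):
    # Huber IRLS weight: quadratic region (weight 1) inside the width k,
    # linear region (weight k/|e|) outside
    return 1.0 if abs(e) <= k else k / abs(e)

def cauchy_weight(e, k):
    # Cauchy IRLS weight: down-weights large residuals far more aggressively
    return 1.0 / (1.0 + (e / k) ** 2)

# weight assigned to a residual of 3.0 under several kernel widths
for k in [0.5, 1.345, 5.0]:
    print(k, huber_weight(3.0, k), cauchy_weight(3.0, k))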

False Constraints Added To Pose-Graph

Next, we would like to see how the trend shown above is affected when false constraints are incorporated into the graph. To do this, the script provided below was created.


#!/usr/bin/env python 

'''
Modification of code housed at 
https://svn.openslam.org/data/svn/vertigo/trunk/datasets/generateDataset.py.

This is used to add false constraints to a pose graph. These faults can be 
added to the pose-graph randomly or in local clusters.

Currently, this only works on 2D pose-graphs.
'''

__author__ = 'ryan'
__email__ = "rwatso12@gmail.com"


import random
from math import pi
from optparse import OptionParser


def readDataset(filename, vertexStr='VERTEX_SE2', edgeStr='EDGE_SE2'):

    # read the complete file
    f = open(filename, 'r')
    lines = f.readlines()
    f.close()

    # determine whether this is a supported (2D) dataset
    mode = None
    for line in lines:
        if line.startswith(vertexStr):
            mode = 2
            break

    # collect the vertex and edge lines
    v = []
    e = []
    for line in lines:
        if line.startswith(vertexStr):
            v.append(line)
        elif line.startswith(edgeStr):
            e.append(line)

    return (v, e, mode)

# ==================================================================
def writeDataset(filename, vertices, edges, mode, outliers=0, groupSize=1,
                 doLocal=0, informationMatrix="42,0,0,42,0,42",
                 edgeStr='EDGE_SE2'):

    # only 2D pose-graphs are supported
    if mode != 2:
        print("Only 2D (VERTEX_SE2 / EDGE_SE2) pose-graphs are supported.")
        return False

    poseCount = len(vertices)
    info_str = informationMatrix.replace(",", " ")

    # first write out all pose vertices and the original edges
    f = open(filename, 'w')
    for n in vertices:
        f.write(n)
    for n in edges:
        f.write(n)

    # now create the desired number of additional outlier edges
    for i in range(outliers):

        # determine random indices for the two vertices that are
        # connected by an outlier edge
        v1 = 0
        v2 = 0
        while v1 == v2:
            v1 = random.randint(0, poseCount-1-groupSize)
            if doLocal < 1:
                v2 = random.randint(0, poseCount-1-groupSize)
            else:
                v2 = random.randint(v1, min(poseCount-1-groupSize, v1+20))

            if v1 > v2:
                v1, v2 = v2, v1
            if v2 == v1+1:
                v2 = v1+2

        # draw the (false) relative-pose measurement of the loop closure
        x1 = random.gauss(0, 0.3)
        x2 = random.gauss(0, 0.3)
        x3 = random.gauss(0, 10*pi/180.0)

        # a group is a run of consecutive false loop closures that all
        # share the same measurement, forming a local cluster of faults
        for j in range(groupSize):
            n = [v1, v2, x1, x2, x3]
            s = ' '.join([edgeStr] + [str(x) for x in n]) + " " + info_str
            f.write(s + '\n')
            v1 = v1 + 1
            v2 = v2 + 1

    f.close()
    return True


# =================================================================
def checkOptions(options):
    """Make sure the options entered by the user make sense."""

    if options.outliers < 0:
        print("Number of outliers (--outliers) must be >= 0.")
        return False

    if options.groupsize < 0:
        print("Groupsize (--groupsize) must be >= 0.")
        return False

    if not options.filename:
        print("Dataset to read (--in) must be given.")
        return False

    return True


# ==================================================================    
# ==================================================================
# ==================================================================

if __name__ == "__main__":


    parser = OptionParser()
    parser.add_option("-i", "--in", help = "Path to the original dataset file (in g2o format).", dest="filename")
    parser.add_option("-o", "--out", help = "Results will be written into this file.", default="new.g2o")
    parser.add_option("-n", "--outliers", help = "Spoil the dataset with this many outliers. Default = 100.", default=100, type="int")
    parser.add_option("-g", "--groupsize", help = "Use this groupsize. Default = 1.", default=1, type="int")
    parser.add_option("--seed", help = "Random seed. If >0 it will be used to initialize the random number generator to create repeatable random false positive loop closures.", default=None, type="int")
    parser.add_option("-l", "--local", help = "Create only local false positive loop closure constraints.", action="store_true", default=False)
    (options, args) = parser.parse_args()

    if checkOptions(options):

        random.seed(options.seed)
        (vertices, edges, mode) = readDataset(options.filename)

        # build and save the modified dataset with additional false positive loop closures
        if writeDataset(options.out, vertices, edges, mode,
                        options.outliers,
                        options.groupsize,
                        options.local):
            print("Done.")


    # command line options were not ok
    else:
        print("Please use --help to see all available command line parameters.")


Using this script, several false constraints will be added to each graph, and the M-Estimators will be tested again; an example invocation is shown below.
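For example, assuming the script above is saved as addFalseConstraints.py (the file name and input graph here are illustrative), 100 randomly placed false loop closures can be added with

python addFalseConstraints.py --in manhattan3500.g2o --out manhattan3500_faulty.g2o --outliers 100 --seed 42

while adding --local --groupsize 10 would instead place the faults as local clusters of ten consecutive constraints.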

Will continue tomorrow.