Home | History | Annotate | Download | only in sensors
      1 package com.android.cts.verifier.sensors;
      2 
      3 /*
      4  * Copyright (C) 2014 The Android Open Source Project
      5  *
      6  * Licensed under the Apache License, Version 2.0 (the "License");
      7  * you may not use this file except in compliance with the License.
      8  * You may obtain a copy of the License at
      9  *
     10  *      http://www.apache.org/licenses/LICENSE-2.0
     11  *
     12  * Unless required by applicable law or agreed to in writing, software
     13  * distributed under the License is distributed on an "AS IS" BASIS,
     14  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     15  * See the License for the specific language governing permissions and
     16  * limitations under the License.
     17  */
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.os.Debug;
import android.os.Environment;
import android.os.PowerManager;
import android.util.JsonWriter;
import android.util.Log;

import org.json.JSONException;
import org.json.JSONObject;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDouble;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.MatOfPoint3f;
import org.opencv.core.Point;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Locale;

import javax.microedition.khronos.opengles.GL10;
     57 
     58 /**
     59  *  This class does analysis on the recorded RVCVCXCheck data sets.
     60  */
     61 public class RVCVXCheckAnalyzer {
     62     private static final String TAG = "RVCVXAnalysis";
     63     private static final boolean LOCAL_LOGV = false;
     64     private static final boolean LOCAL_LOGD = true;
     65     private final String mPath;
     66 
     67     private static final boolean OUTPUT_DEBUG_IMAGE = false;
     68     private static final double VALID_FRAME_THRESHOLD = 0.8;
     69     private static final double REPROJECTION_THRESHOLD_RATIO = 0.01;
     70     private static final boolean FORCE_CV_ANALYSIS  = false;
     71     private static final boolean TRACE_VIDEO_ANALYSIS = false;
     72     private static final double DECIMATION_FPS_TARGET = 15.0;
     73     private static final double MIN_VIDEO_LENGTH_SEC = 10;
     74 
     75     RVCVXCheckAnalyzer(String path)
     76     {
     77         mPath = path;
     78     }
     79 
     80     /**
     81      * A class that contains  the analysis results
     82      *
     83      */
     84     class AnalyzeReport {
     85         public boolean error=true;
     86         public String reason = "incomplete";
     87 
     88         // roll pitch yaw RMS error ( \sqrt{\frac{1}{n} \sum e_i^2 })
     89         // unit in rad
     90         public double roll_rms_error;
     91         public double pitch_rms_error;
     92         public double yaw_rms_error;
     93 
     94         // roll pitch yaw max error
     95         // unit in rad
     96         public double roll_max_error;
     97         public double pitch_max_error;
     98         public double yaw_max_error;
     99 
    100         // optimal t delta between sensor and camera data set to make best match
    101         public double optimal_delta_t;
    102         // the associate yaw offset based on initial values
    103         public double yaw_offset;
    104 
    105         public int n_of_frame;
    106         public int n_of_valid_frame;
    107 
    108         // both data below are in [sec]
    109         public double sensor_period_avg;
    110         public double sensor_period_stdev;
    111 
    112         /**
    113          * write Json format serialization to a file in case future processing need the data
    114          */
    115         public void writeToFile(File file) {
    116             try {
    117                 writeJSONToStream(new FileOutputStream(file));
    118             } catch (FileNotFoundException e) {
    119                 e.printStackTrace();
    120                 Log.e(TAG, "Cannot create analyze report file.");
    121             }
    122         }
    123 
    124         /**
    125          * Get the JSON format serialization
    126          *@return Json format serialization as String
    127          */
    128         @Override
    129         public String toString() {
    130             ByteArrayOutputStream s = new ByteArrayOutputStream();
    131             writeJSONToStream(s);
    132             return new String(s.toByteArray(),  java.nio.charset.StandardCharsets.UTF_8);
    133         }
    134 
    135         private void writeJSONToStream(OutputStream s) {
    136             try{
    137                 JsonWriter writer =
    138                         new JsonWriter(
    139                                 new OutputStreamWriter( s )
    140                         );
    141                 writer.beginObject();
    142                 writer.setLenient(true);
    143 
    144                 writer.name("roll_rms_error").value(roll_rms_error);
    145                 writer.name("pitch_rms_error").value(pitch_rms_error);
    146                 writer.name("yaw_rms_error").value(yaw_rms_error);
    147                 writer.name("roll_max_error").value(roll_max_error);
    148                 writer.name("pitch_max_error").value(pitch_max_error);
    149                 writer.name("yaw_max_error").value(yaw_max_error);
    150                 writer.name("optimal_delta_t").value(optimal_delta_t);
    151                 writer.name("yaw_offset").value(yaw_offset);
    152                 writer.name("n_of_frame").value(n_of_frame);
    153                 writer.name("n_of_valid_frame").value(n_of_valid_frame);
    154                 writer.name("sensor_period_avg").value(sensor_period_avg);
    155                 writer.name("sensor_period_stdev").value(sensor_period_stdev);
    156 
    157                 writer.endObject();
    158 
    159                 writer.close();
    160             } catch (IOException e) {
    161                 // do nothing
    162                 Log.e(TAG, "Error in serialize analyze report to JSON");
    163             } catch (IllegalArgumentException e) {
    164                 e.printStackTrace();
    165                 Log.e(TAG, "Invalid parameter to write into JSON format");
    166             }
    167         }
    168     }
    169 
    170     /**
    171      *  Process data set stored in the path specified in constructor
    172      *  and return an analyze report to caller
    173      *
    174      *  @return An AnalyzeReport that contains detailed information about analysis
    175      */
    /**
     *  Process data set stored in the path specified in constructor
     *  and return an analyze report to caller
     *
     *  Pipeline: (1) obtain vision attitude records, either from a previously solved
     *  "vision_rpy.log" or by running computer-vision analysis on the recorded video;
     *  (2) validate frame counts; (3) load the sensor attitude log; (4) search for the
     *  time offset between the two data sets that minimizes roll+pitch RMS error;
     *  (5) fill in and serialize the report.
     *
     *  @return An AnalyzeReport that contains detailed information about analysis
     */
    public AnalyzeReport processDataSet() {
        int nframe;// number of frames in video
        int nslog; // number of sensor log
        int nvlog; // number of video generated log


        AnalyzeReport report = new AnalyzeReport();

        ArrayList<AttitudeRec> srecs = new ArrayList<>();   // sensor-derived attitude
        ArrayList<AttitudeRec> vrecs = new ArrayList<>();   // vision-derived attitude
        ArrayList<AttitudeRec> srecs2 = new ArrayList<>();  // sensor data resampled onto vision timestamps


        // reuse a previously computed vision solution unless forced to redo the CV analysis
        final boolean use_solved = new File(mPath, "vision_rpy.log").exists() && !FORCE_CV_ANALYSIS;

        if (use_solved) {
            nframe = nvlog = loadAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
            nslog = loadAttitudeRecs(new File(mPath, "sensor_rpy.log"),srecs);
        }else {
            nframe = analyzeVideo(vrecs);
            nvlog = vrecs.size();

            if (LOCAL_LOGV) {
                Log.v(TAG, "Post video analysis nvlog = " + nvlog + " nframe=" + nframe);
            }
            if (nvlog <= 0 || nframe <= 0) {
                // invalid results
                report.reason = "Unable to to load recorded video.";
                return report;
            }
            // nframe must cover at least MIN_VIDEO_LENGTH_SEC worth of usable frames
            // (threshold scaled by the valid-frame ratio)
            if (nframe < MIN_VIDEO_LENGTH_SEC*VALID_FRAME_THRESHOLD) {
                // video is too short
                Log.w(TAG, "Video record is to short, n frame = " + nframe);
                report.reason = "Video too short.";
                return report;
            }
            if ((double) nvlog / nframe < VALID_FRAME_THRESHOLD) {
                // too many invalid frames
                Log.w(TAG, "Too many invalid frames, n valid frame = " + nvlog +
                        ", n total frame = " + nframe);
                report.reason = "Too many invalid frames.";
                return report;
            }

            fixFlippedAxis(vrecs);

            nslog = loadSensorLog(srecs);
        }

        // Gradient descent will have faster performance than this simple search,
        // but the performance is dominated by the vision part, so it is not very necessary.
        double delta_t;
        double min_rms = Double.MAX_VALUE;
        double min_delta_t =0.;
        double min_yaw_offset =0.;

        // pre-allocation: srecs2 needs one slot per vision record for resampleSensorLog
        for (AttitudeRec i: vrecs) {
            srecs2.add(new AttitudeRec(0,0,0,0));
        }

        // find optimal offset: brute-force scan of delta_t in [-2s, 2s) at 10ms steps,
        // scoring only roll (axis 0) + pitch (axis 1) RMS; yaw is aligned separately below
        for (delta_t = -2.0; delta_t<2.0; delta_t +=0.01) {
            double rms;
            resampleSensorLog(srecs, vrecs, delta_t, 0.0, srecs2);
            rms = Math.sqrt(calcSqrErr(vrecs, srecs2, 0)+ calcSqrErr(vrecs, srecs2, 1));
            if (rms < min_rms) {
                min_rms = rms;
                min_delta_t = delta_t;
                // yaw offset inferred from the first pair of records at the best alignment
                min_yaw_offset = vrecs.get(0).yaw - srecs2.get(0).yaw;
            }
        }
        // sample at optimal offset
        resampleSensorLog(srecs, vrecs, min_delta_t, min_yaw_offset, srecs2);

        // persist intermediate results so a re-run can take the use_solved fast path
        if (!use_solved) {
            dumpAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
            dumpAttitudeRecs(new File(mPath, "sensor_rpy.log"), srecs);
        }
        dumpAttitudeRecs(new File(mPath, "sensor_rpy_resampled.log"), srecs2);
        dumpAttitudeError(new File(mPath, "attitude_error.log"), vrecs, srecs2);

        // fill report fields
        report.roll_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 0));
        report.pitch_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 1));
        report.yaw_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 2));

        report.roll_max_error = calcMaxErr(vrecs, srecs2, 0);
        report.pitch_max_error = calcMaxErr(vrecs, srecs2, 1);
        report.yaw_max_error = calcMaxErr(vrecs, srecs2, 2);

        report.optimal_delta_t = min_delta_t;
        report.yaw_offset = (min_yaw_offset);

        report.n_of_frame = nframe;
        report.n_of_valid_frame = nvlog;

        double [] sensor_period_stat = calcSensorPeriodStat(srecs);
        report.sensor_period_avg = sensor_period_stat[0];
        report.sensor_period_stdev = sensor_period_stat[1];

        // output report to file and log in JSON format as well
        report.writeToFile(new File(mPath, "report.json"));
        if (LOCAL_LOGV)    Log.v(TAG, "Report in JSON:" + report.toString());

        report.reason = "Completed";
        report.error = false;
        return report;
    }
    285 
    286     /**
    287      * Generate pattern geometry like this one
    288      * http://docs.opencv.org/trunk/_downloads/acircles_pattern.png
    289      *
    290      * @return Array of 3D points
    291      */
    292     private MatOfPoint3f asymmetricalCircleGrid(Size size) {
    293         final int cn = 3;
    294 
    295         int n = (int)(size.width * size.height);
    296         float positions[] = new float[n * cn];
    297         float unit=0.02f;
    298         MatOfPoint3f grid = new MatOfPoint3f();
    299 
    300         for (int i = 0; i < size.height; i++) {
    301             for (int j = 0; j < size.width * cn; j += cn) {
    302                 positions[(int) (i * size.width * cn + j + 0)] =
    303                         (2 * (j / cn) + i % 2) * (float) unit;
    304                 positions[(int) (i * size.width * cn + j + 1)] =
    305                         i * unit;
    306                 positions[(int) (i * size.width * cn + j + 2)] = 0;
    307             }
    308         }
    309         grid.create(n, 1, CvType.CV_32FC3);
    310         grid.put(0, 0, positions);
    311         return grid;
    312     }
    313 
    314     /**
    315      *  Create a camera intrinsic matrix using input parameters
    316      *
    317      *  The camera intrinsic matrix will be like:
    318      *
    319      *       +-                       -+
    320      *       |  f   0    center.width  |
    321      *   A = |  0   f    center.height |
    322      *       |  0   0         1        |
    323      *       +-                       -+
    324      *
    325      *  @return An approximated (not actually calibrated) camera matrix
    326      */
    327     private static Mat cameraMatrix(float f, Size center) {
    328         final double [] data = {f, 0, center.width, 0, f, center.height, 0, 0, 1f};
    329         Mat m = new Mat(3,3, CvType.CV_64F);
    330         m.put(0, 0, data);
    331         return m;
    332     }
    333 
    334     /**
    335      *  Attitude record in time roll pitch yaw format.
    336      *
    337      */
    338     private class AttitudeRec {
    339         public double time;
    340         public double roll;
    341         public double pitch;
    342         public double yaw;
    343 
    344         // ctor
    345         AttitudeRec(double atime, double aroll, double apitch, double ayaw) {
    346             time = atime;
    347             roll = aroll;
    348             pitch = apitch;
    349             yaw = ayaw;
    350         }
    351 
    352         // ctor
    353         AttitudeRec(double atime, double [] rpy) {
    354             time = atime;
    355             roll = rpy[0];
    356             pitch = rpy[1];
    357             yaw = rpy[2];
    358         }
    359 
    360         // copy value of another to this
    361         void assign(AttitudeRec rec) {
    362             time = rec.time;
    363             roll = rec.time;
    364             pitch = rec.pitch;
    365             yaw = rec.yaw;
    366         }
    367 
    368         // copy roll-pitch-yaw value but leave the time specified by atime
    369         void assign(AttitudeRec rec, double atime) {
    370             time = atime;
    371             roll = rec.time;
    372             pitch = rec.pitch;
    373             yaw = rec.yaw;
    374         }
    375 
    376         // set each field separately
    377         void set(double atime, double aroll, double apitch, double ayaw) {
    378             time = atime;
    379             roll = aroll;
    380             pitch = apitch;
    381             yaw = ayaw;
    382         }
    383     }
    384 
    385 
    386     /**
    387      *  Load the sensor log in (time Roll-pitch-yaw) format to a ArrayList<AttitudeRec>
    388      *
    389      *  @return the number of sensor log items
    390      */
    391     private int loadSensorLog(ArrayList<AttitudeRec> recs) {
    392         //ArrayList<AttitudeRec> recs = new ArrayList<AttitudeRec>();
    393         File csvFile = new File(mPath, "sensor.log");
    394         BufferedReader br=null;
    395         String line;
    396 
    397         // preallocate and reuse
    398         double [] quat = new double[4];
    399         double [] rpy = new double[3];
    400 
    401         double t0 = -1;
    402 
    403         try {
    404             br = new BufferedReader(new FileReader(csvFile));
    405             while ((line = br.readLine()) != null) {
    406                 //space separator
    407                 String[] items = line.split(" ");
    408 
    409                 if (items.length != 5) {
    410                     recs.clear();
    411                     return -1;
    412                 }
    413 
    414                 quat[0] = Double.parseDouble(items[1]);
    415                 quat[1] = Double.parseDouble(items[2]);
    416                 quat[2] = Double.parseDouble(items[3]);
    417                 quat[3] = Double.parseDouble(items[4]);
    418 
    419                 //
    420                 quat2rpy(quat, rpy);
    421 
    422                 if (t0 < 0) {
    423                     t0 = Long.parseLong(items[0])/1e9;
    424                 }
    425                 recs.add(new AttitudeRec(Long.parseLong(items[0])/1e9-t0, rpy));
    426             }
    427 
    428         } catch (FileNotFoundException e) {
    429             e.printStackTrace();
    430             Log.e(TAG, "Cannot find sensor logging data");
    431         } catch (IOException e) {
    432             e.printStackTrace();
    433             Log.e(TAG, "Cannot read sensor logging data");
    434         } finally {
    435             if (br != null) {
    436                 try {
    437                     br.close();
    438                 } catch (IOException e) {
    439                     e.printStackTrace();
    440                 }
    441             }
    442         }
    443 
    444         return recs.size();
    445     }
    446 
    447     /**
    448      * Read video meta info
    449      */
    450     private class VideoMetaInfo {
    451         public double fps;
    452         public int frameWidth;
    453         public int frameHeight;
    454         public double fovWidth;
    455         public double fovHeight;
    456         public boolean valid = false;
    457 
    458         VideoMetaInfo(File file) {
    459 
    460             BufferedReader br=null;
    461             String line;
    462             String content="";
    463             try {
    464                 br = new BufferedReader(new FileReader(file));
    465                 while ((line = br.readLine()) != null) {
    466                     content = content +line;
    467                 }
    468 
    469             } catch (FileNotFoundException e) {
    470                 e.printStackTrace();
    471                 Log.e(TAG, "Cannot find video meta info file");
    472             } catch (IOException e) {
    473                 e.printStackTrace();
    474                 Log.e(TAG, "Cannot read video meta info file");
    475             } finally {
    476                 if (br != null) {
    477                     try {
    478                         br.close();
    479                     } catch (IOException e) {
    480                         e.printStackTrace();
    481                     }
    482                 }
    483             }
    484 
    485             if (content.isEmpty()) {
    486                 return;
    487             }
    488 
    489             try {
    490                 JSONObject json = new JSONObject(content);
    491                 frameWidth = json.getInt("width");
    492                 frameHeight = json.getInt("height");
    493                 fps = json.getDouble("frameRate");
    494                 fovWidth = json.getDouble("fovW")*Math.PI/180.0;
    495                 fovHeight = json.getDouble("fovH")*Math.PI/180.0;
    496             } catch (JSONException e) {
    497                 return;
    498             }
    499 
    500             valid = true;
    501 
    502         }
    503     }
    504 
    505 
    506 
    507     /**
    508      * Debugging helper function, load ArrayList<AttitudeRec> from a file dumped out by
    509      * dumpAttitudeRecs
    510      */
    511     private int loadAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
    512         BufferedReader br=null;
    513         String line;
    514         double time;
    515         double [] rpy = new double[3];
    516 
    517         try {
    518             br = new BufferedReader(new FileReader(file));
    519             while ((line = br.readLine()) != null) {
    520                 //space separator
    521                 String[] items = line.split(" ");
    522 
    523                 if (items.length != 4) {
    524                     recs.clear();
    525                     return -1;
    526                 }
    527 
    528                 time = Double.parseDouble(items[0]);
    529                 rpy[0] = Double.parseDouble(items[1]);
    530                 rpy[1] = Double.parseDouble(items[2]);
    531                 rpy[2] = Double.parseDouble(items[3]);
    532 
    533                 recs.add(new AttitudeRec(time, rpy));
    534             }
    535 
    536         } catch (FileNotFoundException e) {
    537             e.printStackTrace();
    538             Log.e(TAG, "Cannot find AttitudeRecs file specified.");
    539         } catch (IOException e) {
    540             e.printStackTrace();
    541             Log.e(TAG, "Read AttitudeRecs file failure");
    542         } finally {
    543             if (br != null) {
    544                 try {
    545                     br.close();
    546                 } catch (IOException e) {
    547                     e.printStackTrace();
    548                 }
    549             }
    550         }
    551 
    552         return recs.size();
    553     }
    554     /**
    555      * Debugging helper function, Dump an ArrayList<AttitudeRec> to a file
    556      */
    557     private void dumpAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
    558         OutputStreamWriter w=null;
    559         try {
    560             w = new OutputStreamWriter(new FileOutputStream(file));
    561 
    562             for (AttitudeRec r : recs) {
    563                 w.write(String.format("%f %f %f %f\r\n", r.time, r.roll, r.pitch, r.yaw));
    564             }
    565             w.close();
    566         } catch(FileNotFoundException e) {
    567             e.printStackTrace();
    568             Log.e(TAG, "Cannot create AttitudeRecs file.");
    569         } catch (IOException e) {
    570             Log.e(TAG, "Write AttitudeRecs file failure");
    571         } finally {
    572             if (w!=null) {
    573                 try {
    574                     w.close();
    575                 } catch (IOException e) {
    576                     e.printStackTrace();
    577                 }
    578             }
    579         }
    580     }
    581 
    582     /**
    583      *  Read the sensor log in ArrayList<AttitudeRec> format and find out the sensor sample time
    584      *  statistics: mean and standard deviation.
    585      *
    586      *  @return The returned value will be a double array with exact 2 items, first [0] will be
    587      *  mean and the second [1]  will be the standard deviation.
    588      *
    589      */
    590     private double [] calcSensorPeriodStat(ArrayList<AttitudeRec> srec)   {
    591         double tp = srec.get(0).time;
    592         int i;
    593         double sum = 0.0;
    594         double sumsq = 0.0;
    595         for(i=1; i<srec.size(); ++i) {
    596             double dt;
    597             dt = srec.get(i).time - tp;
    598             sum += dt;
    599             sumsq += dt*dt;
    600             tp += dt;
    601         }
    602         double [] ret = new double[2];
    603         ret[0] = sum/srec.size();
    604         ret[1] = Math.sqrt(sumsq/srec.size() - ret[0]*ret[0]);
    605         return ret;
    606     }
    607 
    608     /**
    609      * Flipping the axis as the image are flipped upside down in OpenGL frames
    610      */
    611     private void fixFlippedAxis(ArrayList<AttitudeRec> vrecs)   {
    612         for (AttitudeRec i: vrecs) {
    613             i.yaw = -i.yaw;
    614         }
    615     }
    616 
    617     /**
    618      *  Calculate the maximum error on the specified axis between two time aligned (resampled)
    619      *  ArrayList<AttitudeRec>. Yaw axis needs special treatment as 0 and 2pi error are same thing
    620      *
    621      * @param ra  one ArrayList of AttitudeRec
    622      * @param rb  the other ArrayList of AttitudeRec
    623      * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw)
    624      * @return Maximum error
    625      */
    626     private double calcMaxErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis)  {
    627         // check if they are valid and comparable data
    628         if (ra.size() != rb.size()) {
    629             throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
    630         }
    631         // check input parameter validity
    632         if (axis<0 || axis > 2) {
    633             throw new IllegalArgumentException("Invalid data axis.");
    634         }
    635 
    636         int i;
    637         double max = 0.0;
    638         double diff = 0.0;
    639         for(i=0; i<ra.size(); ++i) {
    640             // make sure they are aligned data
    641             if (ra.get(i).time != rb.get(i).time) {
    642                 throw new IllegalArgumentException("Element "+i+
    643                         " of two inputs has different time.");
    644             }
    645             switch(axis) {
    646                 case 0:
    647                     diff = ra.get(i).roll - rb.get(i).roll; // they always opposite of each other..
    648                     break;
    649                 case 1:
    650                     diff = ra.get(i).pitch - rb.get(i).pitch;
    651                     break;
    652                 case 2:
    653                     diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
    654                             -Math.PI)-Math.PI;
    655                     break;
    656             }
    657             diff = Math.abs(diff);
    658             if (diff>max) {
    659                 max = diff;
    660             }
    661         }
    662         return max;
    663     }
    664 
    665     /**
    666      *  Calculate the RMS error on the specified axis between two time aligned (resampled)
    667      *  ArrayList<AttitudeRec>. Yaw axis needs special treatment as 0 and 2pi error are same thing
    668      *
    669      * @param ra  one ArrayList of AttitudeRec
    670      * @param rb  the other ArrayList of AttitudeRec
    671      * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw)
    672      * @return Mean square error
    673      */
    674     private double calcSqrErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis) {
    675         // check if they are valid and comparable data
    676         if (ra.size() != rb.size()) {
    677             throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
    678         }
    679         // check input parameter validity
    680         if (axis<0 || axis > 2) {
    681             throw new IllegalArgumentException("Invalid data axis.");
    682         }
    683 
    684         int i;
    685         double sum = 0.0;
    686         double diff = 0.0;
    687         for(i=0; i<ra.size(); ++i) {
    688             // check input data validity
    689             if (ra.get(i).time != rb.get(i).time) {
    690                 throw new IllegalArgumentException("Element "+i+
    691                         " of two inputs has different time.");
    692             }
    693 
    694             switch(axis) {
    695                 case 0:
    696                     diff = ra.get(i).roll - rb.get(i).roll;
    697                     break;
    698                 case 1:
    699                     diff = ra.get(i).pitch - rb.get(i).pitch;
    700                     break;
    701                 case 2:
    702                     diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))-
    703                             Math.PI)-Math.PI;
    704                     break;
    705             }
    706 
    707             sum += diff*diff;
    708         }
    709         return sum/ra.size();
    710     }
    711 
    712     /**
    713      * Debugging helper function. Dump the error between two time aligned ArrayList<AttitudeRec>'s
    714      *
    715      * @param file File to write to
    716      * @param ra  one ArrayList of AttitudeRec
    717      * @param rb  the other ArrayList of AttitudeRec
    718      */
    719     private void dumpAttitudeError(File file, ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb){
    720         if (ra.size() != rb.size()) {
    721             throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
    722         }
    723 
    724         int i;
    725 
    726         ArrayList<AttitudeRec> rerr = new ArrayList<>();
    727         for(i=0; i<ra.size(); ++i) {
    728             if (ra.get(i).time != rb.get(i).time) {
    729                 throw new IllegalArgumentException("Element "+ i
    730                         + " of two inputs has different time.");
    731             }
    732 
    733             rerr.add(new AttitudeRec(ra.get(i).time, ra.get(i).roll - rb.get(i).roll,
    734                     ra.get(i).pitch - rb.get(i).pitch,
    735                     (Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
    736                             -Math.PI)-Math.PI)));
    737 
    738         }
    739         dumpAttitudeRecs(file, rerr);
    740     }
    741 
    742     /**
    743      * Resample one ArrayList<AttitudeRec> with respect to another ArrayList<AttitudeRec>
    744      *
    745      * @param rec           the ArrayList of AttitudeRec to be sampled
    746      * @param timebase      the other ArrayList of AttitudeRec that serves as time base
    747      * @param delta_t       offset in time before resample
    748      * @param yaw_offset    offset in yaw axis
    749      * @param resampled     output ArrayList of AttitudeRec
    750      */
    751 
    /**
     * Resample one ArrayList<AttitudeRec> with respect to another ArrayList<AttitudeRec>
     *
     * Both "rec" and "timebase" are assumed ordered by time (sweep cursor j below relies
     * on this — TODO confirm callers guarantee it). "resampled" must be pre-allocated
     * with exactly timebase.size() entries; each is overwritten in place.
     *
     * @param rec           the ArrayList of AttitudeRec to be sampled
     * @param timebase      the other ArrayList of AttitudeRec that serves as time base
     * @param delta_t       offset in time before resample
     * @param yaw_offset    offset in yaw axis
     * @param resampled     output ArrayList of AttitudeRec
     */

    private void resampleSensorLog(ArrayList<AttitudeRec> rec, ArrayList<AttitudeRec> timebase,
            double delta_t, double yaw_offset, ArrayList<AttitudeRec> resampled)    {
        int i;
        // j is a cursor into "rec" that only moves forward; it persists across outer
        // iterations so the whole resample is a single O(n+m) sweep
        int j = -1;
        for(i=0; i<timebase.size(); i++) {
            // shifted target time for this output sample
            double time = timebase.get(i).time + delta_t;

            // advance j until rec[j] is the last record strictly before "time"
            while(j<rec.size()-1 && rec.get(j+1).time < time) j++;

            if (j == -1) {
                //use first: target time precedes all of "rec" — clamp to the first record
                // (note: yaw_offset is not applied in the clamped branches)
                resampled.get(i).assign(rec.get(0), timebase.get(i).time);
            } else if (j == rec.size()-1) {
                // use last: target time is beyond all of "rec" — clamp to the last record
                resampled.get(i).assign(rec.get(j), timebase.get(i).time);
            } else {
                // do linear resample between rec[j] and rec[j+1]
                double alpha = (time - rec.get(j).time)/((rec.get(j+1).time - rec.get(j).time));
                double roll = (1-alpha) * rec.get(j).roll + alpha * rec.get(j+1).roll;
                double pitch = (1-alpha) * rec.get(j).pitch + alpha * rec.get(j+1).pitch;
                double yaw = (1-alpha) * rec.get(j).yaw + alpha * rec.get(j+1).yaw + yaw_offset;
                resampled.get(i).set(timebase.get(i).time, roll, pitch, yaw);
            }
        }
    }
    777 
    778     /**
    779      * Analyze video frames using computer vision approach and generate a ArrayList<AttitudeRec>
    780      *
    781      * @param recs  output ArrayList of AttitudeRec
    782      * @return total number of frame of the video
    783      */
    private int analyzeVideo(ArrayList<AttitudeRec> recs) {
        // Recording metadata (fps, frame size, field of view) produced by the capture step.
        VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));

        int decimation = 1;
        // Prefer decoder-provided presentation timestamps; the loop below falls back to
        // frame-index/fps timing if the decoder ever reports -1.
        boolean use_timestamp = true;

        // roughly determine if decimation is necessary
        if (meta.fps > DECIMATION_FPS_TARGET) {
            decimation = (int)(meta.fps / DECIMATION_FPS_TARGET);
            meta.fps /=decimation;
        }

        VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(
                new File(mPath, "video.mp4"), decimation);


        Mat frame;
        Mat gray = new Mat();
        // Zero-based index of the current frame; -1 until the first frame is fetched.
        int i = -1;

        Size frameSize = videoDecoder.getSize();

        // Sanity check: decoded frame size must match what the metadata recorded.
        if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
            // this is very unlikely
            return -1;
        }

        if (TRACE_VIDEO_ANALYSIS) {
            Debug.startMethodTracing("cvprocess");
        }

        // Dimensions of the asymmetric circle-grid calibration target.
        final int patternWidth = 4;
        final int patternHeight = 11;
        Size patternSize = new Size(patternWidth, patternHeight);

        // Focal length in pixels derived from the recorded horizontal field of view
        // (pinhole model: fc = (W/2) / tan(fov/2)).
        float fc = (float)(meta.frameWidth/2.0/Math.tan(meta.fovWidth/2.0));
        Mat camMat = cameraMatrix(fc, new Size(frameSize.width/2, frameSize.height/2));
        MatOfDouble coeff = new MatOfDouble(); // dummy

        MatOfPoint2f centers = new MatOfPoint2f();
        MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
        Mat rvec = new MatOfFloat();
        Mat tvec = new MatOfFloat();

        MatOfPoint2f reprojCenters = new MatOfPoint2f();

        if (LOCAL_LOGV) {
            Log.v(TAG, "Camera Mat = \n" + camMat.dump());
        }

        long startTime = System.nanoTime();
        long [] ts = new long[1];

        // Process frames until the decoder signals end of stream (getFrame returns null).
        while ((frame = videoDecoder.getFrame(ts)) !=null) {
            if (LOCAL_LOGV) {
                Log.v(TAG, "got a frame " + i);
            }

            // A -1 timestamp means the decoder cannot provide timing; switch permanently
            // to frame-index/fps timing for the rest of the video.
            if (use_timestamp && ts[0] == -1) {
                use_timestamp = false;
            }

            // has to be in front, as there are cases where execution
            // will skip the later part of this while
            i++;

            // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
            Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);

            boolean foundPattern = Calib3d.findCirclesGrid(
                    gray,  patternSize, centers, Calib3d.CALIB_CB_ASYMMETRIC_GRID);

            if (!foundPattern) {
                // skip to next frame
                continue;
            }

            if (OUTPUT_DEBUG_IMAGE) {
                Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
            }

            // figure out the extrinsic parameters using real ground truth 3D points and the pixel
            // position of blobs found in findCircleGrid, an estimated camera matrix and
            // no-distortion are assumed.
            boolean foundSolution =
                    Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec,
                            false, Calib3d.CV_ITERATIVE);

            if (!foundSolution) {
                // skip to next frame
                if (LOCAL_LOGV) {
                    Log.v(TAG, "cannot find pnp solution in frame " + i + ", skipped.");
                }
                continue;
            }

            // reproject points to for evaluation of result accuracy of solvePnP
            Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);

            // Calculate the average distance between opposite corners of the pattern in pixels
            Point[] centerPoints = centers.toArray();
            Point bottomLeftPos = centerPoints[0];
            Point bottomRightPos = centerPoints[patternWidth - 1];
            Point topLeftPos = centerPoints[(patternHeight * patternWidth) - patternWidth];
            Point topRightPos = centerPoints[(patternHeight * patternWidth) - 1];
            double avgPixelDist = (getDistanceBetweenPoints(bottomLeftPos, topRightPos)
                    + getDistanceBetweenPoints(bottomRightPos, topLeftPos)) / 2;

            // Calculate the average pixel error between the circle centers from the video and the
            // reprojected circle centers based on the estimated camera position. The error provides
            // a way to estimate how accurate the assumed test device's position is. If the error
            // is high, then the frame should be discarded to prevent an inaccurate test device's
            // position from being compared against the rotation vector sample at that time.
            Point[] reprojectedPointsArray = reprojCenters.toArray();
            double avgCenterError = 0.0;
            for (int curCenter = 0; curCenter < reprojectedPointsArray.length; curCenter++) {
                avgCenterError += getDistanceBetweenPoints(
                        reprojectedPointsArray[curCenter], centerPoints[curCenter]);
            }
            avgCenterError /= reprojectedPointsArray.length;

            if (LOCAL_LOGV) {
                Log.v(TAG, "Found attitude, re-projection error = " + avgCenterError);
            }

            // if error is reasonable, add it into the results. Use a dynamic threshold based on
            // the pixel distance of opposite corners of the pattern to prevent higher resolution
            // video or the distance between the camera and the test pattern from impacting the test
            if (avgCenterError < REPROJECTION_THRESHOLD_RATIO * avgPixelDist) {
                double [] rv = new double[3];
                double timestamp;

                // Rodrigues rotation vector from solvePnP for this frame's pose.
                rvec.get(0,0, rv);
                if (use_timestamp) {
                    // decoder timestamp is in microseconds; convert to seconds
                    timestamp = (double)ts[0] / 1e6;
                } else {
                    timestamp = (double) i / meta.fps;
                }
                if (LOCAL_LOGV) Log.v(TAG, String.format("Added frame %d  ts = %f", i, timestamp));
                recs.add(new AttitudeRec(timestamp, rodr2rpy(rv)));
            }

            if (OUTPUT_DEBUG_IMAGE) {
                Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
                Imgcodecs.imwrite(mPath + "/RVCVRecData/DebugCV/img" + i + ".png", frame);
            }
        }

        if (LOCAL_LOGV) {
            Log.v(TAG, "Finished decoding");
        }

        if (TRACE_VIDEO_ANALYSIS) {
            Debug.stopMethodTracing();
        }

        if (LOCAL_LOGV) {
            // time analysis
            double totalTime = (System.nanoTime()-startTime)/1e9;
            Log.i(TAG, "Total time: "+totalTime +"s, Per frame time: "+totalTime/i );
        }
        // NOTE(review): i starts at -1 and is incremented once per decoded frame, so this is
        // the zero-based index of the last frame (frame count - 1), not the total frame count
        // the javadoc suggests — confirm which value callers expect.
        return i;
    }
    947 
    948     /**
    949      * OpenCV for Android have not support the VideoCapture from file
    950      * This is a make shift solution before it is supported.
    951      * One issue right now is that the glReadPixels is quite slow .. around 6.5ms for a 720p frame
    952      */
    private class VideoDecoderForOpenCV implements Runnable {
        static final String TAG = "VideoDecoderForOpenCV";

        // MediaExtractor/MediaCodec pipeline; frames are rendered to an offscreen GL surface.
        private MediaExtractor extractor=null;
        private MediaCodec decoder=null;
        private CtsMediaOutputSurface surface=null;

        // Single-slot hand-off buffer between the decode thread (producer) and getFrame()
        // (consumer).
        private MatBuffer mMatBuffer;

        private final File mVideoFile;

        // True once setup() found a video track and configured the decoder.
        private boolean valid;
        // Monitor used to block the constructor until the decode thread finishes setup().
        private Object setupSignal;

        private Thread mThread;
        // Only every mDecimation-th decoded frame is handed to the consumer.
        private int mDecimation;

        /**
         * Constructor
         * @param file video file
         * @param decimation process every "decimation" number of frame
         */
        VideoDecoderForOpenCV(File file, int decimation) {
            mVideoFile = file;
            mDecimation = decimation;
            valid = false;

            start();
        }

        /**
         * Constructor
         * @param file video file
         */
        VideoDecoderForOpenCV(File file)   {
            this(file, 1);
        }

        /**
         * Test if video decoder is in valid states ready to output video.
         * @return true if the decoder was set up successfully.
         */
        public boolean isValid() {
            return valid;
        }

        /**
         * Spawn the decode thread and block until its setup() phase has signaled completion
         * (successful or not — check isValid() afterwards).
         */
        private void start() {
            setupSignal = new Object();
            mThread = new Thread(this);
            mThread.start();

            synchronized (setupSignal) {
                try {
                    setupSignal.wait();
                } catch (InterruptedException e) {
                    Log.e(TAG, "Interrupted when waiting for video decoder setup ready");
                }
            }
        }
        /**
         * Interrupt the decode thread, wait for it to exit, then stop the codec.
         * NOTE(review): decoder.stop() is not null-checked here — if setup() failed before
         * creating the codec this would NPE; confirm stop() is only called after a valid start.
         */
        private void stop() {
            if (mThread != null) {
                mThread.interrupt();
                try {
                    mThread.join();
                } catch (InterruptedException e) {
                    Log.e(TAG, "Interrupted when waiting for video decoder thread to stop");
                }
                try {
                    decoder.stop();
                }catch (IllegalStateException e) {
                    Log.e(TAG, "Video decoder is not in a state that can be stopped");
                }
            }
            mThread = null;
        }

        /**
         * Release codec, surface and extractor (idempotent — each is nulled after release).
         */
        void teardown() {
            if (decoder!=null) {
                decoder.release();
                decoder = null;
            }
            if (surface!=null) {
                surface.release();
                surface = null;
            }
            if (extractor!=null) {
                extractor.release();
                extractor = null;
            }
        }

        /**
         * Open the video file, select the first video track, and configure a decoder that
         * renders into an offscreen surface. Sets "valid" on success; returns early
         * (leaving valid == false) on any failure.
         */
        void setup() {
            int width=0, height=0;

            extractor = new MediaExtractor();

            try {
                extractor.setDataSource(mVideoFile.getPath());
            } catch (IOException e) {
                return;
            }

            for (int i = 0; i < extractor.getTrackCount(); i++) {
                MediaFormat format = extractor.getTrackFormat(i);
                String mime = format.getString(MediaFormat.KEY_MIME);
                width = format.getInteger(MediaFormat.KEY_WIDTH);
                height = format.getInteger(MediaFormat.KEY_HEIGHT);

                if (mime.startsWith("video/")) {
                    extractor.selectTrack(i);
                    try {
                        decoder = MediaCodec.createDecoderByType(mime);
                    }catch (IOException e) {
                        continue;
                    }
                    // Decode to surface
                    //decoder.configure(format, surface, null, 0);

                    // Decode to offscreen surface
                    surface = new CtsMediaOutputSurface(width, height);
                    mMatBuffer = new MatBuffer(width, height);

                    decoder.configure(format, surface.getSurface(), null, 0);
                    break;
                }
            }

            if (decoder == null) {
                Log.e(TAG, "Can't find video info!");
                return;
            }
            valid = true;
        }

        /**
         * Decode thread main loop: feed encoded samples into the codec, render decoded
         * frames to the offscreen surface, and publish every mDecimation-th frame through
         * mMatBuffer until end of stream or interruption.
         */
        @Override
        public void run() {
            setup();

            // Wake the constructor regardless of setup success; caller checks isValid().
            synchronized (setupSignal) {
                setupSignal.notify();
            }

            if (!valid) {
                return;
            }

            decoder.start();

            ByteBuffer[] inputBuffers = decoder.getInputBuffers();
            ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

            boolean isEOS = false;
            long startMs = System.currentTimeMillis();
            long timeoutUs = 10000;

            // iframe counts rendered frames; frameTimestamp is the last queued sample time (us).
            int iframe = 0;
            long frameTimestamp = 0;

            while (!Thread.interrupted()) {
                // Input side: queue one encoded sample (or EOS) if the codec has a free buffer.
                if (!isEOS) {
                    int inIndex = decoder.dequeueInputBuffer(10000);
                    if (inIndex >= 0) {
                        ByteBuffer buffer = inputBuffers[inIndex];
                        int sampleSize = extractor.readSampleData(buffer, 0);
                        if (sampleSize < 0) {
                            if (LOCAL_LOGD) {
                                Log.d("VideoDecoderForOpenCV",
                                        "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                            }
                            decoder.queueInputBuffer(inIndex, 0, 0, 0,
                                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                            isEOS = true;
                        } else {
                            frameTimestamp = extractor.getSampleTime();
                            decoder.queueInputBuffer(inIndex, 0, sampleSize, frameTimestamp, 0);
                            if (LOCAL_LOGD) {
                                Log.d(TAG, String.format("Frame %d sample time %f s",
                                            iframe, (double)frameTimestamp/1e6));
                            }
                            extractor.advance();
                        }
                    }
                }

                // Output side: drain one decoded buffer or handle a codec status code.
                int outIndex = decoder.dequeueOutputBuffer(info, 10000);
                MediaFormat outFormat;
                switch (outIndex) {
                    case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                        if (LOCAL_LOGD) {
                            Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                        }
                        outputBuffers = decoder.getOutputBuffers();
                        break;
                    case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                        outFormat = decoder.getOutputFormat();
                        if (LOCAL_LOGD) {
                            Log.d(TAG, "New format " + outFormat);
                        }
                        break;
                    case MediaCodec.INFO_TRY_AGAIN_LATER:
                        if (LOCAL_LOGD) {
                            Log.d(TAG, "dequeueOutputBuffer timed out!");
                        }
                        break;
                    default:

                        ByteBuffer buffer = outputBuffers[outIndex];
                        boolean doRender = (info.size != 0);

                        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                        // to SurfaceTexture to convert to a texture.  The API doesn't
                        // guarantee that the texture will be available before the call
                        // returns, so we need to wait for the onFrameAvailable callback to
                        // fire.  If we don't wait, we risk rendering from the previous frame.
                        decoder.releaseOutputBuffer(outIndex, doRender);

                        if (doRender) {
                            surface.awaitNewImage();
                            surface.drawImage();
                            if (LOCAL_LOGV) {
                                Log.v(TAG, "Finish drawing a frame!");
                            }
                            if ((iframe++ % mDecimation) == 0) {
                                //Send the frame for processing
                                mMatBuffer.put(frameTimestamp);
                            }
                        }
                        break;
                }

                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (LOCAL_LOGD) {
                        Log.d("VideoDecoderForOpenCV", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                    }
                    break;
                }
            }
            // Unblock any consumer waiting in getFrame(); it will observe null from here on.
            mMatBuffer.invalidate();

            decoder.stop();

            teardown();
            mThread = null;
        }


        /**
         * Get next valid frame
         * @return Frame in OpenCV mat
         */
        public Mat getFrame(long ts[]) {
            return mMatBuffer.get(ts);
        }

        /**
         * Get the size of the frame
         * @return size of the frame
         */
        Size getSize() {
            return mMatBuffer.getSize();
        }

        /**
         * A synchronized buffer
         *
         * Single-slot producer/consumer hand-off: the decode thread fills it via put()
         * (glReadPixels from the offscreen surface) and blocks while full; the analysis
         * thread drains it via get() and blocks while empty.
         */
        class MatBuffer {
            private Mat mat;
            private byte[] bytes;
            private ByteBuffer buf;
            // Presentation timestamp (us) of the buffered frame; -1 until first put().
            private long timestamp;
            // True when the slot holds an unconsumed frame.
            private boolean full;

            private int mWidth, mHeight;
            private boolean mValid = false;

            MatBuffer(int width, int height) {
                mWidth = width;
                mHeight = height;

                mat = new Mat(height, width, CvType.CV_8UC4); //RGBA
                buf = ByteBuffer.allocateDirect(width*height*4);
                bytes = new byte[width*height*4];
                timestamp = -1;

                mValid = true;
                full = false;
            }

            // Mark the buffer dead (end of stream) and wake any blocked get()/put().
            public synchronized void invalidate() {
                mValid = false;
                notifyAll();
            }

            /**
             * Block until a frame is available, copy it into the Mat and return it.
             * @param ts single-element array receiving the frame timestamp (us)
             * @return the frame, or null if the buffer was invalidated or the wait
             *         was interrupted
             */
            public synchronized Mat get(long ts[]) {

                if (!mValid) return null;
                while (full == false) {
                    try {
                        wait();
                        if (!mValid) return null;
                    } catch (InterruptedException e) {
                        return null;
                    }
                }
                mat.put(0,0, bytes);
                full = false;
                notifyAll();
                ts[0] = timestamp;
                return mat;
            }

            /**
             * Block until the slot is free, then read the current GL surface pixels into
             * the slot and publish them with the given timestamp.
             */
            public synchronized void put(long ts) {
                while (full) {
                    try {
                        wait();
                    } catch (InterruptedException e) {
                        Log.e(TAG, "Interrupted when waiting for space in buffer");
                    }
                }
                // Read back the rendered frame from the offscreen surface (RGBA).
                GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA,
                        GL10.GL_UNSIGNED_BYTE, buf);
                buf.get(bytes);
                buf.rewind();

                timestamp = ts;
                full = true;
                notifyAll();
            }

            public Size getSize() {
                // NOTE(review): this tests the outer class's "valid" flag, not this buffer's
                // own mValid — confirm whether that shadow-like mismatch is intended.
                if (valid) {
                    return mat.size();
                }
                return new Size();
            }
        }
    }
   1291 
   1292 
   1293     /* a small set of math functions */
   1294     private static double [] quat2rpy( double [] q) {
   1295         double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])),
   1296                 Math.asin(2*(q[0]*q[2] - q[3]*q[1])),
   1297                 Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))};
   1298         return rpy;
   1299     }
   1300 
   1301     private static void quat2rpy( double [] q, double[] rpy) {
   1302         rpy[0] = Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2]));
   1303         rpy[1] = Math.asin(2*(q[0]*q[2] - q[3]*q[1]));
   1304         rpy[2] = Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]));
   1305     }
   1306 
   1307     private static Mat quat2rpy(Mat quat) {
   1308         double [] q = new double[4];
   1309         quat.get(0,0,q);
   1310 
   1311         double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])),
   1312                 Math.asin(2*(q[0]*q[2] - q[3]*q[1])),
   1313                 Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))};
   1314 
   1315         Mat rpym = new Mat(3,1, CvType.CV_64F);
   1316         rpym.put(0,0, rpy);
   1317         return rpym;
   1318     }
   1319 
   1320     private static double [] rodr2quat( double [] r) {
   1321         double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]);
   1322         double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t,
   1323                 Math.sin(t/2)*r[2]/t};
   1324         return quat;
   1325     }
   1326 
   1327     private static void rodr2quat( double [] r, double [] quat) {
   1328         double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]);
   1329         quat[0] = Math.cos(t/2);
   1330         quat[1] = Math.sin(t/2)*r[0]/t;
   1331         quat[2] = Math.sin(t/2)*r[1]/t;
   1332         quat[3] = Math.sin(t/2)*r[2]/t;
   1333     }
   1334 
   1335     private static Mat rodr2quat(Mat rodr) {
   1336         double t = Core.norm(rodr);
   1337         double [] r = new double[3];
   1338         rodr.get(0,0,r);
   1339 
   1340         double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t,
   1341                 Math.sin(t/2)*r[2]/t};
   1342         Mat quatm = new Mat(4,1, CvType.CV_64F);
   1343         quatm.put(0, 0, quat);
   1344         return quatm;
   1345     }
   1346 
   1347     private static double [] rodr2rpy( double [] r) {
   1348         return quat2rpy(rodr2quat(r));
   1349     }
   1350 
   1351     private double getDistanceBetweenPoints(Point a, Point b) {
   1352         return Math.sqrt(Math.pow(a.x - b.x, 2) + Math.pow(a.y - b.y, 2));
   1353     }
   1354     //////////////////
   1355 
   1356 }
   1357