/*
 * Copyright (C) 2007 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.test;

import com.android.internal.util.Predicate;
import com.android.internal.util.Predicates;

import android.app.Activity;
import android.app.Instrumentation;
import android.os.Bundle;
import android.os.Debug;
import android.os.Looper;
import android.os.Parcelable;
import android.os.PerformanceCollector;
import android.os.PerformanceCollector.PerformanceResultsWriter;
import android.test.suitebuilder.TestMethod;
import android.test.suitebuilder.TestPredicates;
import android.test.suitebuilder.TestSuiteBuilder;
import android.test.suitebuilder.annotation.HasAnnotation;
import android.test.suitebuilder.annotation.LargeTest;
import android.util.Log;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

import junit.framework.AssertionFailedError;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestListener;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import junit.runner.BaseTestRunner;
import junit.textui.ResultPrinter;

/**
 * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against
 * an Android package (application).
 *
 * <div class="special reference">
 * <h3>Developer Guides</h3>
 * <p>For more information about application testing, read the
 * <a href="{@docRoot}guide/topics/testing/index.html">Testing</a> developer guide.</p>
 * </div>
 *
 * <h3>Typical Usage</h3>
 * <ol>
 * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests
 * against the classes in your package.  Typically these are subclassed from:
 *   <ul><li>{@link android.test.ActivityInstrumentationTestCase2}</li>
 *   <li>{@link android.test.ActivityUnitTestCase}</li>
 *   <li>{@link android.test.AndroidTestCase}</li>
 *   <li>{@link android.test.ApplicationTestCase}</li>
 *   <li>{@link android.test.InstrumentationTestCase}</li>
 *   <li>{@link android.test.ProviderTestCase}</li>
 *   <li>{@link android.test.ServiceTestCase}</li>
 *   <li>{@link android.test.SingleLaunchActivityTestCase}</li></ul>
 *   (A minimal illustrative sketch of such a test case follows this comment.)
 * <li>In an appropriate AndroidManifest.xml, define this instrumentation with
 * the appropriate android:targetPackage set.
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with no optional arguments, to run all tests (except performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e func true' to run all functional tests. These are tests that derive from
 * {@link android.test.InstrumentationTestCase}.
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e unit true' to run all unit tests. These are tests that <i>do not</i> derive
 * from {@link android.test.InstrumentationTestCase} (and are not performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e class' set to run an individual {@link junit.framework.TestCase}.
 * </ol>
 * <p/>
 * <b>Running all tests:</b> adb shell am instrument -w
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all small tests:</b> adb shell am instrument -w
 * -e size small
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all medium tests:</b> adb shell am instrument -w
 * -e size medium
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all large tests:</b> adb shell am instrument -w
 * -e size large
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Filter test run to tests with given annotation:</b> adb shell am instrument -w
 * -e annotation com.android.foo.MyAnnotation
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * If used with other options, the resulting test run will contain the intersection of the two
 * options. e.g. "-e size large -e annotation com.android.foo.MyAnnotation" will run only tests
 * with both the {@link LargeTest} and "com.android.foo.MyAnnotation" annotations.
 * <p/>
 * <b>Filter test run to tests <i>without</i> given annotation:</b> adb shell am instrument -w
 * -e notAnnotation com.android.foo.MyAnnotation
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running a single test case:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running a single test:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest#testFoo
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running multiple tests:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest,com.android.foo.TooTest
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all tests in a java package:</b> adb shell am instrument -w
 * -e package com.android.foo.subpkg
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Including performance tests:</b> adb shell am instrument -w
 * -e perf true
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>To debug your tests, set a break point in your code and pass:</b>
 * -e debug true
 * <p/>
 * <b>To run in 'log only' mode:</b>
 * -e log true
 * This option will load and iterate through all test classes and methods, but will bypass actual
 * test execution. It is useful for quickly obtaining info on the tests to be executed by an
 * instrumentation command.
 * <p/>
 * <b>To generate EMMA code coverage:</b>
 * -e coverage true
 * Note: this requires an emma instrumented build. By default, the code coverage results file
 * will be saved in a /data/<app>/coverage.ec file, unless overridden by the coverageFile flag
 * (see below).
 * <p/>
 * <b>To specify the EMMA code coverage results file path:</b>
 * -e coverageFile /sdcard/myFile.ec
 * <br/>
 * in addition to the other arguments.
 */
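
/* (not JavaDoc)
 * Illustrative only, not part of this runner: a minimal sketch of the kind of test case the
 * "Typical Usage" notes above describe. FooActivity and FooActivityTest are hypothetical names;
 * any activity in the instrumented (target) package would do.
 *
 *   public class FooActivityTest extends ActivityInstrumentationTestCase2<FooActivity> {
 *       public FooActivityTest() {
 *           super(FooActivity.class);
 *       }
 *
 *       // Annotating with @SmallTest lets "-e size small" select this method.
 *       @SmallTest
 *       public void testPreconditions() {
 *           assertNotNull(getActivity());
 *       }
 *   }
 */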

/* (not JavaDoc)
 * Although not necessary in most cases, another way to use this class is to extend it and have the
 * derived class return the desired test suite from the {@link #getTestSuite()} method. The test
 * suite returned from this method will be used if no target class is defined in the meta-data or
 * command line argument parameters. If a derived class is used, it needs to be added as an
 * instrumentation to the AndroidManifest.xml and the command to run it would look like:
 * <p/>
 * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i>
 * <p/>
 * Where <i>com.android.FooInstrumentationTestRunner</i> is the derived class.
 * (A minimal sketch of such a derived runner follows this comment.)
 *
 * This model is used by many existing app tests, but can probably be deprecated.
 */
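
/* (not JavaDoc)
 * Illustrative only, not part of this runner: a minimal sketch of such a derived runner,
 * assuming a hypothetical FooTest test class in the target package. The suite returned from
 * getAllTests() is what getTestSuite() hands back to onCreate() below when no class or package
 * argument is supplied.
 *
 *   public class FooInstrumentationTestRunner extends InstrumentationTestRunner {
 *       @Override
 *       public TestSuite getAllTests() {
 *           TestSuite suite = new TestSuite();
 *           suite.addTestSuite(FooTest.class);  // hypothetical TestCase subclass
 *           return suite;
 *       }
 *   }
 */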
public class InstrumentationTestRunner extends Instrumentation implements TestSuiteProvider {

    /** @hide */
    public static final String ARGUMENT_TEST_CLASS = "class";
    /** @hide */
    public static final String ARGUMENT_TEST_PACKAGE = "package";
    /** @hide */
    public static final String ARGUMENT_TEST_SIZE_PREDICATE = "size";
    /** @hide */
    public static final String ARGUMENT_DELAY_MSEC = "delay_msec";

    private static final String SMALL_SUITE = "small";
    private static final String MEDIUM_SUITE = "medium";
    private static final String LARGE_SUITE = "large";

    private static final String ARGUMENT_LOG_ONLY = "log";
    /** @hide */
    static final String ARGUMENT_ANNOTATION = "annotation";
    /** @hide */
    static final String ARGUMENT_NOT_ANNOTATION = "notAnnotation";

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the "small"
     * suite. It is used to make an educated guess at which suite an unlabeled test belongs to.
     */
    private static final float SMALL_SUITE_MAX_RUNTIME = 100;

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the
     * "medium" suite. It is used to make an educated guess at which suite an unlabeled test
     * belongs to.
     */
    private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000;

    /**
     * The following keys are used in the status bundle to provide structured reports to
     * an IInstrumentationWatcher.
     */

    /**
     * This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER},
     * identifies InstrumentationTestRunner as the source of the report.  This is sent with all
     * status messages.
     */
    public static final String REPORT_VALUE_ID = "InstrumentationTestRunner";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the total number of tests that are being run.  This is sent with all status
     * messages.
     */
    public static final String REPORT_KEY_NUM_TOTAL = "numtests";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the sequence number of the current test.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NUM_CURRENT = "current";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test class.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_CLASS = "class";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_TEST = "test";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the run time in seconds of the current test.
     */
    private static final String REPORT_KEY_RUN_TIME = "runtime";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the number of total iterations of the current test.
     */
    private static final String REPORT_KEY_NUM_ITERATIONS = "numiterations";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the guessed suite assignment for the current test.
     */
    private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the path to the generated code coverage file.
     */
    private static final String REPORT_KEY_COVERAGE_PATH = "coverageFilePath";

    /**
     * The test is starting.
     */
    public static final int REPORT_VALUE_RESULT_START = 1;
    /**
     * The test completed successfully.
     */
    public static final int REPORT_VALUE_RESULT_OK = 0;
    /**
     * The test completed with an error.
     */
    public static final int REPORT_VALUE_RESULT_ERROR = -1;
    /**
     * The test completed with a failure.
     */
    public static final int REPORT_VALUE_RESULT_FAILURE = -2;
    /**
     * If included in the status bundle sent to an IInstrumentationWatcher, this key
     * identifies a stack trace describing an error or failure.  This is sent with any status
     * message describing a specific test being completed.
     */
    public static final String REPORT_KEY_STACK = "stack";

    // Default file name for code coverage
    private static final String DEFAULT_COVERAGE_FILE_NAME = "coverage.ec";

    private static final String LOG_TAG = "InstrumentationTestRunner";

    private final Bundle mResults = new Bundle();
    private Bundle mArguments;
    private AndroidTestRunner mTestRunner;
    private boolean mDebug;
    private boolean mJustCount;
    private boolean mSuiteAssignmentMode;
    private int mTestCount;
    private String mPackageOfTests;
    private boolean mCoverage;
    private String mCoverageFilePath;
    private int mDelayMsec;

    @Override
    public void onCreate(Bundle arguments) {
        super.onCreate(arguments);
        mArguments = arguments;

        // Apk paths used to search for test classes when using TestSuiteBuilders.
        String[] apkPaths =
                {getTargetContext().getPackageCodePath(), getContext().getPackageCodePath()};
        ClassPathPackageInfoSource.setApkPaths(apkPaths);

        Predicate<TestMethod> testSizePredicate = null;
        Predicate<TestMethod> testAnnotationPredicate = null;
        Predicate<TestMethod> testNotAnnotationPredicate = null;
        String testClassesArg = null;
        boolean logOnly = false;

        if (arguments != null) {
            // Test class name passed as an argument should override any meta-data declaration.
            testClassesArg = arguments.getString(ARGUMENT_TEST_CLASS);
            mDebug = getBooleanArgument(arguments, "debug");
            mJustCount = getBooleanArgument(arguments, "count");
            mSuiteAssignmentMode = getBooleanArgument(arguments, "suiteAssignment");
            mPackageOfTests = arguments.getString(ARGUMENT_TEST_PACKAGE);
            testSizePredicate = getSizePredicateFromArg(
                    arguments.getString(ARGUMENT_TEST_SIZE_PREDICATE));
            testAnnotationPredicate = getAnnotationPredicate(
                    arguments.getString(ARGUMENT_ANNOTATION));
            testNotAnnotationPredicate = getNotAnnotationPredicate(
                    arguments.getString(ARGUMENT_NOT_ANNOTATION));

            logOnly = getBooleanArgument(arguments, ARGUMENT_LOG_ONLY);
            mCoverage = getBooleanArgument(arguments, "coverage");
            mCoverageFilePath = arguments.getString("coverageFile");

            try {
                Object delay = arguments.get(ARGUMENT_DELAY_MSEC);  // Accept either string or int
                if (delay != null) mDelayMsec = Integer.parseInt(delay.toString());
            } catch (NumberFormatException e) {
                Log.e(LOG_TAG, "Invalid delay_msec parameter", e);
            }
        }

        TestSuiteBuilder testSuiteBuilder = new TestSuiteBuilder(getClass().getName(),
                getTargetContext().getClassLoader());

        if (testSizePredicate != null) {
            testSuiteBuilder.addRequirements(testSizePredicate);
        }
        if (testAnnotationPredicate != null) {
            testSuiteBuilder.addRequirements(testAnnotationPredicate);
        }
        if (testNotAnnotationPredicate != null) {
            testSuiteBuilder.addRequirements(testNotAnnotationPredicate);
        }

        if (testClassesArg == null) {
            if (mPackageOfTests != null) {
                testSuiteBuilder.includePackages(mPackageOfTests);
            } else {
                TestSuite testSuite = getTestSuite();
                if (testSuite != null) {
                    testSuiteBuilder.addTestSuite(testSuite);
                } else {
                    // No package or class bundle arguments were supplied, and no test suite was
                    // provided, so add all tests in the application.
                    testSuiteBuilder.includePackages("");
                }
            }
        } else {
            parseTestClasses(testClassesArg, testSuiteBuilder);
        }

        testSuiteBuilder.addRequirements(getBuilderRequirements());

        mTestRunner = getAndroidTestRunner();
        mTestRunner.setContext(getTargetContext());
        mTestRunner.setInstrumentation(this);
        mTestRunner.setSkipExecution(logOnly);
        mTestRunner.setTest(testSuiteBuilder.build());
        mTestCount = mTestRunner.getTestCases().size();
        if (mSuiteAssignmentMode) {
            mTestRunner.addTestListener(new SuiteAssignmentPrinter());
        } else {
            WatcherResultPrinter resultPrinter = new WatcherResultPrinter(mTestCount);
            mTestRunner.addTestListener(new TestPrinter("TestRunner", false));
            mTestRunner.addTestListener(resultPrinter);
            mTestRunner.setPerformanceResultsWriter(resultPrinter);
        }
        start();
    }

    /**
     * Get the arguments passed to this instrumentation.
     *
     * @return the Bundle object
     */
    public Bundle getArguments() {
        return mArguments;
    }

    /**
     * Add a {@link TestListener}
     * @hide
     */
    protected void addTestListener(TestListener listener) {
        if (mTestRunner != null && listener != null) {
            mTestRunner.addTestListener(listener);
        }
    }

    List<Predicate<TestMethod>> getBuilderRequirements() {
        return new ArrayList<Predicate<TestMethod>>();
    }

    /**
     * Parses and loads the specified set of test classes
     *
     * @param testClassArg - comma-separated list of test classes and methods
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClasses(String testClassArg, TestSuiteBuilder testSuiteBuilder) {
        String[] testClasses = testClassArg.split(",");
        for (String testClass : testClasses) {
            parseTestClass(testClass, testSuiteBuilder);
        }
    }

    /**
     * Parse and load the given test class and, optionally, method
     *
     * @param testClassName - full package name of test class and optionally method to add.
     *        Expected format: com.android.TestClass#testMethod
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) {
        int methodSeparatorIndex = testClassName.indexOf('#');
        String testMethodName = null;

        if (methodSeparatorIndex > 0) {
            testMethodName = testClassName.substring(methodSeparatorIndex + 1);
            testClassName = testClassName.substring(0, methodSeparatorIndex);
        }
        testSuiteBuilder.addTestClassByName(testClassName, testMethodName, getTargetContext());
    }

    protected AndroidTestRunner getAndroidTestRunner() {
        return new AndroidTestRunner();
    }

    private boolean getBooleanArgument(Bundle arguments, String tag) {
        String tagString = arguments.getString(tag);
        return tagString != null && Boolean.parseBoolean(tagString);
    }

    /*
     * Returns the size predicate object, corresponding to the "size" argument value.
     */
    private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) {

        if (SMALL_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_SMALL;
        } else if (MEDIUM_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_MEDIUM;
        } else if (LARGE_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_LARGE;
        } else {
            return null;
        }
    }

    /**
     * Returns the test predicate object, corresponding to the annotation class value provided via
     * the {@link #ARGUMENT_ANNOTATION} argument.
     *
     * @return the predicate or <code>null</code>
     */
    private Predicate<TestMethod> getAnnotationPredicate(String annotationClassName) {
        Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
        if (annotationClass != null) {
            return new HasAnnotation(annotationClass);
        }
        return null;
    }

    /**
     * Returns the negative test predicate object, corresponding to the annotation class value
     * provided via the {@link #ARGUMENT_NOT_ANNOTATION} argument.
     *
     * @return the predicate or <code>null</code>
     */
    private Predicate<TestMethod> getNotAnnotationPredicate(String annotationClassName) {
        Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
        if (annotationClass != null) {
            return Predicates.not(new HasAnnotation(annotationClass));
        }
        return null;
    }

    /**
     * Helper method to return the annotation class with specified name
     *
     * @param annotationClassName the fully qualified name of the class
     * @return the annotation class or <code>null</code>
     */
    private Class<? extends Annotation> getAnnotationClass(String annotationClassName) {
        if (annotationClassName == null) {
            return null;
        }
        try {
            Class<?> annotationClass = Class.forName(annotationClassName);
            if (annotationClass.isAnnotation()) {
                return (Class<? extends Annotation>) annotationClass;
            } else {
                Log.e(LOG_TAG, String.format("Provided annotation value %s is not an Annotation",
                        annotationClassName));
            }
        } catch (ClassNotFoundException e) {
            Log.e(LOG_TAG, String.format("Could not find class for specified annotation %s",
                    annotationClassName));
        }
        return null;
    }

    /**
     * Initialize the current thread as a looper.
     * <p/>
     * Exposed for unit testing.
     */
    void prepareLooper() {
        Looper.prepare();
    }

    @Override
    public void onStart() {
        prepareLooper();

        if (mJustCount) {
            mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount);
            finish(Activity.RESULT_OK, mResults);
        } else {
            if (mDebug) {
                Debug.waitForDebugger();
            }

            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            PrintStream writer = new PrintStream(byteArrayOutputStream);
            try {
                StringResultPrinter resultPrinter = new StringResultPrinter(writer);

                mTestRunner.addTestListener(resultPrinter);

                long startTime = System.currentTimeMillis();
                mTestRunner.runTest();
                long runTime = System.currentTimeMillis() - startTime;

                resultPrinter.printResult(mTestRunner.getTestResult(), runTime);
            } catch (Throwable t) {
                // Catch all exceptions so a more verbose error message can be output.
                writer.println(String.format("Test run aborted due to unexpected exception: %s",
                                t.getMessage()));
                t.printStackTrace(writer);
            } finally {
                mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\nTest results for %s=%s",
                        mTestRunner.getTestClassName(),
                        byteArrayOutputStream.toString()));

                if (mCoverage) {
                    generateCoverageReport();
                }
                writer.close();

                finish(Activity.RESULT_OK, mResults);
            }
        }
    }

    public TestSuite getTestSuite() {
        return getAllTests();
    }

    /**
     * Override this to define all of the tests to run in your package.
     */
    public TestSuite getAllTests() {
        return null;
    }

    /**
     * Override this to provide access to the class loader of your package.
     */
    public ClassLoader getLoader() {
        return null;
    }

    private void generateCoverageReport() {
        // use reflection to call emma dump coverage method, to avoid
        // always statically compiling against emma jar
        String coverageFilePath = getCoverageFilePath();
        File coverageFile = new File(coverageFilePath);
        try {
            Class<?> emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
            Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
                    coverageFile.getClass(), boolean.class, boolean.class);

            dumpCoverageMethod.invoke(null, coverageFile, false, false);
            // output path to generated coverage file so it can be parsed by a test harness if
            // needed
            mResults.putString(REPORT_KEY_COVERAGE_PATH, coverageFilePath);
            // also output a more user friendly msg
            final String currentStream = mResults.getString(
                    Instrumentation.REPORT_KEY_STREAMRESULT);
            mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("%s\nGenerated code coverage data to %s", currentStream,
                coverageFilePath));
        } catch (ClassNotFoundException e) {
            reportEmmaError("Is emma jar on classpath?", e);
        } catch (SecurityException e) {
            reportEmmaError(e);
        } catch (NoSuchMethodException e) {
            reportEmmaError(e);
        } catch (IllegalArgumentException e) {
            reportEmmaError(e);
        } catch (IllegalAccessException e) {
            reportEmmaError(e);
        } catch (InvocationTargetException e) {
            reportEmmaError(e);
        }
    }

    private String getCoverageFilePath() {
        if (mCoverageFilePath == null) {
            return getTargetContext().getFilesDir().getAbsolutePath() + File.separator +
                   DEFAULT_COVERAGE_FILE_NAME;
        } else {
            return mCoverageFilePath;
        }
    }

    private void reportEmmaError(Exception e) {
        reportEmmaError("", e);
    }

    private void reportEmmaError(String hint, Exception e) {
        String msg = "Failed to generate emma coverage. " + hint;
        Log.e(LOG_TAG, msg, e);
        mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg);
    }

    // TODO kill this, use status() and prettyprint model for better output
    private class StringResultPrinter extends ResultPrinter {

        public StringResultPrinter(PrintStream writer) {
            super(writer);
        }

        public synchronized void printResult(TestResult result, long runTime) {
            printHeader(runTime);
            printFooter(result);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher about
     * which suite each test belongs to.
     */
    private class SuiteAssignmentPrinter implements TestListener {

        private Bundle mTestResult;
        private long mStartTime;
        private long mEndTime;
        private boolean mTimingValid;

        public SuiteAssignmentPrinter() {
        }

        /**
         * Send a status for the start of each test, so long tests can be seen as "running".
         */
        public void startTest(Test test) {
            mTimingValid = true;
            mStartTime = System.currentTimeMillis();
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            float runTime;
            String assignmentSuite;
            mEndTime = System.currentTimeMillis();
            mTestResult = new Bundle();

            if (!mTimingValid || mStartTime < 0) {
                assignmentSuite = "NA";
                runTime = -1;
            } else {
                runTime = mEndTime - mStartTime;
                if (runTime < SMALL_SUITE_MAX_RUNTIME
                        && !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
                    assignmentSuite = SMALL_SUITE;
                } else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) {
                    assignmentSuite = MEDIUM_SUITE;
                } else {
                    assignmentSuite = LARGE_SUITE;
                }
            }
            // Clear mStartTime so that we can verify that it gets set next time.
            mStartTime = -1;

            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    test.getClass().getName() + "#" + ((TestCase) test).getName()
                    + "\nin " + assignmentSuite + " suite\nrunTime: "
                    + String.valueOf(runTime) + "\n");
            mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
            mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite);

            sendStatus(0, mTestResult);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher
     */
    private class WatcherResultPrinter implements TestListener, PerformanceResultsWriter {
        private final Bundle mResultTemplate;
        Bundle mTestResult;
        int mTestNum = 0;
        int mTestResultCode = 0;
        String mTestClass = null;
        PerformanceCollector mPerfCollector = new PerformanceCollector();
        boolean mIsTimedTest = false;
        boolean mIncludeDetailedStats = false;

        public WatcherResultPrinter(int numTests) {
            mResultTemplate = new Bundle();
            mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
        }

        /**
         * Send a status for the start of each test, so long tests can be seen
         * as "running".
         */
        public void startTest(Test test) {
            String testClass = test.getClass().getName();
            String testName = ((TestCase) test).getName();
            mTestResult = new Bundle(mResultTemplate);
            mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
            mTestResult.putString(REPORT_KEY_NAME_TEST, testName);
            mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
            // pretty printing
            if (testClass != null && !testClass.equals(mTestClass)) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\n%s:", testClass));
                mTestClass = testClass;
            } else {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "");
            }

            Method testMethod = null;
            try {
                testMethod = test.getClass().getMethod(testName);
                // Report total number of iterations, if test is repetitive
                if (testMethod.isAnnotationPresent(RepetitiveTest.class)) {
                    int numIterations = testMethod.getAnnotation(
                        RepetitiveTest.class).numIterations();
                    mTestResult.putInt(REPORT_KEY_NUM_ITERATIONS, numIterations);
                }
            } catch (NoSuchMethodException e) {
                // Ignore - the test with the given name does not exist. This will be handled
                // during test execution.
            }

            // The delay_msec parameter is normally used to provide buffers of idle time
            // for power measurement purposes. To make sure there is a delay before and after
            // every test in a suite, we delay *after* every test (see endTest below) and also
            // delay *before* the first test. So, delay test1 delay test2 delay.

            try {
                if (mTestNum == 1) Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }

            sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
            mTestResultCode = 0;

            mIsTimedTest = false;
            mIncludeDetailedStats = false;
            try {
                // Look for TimedTest annotation on both test class and test method
                if (testMethod != null && testMethod.isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = testMethod.getAnnotation(
                            TimedTest.class).includeDetailedStats();
                } else if (test.getClass().isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = test.getClass().getAnnotation(
                            TimedTest.class).includeDetailedStats();
                }
            } catch (SecurityException e) {
                // ignore - the test with given name cannot be accessed. Will be handled during
                // test execution
            }

            if (mIsTimedTest && mIncludeDetailedStats) {
                mPerfCollector.beginSnapshot("");
            } else if (mIsTimedTest) {
                mPerfCollector.startTiming("");
            }
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_ERROR;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("\nError in %s:\n%s",
                    ((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("\nFailure in %s:\n%s",
                    ((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            if (mIsTimedTest && mIncludeDetailedStats) {
                mTestResult.putAll(mPerfCollector.endSnapshot());
            } else if (mIsTimedTest) {
                writeStopTiming(mPerfCollector.stopTiming(""));
            }

            if (mTestResultCode == 0) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
            }
            sendStatus(mTestResultCode, mTestResult);

            try { // Sleep after every test, if specified
                Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }
        }

        public void writeBeginSnapshot(String label) {
            // Do nothing
        }

        public void writeEndSnapshot(Bundle results) {
            // Copy all snapshot data fields into mResults, which is outputted
            // via Instrumentation.finish
            mResults.putAll(results);
        }

        public void writeStartTiming(String label) {
            // Do nothing
        }

        public void writeStopTiming(Bundle results) {
            // Copy results into mTestResult by flattening list of iterations,
            // which is outputted via WatcherResultPrinter.endTest
            int i = 0;
            for (Parcelable p :
                    results.getParcelableArrayList(PerformanceCollector.METRIC_KEY_ITERATIONS)) {
                Bundle iteration = (Bundle) p;
                String index = "iteration" + i + ".";
                mTestResult.putString(index + PerformanceCollector.METRIC_KEY_LABEL,
                        iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_CPU_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_EXECUTION_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
                i++;
            }
        }

        public void writeMeasurement(String label, long value) {
            mTestResult.putLong(label, value);
        }

        public void writeMeasurement(String label, float value) {
            mTestResult.putFloat(label, value);
        }

        public void writeMeasurement(String label, String value) {
            mTestResult.putString(label, value);
        }

        // TODO report the end of the cycle
    }
}