      1 /*
      2  * Copyright (C) 2007 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package android.test;
     18 
     19 import com.android.internal.util.Predicate;
     20 import com.android.internal.util.Predicates;
     21 
     22 import android.app.Activity;
     23 import android.app.Instrumentation;
     24 import android.os.Bundle;
     25 import android.os.Debug;
     26 import android.os.Looper;
     27 import android.os.Parcelable;
     28 import android.os.PerformanceCollector;
     29 import android.os.PerformanceCollector.PerformanceResultsWriter;
     30 import android.test.suitebuilder.TestMethod;
     31 import android.test.suitebuilder.TestPredicates;
     32 import android.test.suitebuilder.TestSuiteBuilder;
     33 import android.test.suitebuilder.annotation.HasAnnotation;
     34 import android.test.suitebuilder.annotation.LargeTest;
     35 import android.util.Log;
     36 
     37 import java.io.ByteArrayOutputStream;
     38 import java.io.File;
     39 import java.io.PrintStream;
     40 import java.lang.annotation.Annotation;
     41 import java.lang.reflect.InvocationTargetException;
     42 import java.lang.reflect.Method;
     43 import java.util.ArrayList;
     44 import java.util.List;
     45 
     46 import junit.framework.AssertionFailedError;
     47 import junit.framework.Test;
     48 import junit.framework.TestCase;
     49 import junit.framework.TestListener;
     50 import junit.framework.TestResult;
     51 import junit.framework.TestSuite;
     52 import junit.runner.BaseTestRunner;
     53 import junit.textui.ResultPrinter;
     54 
     55 /**
     56  * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against
     57  * an Android package (application).
     58  *
     59  * <div class="special reference">
     60  * <h3>Developer Guides</h3>
     61  * <p>For more information about application testing, read the
     62  * <a href="{@docRoot}guide/topics/testing/index.html">Testing</a> developer guide.</p>
     63  * </div>
     64  *
     65  * <h3>Typical Usage</h3>
     66  * <ol>
     67  * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests
     68  * against the classes in your package.  Typically these are subclassed from:
     69  *   <ul><li>{@link android.test.ActivityInstrumentationTestCase2}</li>
     70  *   <li>{@link android.test.ActivityUnitTestCase}</li>
     71  *   <li>{@link android.test.AndroidTestCase}</li>
     72  *   <li>{@link android.test.ApplicationTestCase}</li>
     73  *   <li>{@link android.test.InstrumentationTestCase}</li>
     74  *   <li>{@link android.test.ProviderTestCase}</li>
     75  *   <li>{@link android.test.ServiceTestCase}</li>
     76  *   <li>{@link android.test.SingleLaunchActivityTestCase}</li></ul>
     77  * <li>Set the <code>android:targetPackage</code> attribute of the <code>&lt;instrumentation&gt;</code>
     78  * element in the test package's manifest. You should set the attribute value
     79  * to the package name of the target application under test.
     80  * <li>Run the instrumentation using "adb shell am instrument -w",
     81  * with no optional arguments, to run all tests (except performance tests).
     82  * <li>Run the instrumentation using "adb shell am instrument -w",
     83  * with the argument '-e func true' to run all functional tests. These are tests that derive from
     84  * {@link android.test.InstrumentationTestCase}.
     85  * <li>Run the instrumentation using "adb shell am instrument -w",
     86  * with the argument '-e unit true' to run all unit tests. These are tests that <i>do not</i> derive
     87  * from {@link android.test.InstrumentationTestCase} (and are not performance tests).
     88  * <li>Run the instrumentation using "adb shell am instrument -w",
     89  * with the argument '-e class' set to run an individual {@link junit.framework.TestCase}.
     90  * </ol>
     91  * <p/>
     92  * <b>Running all tests:</b> adb shell am instrument -w
     93  * com.android.foo/android.test.InstrumentationTestRunner
     94  * <p/>
     95  * <b>Running all small tests:</b> adb shell am instrument -w
     96  * -e size small
     97  * com.android.foo/android.test.InstrumentationTestRunner
     98  * <p/>
     99  * <b>Running all medium tests:</b> adb shell am instrument -w
    100  * -e size medium
    101  * com.android.foo/android.test.InstrumentationTestRunner
    102  * <p/>
    103  * <b>Running all large tests:</b> adb shell am instrument -w
    104  * -e size large
    105  * com.android.foo/android.test.InstrumentationTestRunner
    106  * <p/>
    107  * <b>Filter the test run to tests with a given annotation:</b> adb shell am instrument -w
    108  * -e annotation com.android.foo.MyAnnotation
    109  * com.android.foo/android.test.InstrumentationTestRunner
    110  * <p/>
    111  * If used with other options, the filters are combined, so the resulting test run contains only
    112  * tests that satisfy every filter; e.g. "-e size large -e annotation com.android.foo.MyAnnotation"
    113  * will run only tests with both the {@link LargeTest} and "com.android.foo.MyAnnotation" annotations.
    114  * <p/>
    115  * <b>Filter the test run to tests <i>without</i> a given annotation:</b> adb shell am instrument -w
    116  * -e notAnnotation com.android.foo.MyAnnotation
    117  * com.android.foo/android.test.InstrumentationTestRunner
    118  * <p/>
    119  * <b>Running all tests in a single test class:</b> adb shell am instrument -w
    120  * -e class com.android.foo.FooTest
    121  * com.android.foo/android.test.InstrumentationTestRunner
    122  * <p/>
    123  * <b>Running a single test:</b> adb shell am instrument -w
    124  * -e class com.android.foo.FooTest#testFoo
    125  * com.android.foo/android.test.InstrumentationTestRunner
    126  * <p/>
    127  * <b>Running multiple tests:</b> adb shell am instrument -w
    128  * -e class com.android.foo.FooTest,com.android.foo.TooTest
    129  * com.android.foo/android.test.InstrumentationTestRunner
    130  * <p/>
    131  * <b>Running all tests in a Java package:</b> adb shell am instrument -w
    132  * -e package com.android.foo.subpkg
    133  *  com.android.foo/android.test.InstrumentationTestRunner
    134  * <p/>
    135  * <b>Including performance tests:</b> adb shell am instrument -w
    136  * -e perf true
    137  * com.android.foo/android.test.InstrumentationTestRunner
    138  * <p/>
    139  * <b>To debug your tests, set a breakpoint in your code and pass:</b>
    140  * -e debug true
    141  * <p/>
    142  * <b>To run in 'log only' mode:</b>
    143  * -e log true
    144  * This option will load and iterate through all test classes and methods, but will bypass actual
    145  * test execution. It is useful for quickly obtaining information about the tests to be executed by
    146  * an instrumentation command.
    147  * <p/>
    148  * <b>To generate EMMA code coverage:</b>
    149  * -e coverage true
    150  * Note: this requires an EMMA-instrumented build. By default, the code coverage results file
    151  * will be saved as coverage.ec in the target application's files directory, unless overridden
    152  * by the coverageFile flag (see below).
    153  * <p/>
    154  * <b>To specify the EMMA code coverage results file path:</b>
    155  * -e coverageFile /sdcard/myFile.ec
    156  * <br/>
    157  * in addition to the other arguments. (A sketch of a typical test case follows this comment.)
    158  */
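        /*
         * Illustrative sketch of the typical usage described above, using hypothetical names
         * (com.android.foo, FooActivity, FooActivityTest). The test package's AndroidManifest.xml
         * would declare this runner roughly as:
         *
         *   <instrumentation android:name="android.test.InstrumentationTestRunner"
         *                    android:targetPackage="com.android.foo"
         *                    android:label="Foo tests" />
         *
         * and a functional test case, assuming imports of android.test.ActivityInstrumentationTestCase2
         * and android.test.suitebuilder.annotation.LargeTest, might look like:
         *
         *   public class FooActivityTest extends ActivityInstrumentationTestCase2<FooActivity> {
         *       public FooActivityTest() {
         *           super(FooActivity.class);
         *       }
         *
         *       @LargeTest
         *       public void testActivityLaunches() {
         *           // getActivity() launches FooActivity through this instrumentation.
         *           assertNotNull(getActivity());
         *       }
         *   }
         *
         * It would then be run with:
         *   adb shell am instrument -w com.android.foo/android.test.InstrumentationTestRunner
         */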
    159 
    160 /* (not JavaDoc)
    161  * Although not necessary in most cases, another way to use this class is to extend it and have the
    162  * derived class return the desired test suite from the {@link #getTestSuite()} method. The test
    163  * suite returned from this method will be used if no target class is defined in the meta-data or
    164  * command line argument parameters. If a derived class is used it needs to be added as an
    165  * instrumentation to the AndroidManifest.xml and the command to run it would look like:
    166  * <p/>
    167  * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i>
    168  * <p/>
    169  * Where <i>com.android.FooInstrumentationTestRunner</i> is the derived class (a minimal sketch follows this comment).
    170  *
    171  * This model is used by many existing app tests, but can probably be deprecated.
    172  */
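        /*
         * Illustrative sketch of such a derived runner, assuming hypothetical JUnit 3 test classes
         * FooTest and BarTest in the test package:
         *
         *   public class FooInstrumentationTestRunner extends InstrumentationTestRunner {
         *       @Override
         *       public TestSuite getTestSuite() {
         *           // Used only when no "class" or "package" argument is supplied on the command line.
         *           TestSuite suite = new TestSuite("Foo tests");
         *           suite.addTestSuite(FooTest.class);
         *           suite.addTestSuite(BarTest.class);
         *           return suite;
         *       }
         *   }
         *
         * The derived runner must be declared with its own <instrumentation> element in the test
         * package's AndroidManifest.xml, just like the base runner.
         */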
    173 public class InstrumentationTestRunner extends Instrumentation implements TestSuiteProvider {
    174 
    175     /** @hide */
    176     public static final String ARGUMENT_TEST_CLASS = "class";
    177     /** @hide */
    178     public static final String ARGUMENT_TEST_PACKAGE = "package";
    179     /** @hide */
    180     public static final String ARGUMENT_TEST_SIZE_PREDICATE = "size";
    181     /** @hide */
    182     public static final String ARGUMENT_DELAY_MSEC = "delay_msec";
    183 
    184     private static final String SMALL_SUITE = "small";
    185     private static final String MEDIUM_SUITE = "medium";
    186     private static final String LARGE_SUITE = "large";
    187 
    188     private static final String ARGUMENT_LOG_ONLY = "log";
    189     /** @hide */
    190     static final String ARGUMENT_ANNOTATION = "annotation";
    191     /** @hide */
    192     static final String ARGUMENT_NOT_ANNOTATION = "notAnnotation";
    193 
    194     /**
    195      * This constant defines the maximum allowed runtime (in ms) for a test included in the "small"
    196      * suite. It is used to make an educated guess about which suite an unlabeled test belongs to.
    197      */
    198     private static final float SMALL_SUITE_MAX_RUNTIME = 100;
    199 
    200     /**
    201      * This constant defines the maximum allowed runtime (in ms) for a test included in the
    202      * "medium" suite. It is used to make an educated guess at what suite an unlabeled test belongs.
    203      * "medium" suite. It is used to make an educated guess about which suite an unlabeled test belongs to.
    204     private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000;
    205 
    206     /**
    207      * The following keys are used in the status bundle to provide structured reports to
    208      * an IInstrumentationWatcher.
    209      */
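            /*
             * Illustrative sketch (hypothetical values, not captured output): when run with
             * "adb shell am instrument -r -w ...", these bundle keys typically surface as raw
             * status lines of roughly this form for a test com.android.foo.FooTest#testFoo:
             *
             *   INSTRUMENTATION_STATUS: id=InstrumentationTestRunner
             *   INSTRUMENTATION_STATUS: numtests=42
             *   INSTRUMENTATION_STATUS: class=com.android.foo.FooTest
             *   INSTRUMENTATION_STATUS: test=testFoo
             *   INSTRUMENTATION_STATUS: current=1
             *   INSTRUMENTATION_STATUS_CODE: 1
             */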
    210 
    211     /**
    212      * This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER},
    213      * identifies InstrumentationTestRunner as the source of the report.  This is sent with all
    214      * status messages.
    215      */
    216     public static final String REPORT_VALUE_ID = "InstrumentationTestRunner";
    217     /**
    218      * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
    219      * identifies the total number of tests that are being run.  This is sent with all status
    220      * messages.
    221      */
    222     public static final String REPORT_KEY_NUM_TOTAL = "numtests";
    223     /**
    224      * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
    225      * identifies the sequence number of the current test.  This is sent with any status message
    226      * describing a specific test being started or completed.
    227      */
    228     public static final String REPORT_KEY_NUM_CURRENT = "current";
    229     /**
    230      * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
    231      * identifies the name of the current test class.  This is sent with any status message
    232      * describing a specific test being started or completed.
    233      */
    234     public static final String REPORT_KEY_NAME_CLASS = "class";
    235     /**
    236      * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
    237      * identifies the name of the current test.  This is sent with any status message
    238      * describing a specific test being started or completed.
    239      */
    240     public static final String REPORT_KEY_NAME_TEST = "test";
    241     /**
    242      * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
    243      * reports the run time in seconds of the current test.
    244      */
    245     private static final String REPORT_KEY_RUN_TIME = "runtime";
    246     /**
    247      * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
    248      * reports the number of total iterations of the current test.
    249      */
    250     private static final String REPORT_KEY_NUM_ITERATIONS = "numiterations";
    251     /**
    252      * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
    253      * reports the guessed suite assignment for the current test.
    254      */
    255     private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment";
    256     /**
    257      * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
    258      * identifies the path to the generated code coverage file.
    259      */
    260     private static final String REPORT_KEY_COVERAGE_PATH = "coverageFilePath";
    261 
    262     /**
    263      * The test is starting.
    264      */
    265     public static final int REPORT_VALUE_RESULT_START = 1;
    266     /**
    267      * The test completed successfully.
    268      */
    269     public static final int REPORT_VALUE_RESULT_OK = 0;
    270     /**
    271      * The test completed with an error.
    272      */
    273     public static final int REPORT_VALUE_RESULT_ERROR = -1;
    274     /**
    275      * The test completed with a failure.
    276      */
    277     public static final int REPORT_VALUE_RESULT_FAILURE = -2;
    278     /**
    279      * If included in the status bundle sent to an IInstrumentationWatcher, this key
    280      * identifies a stack trace describing an error or failure.  This is sent with any status
    281      * message describing a specific test being completed.
    282      */
    283     public static final String REPORT_KEY_STACK = "stack";
    284 
    285     // Default file name for code coverage
    286     private static final String DEFAULT_COVERAGE_FILE_NAME = "coverage.ec";
    287 
    288     private static final String LOG_TAG = "InstrumentationTestRunner";
    289 
    290     private final Bundle mResults = new Bundle();
    291     private Bundle mArguments;
    292     private AndroidTestRunner mTestRunner;
    293     private boolean mDebug;
    294     private boolean mJustCount;
    295     private boolean mSuiteAssignmentMode;
    296     private int mTestCount;
    297     private String mPackageOfTests;
    298     private boolean mCoverage;
    299     private String mCoverageFilePath;
    300     private int mDelayMsec;
    301 
    302     @Override
    303     public void onCreate(Bundle arguments) {
    304         super.onCreate(arguments);
    305         mArguments = arguments;
    306 
    307         // Apk paths used to search for test classes when using TestSuiteBuilders.
    308         String[] apkPaths =
    309                 {getTargetContext().getPackageCodePath(), getContext().getPackageCodePath()};
    310         ClassPathPackageInfoSource.setApkPaths(apkPaths);
    311 
    312         Predicate<TestMethod> testSizePredicate = null;
    313         Predicate<TestMethod> testAnnotationPredicate = null;
    314         Predicate<TestMethod> testNotAnnotationPredicate = null;
    315         String testClassesArg = null;
    316         boolean logOnly = false;
    317 
    318         if (arguments != null) {
    319             // Test class name passed as an argument should override any meta-data declaration.
    320             testClassesArg = arguments.getString(ARGUMENT_TEST_CLASS);
    321             mDebug = getBooleanArgument(arguments, "debug");
    322             mJustCount = getBooleanArgument(arguments, "count");
    323             mSuiteAssignmentMode = getBooleanArgument(arguments, "suiteAssignment");
    324             mPackageOfTests = arguments.getString(ARGUMENT_TEST_PACKAGE);
    325             testSizePredicate = getSizePredicateFromArg(
    326                     arguments.getString(ARGUMENT_TEST_SIZE_PREDICATE));
    327             testAnnotationPredicate = getAnnotationPredicate(
    328                     arguments.getString(ARGUMENT_ANNOTATION));
    329             testNotAnnotationPredicate = getNotAnnotationPredicate(
    330                     arguments.getString(ARGUMENT_NOT_ANNOTATION));
    331 
    332             logOnly = getBooleanArgument(arguments, ARGUMENT_LOG_ONLY);
    333             mCoverage = getBooleanArgument(arguments, "coverage");
    334             mCoverageFilePath = arguments.getString("coverageFile");
    335 
    336             try {
    337                 Object delay = arguments.get(ARGUMENT_DELAY_MSEC);  // Accept either string or int
    338                 if (delay != null) mDelayMsec = Integer.parseInt(delay.toString());
    339             } catch (NumberFormatException e) {
    340                 Log.e(LOG_TAG, "Invalid delay_msec parameter", e);
    341             }
    342         }
    343 
    344         TestSuiteBuilder testSuiteBuilder = new TestSuiteBuilder(getClass().getName(),
    345                 getTargetContext().getClassLoader());
    346 
    347         if (testSizePredicate != null) {
    348             testSuiteBuilder.addRequirements(testSizePredicate);
    349         }
    350         if (testAnnotationPredicate != null) {
    351             testSuiteBuilder.addRequirements(testAnnotationPredicate);
    352         }
    353         if (testNotAnnotationPredicate != null) {
    354             testSuiteBuilder.addRequirements(testNotAnnotationPredicate);
    355         }
    356 
    357         if (testClassesArg == null) {
    358             if (mPackageOfTests != null) {
    359                 testSuiteBuilder.includePackages(mPackageOfTests);
    360             } else {
    361                 TestSuite testSuite = getTestSuite();
    362                 if (testSuite != null) {
    363                     testSuiteBuilder.addTestSuite(testSuite);
    364                 } else {
    365                     // no package or class bundle arguments were supplied, and no test suite
    366                     // provided, so add all tests in the application
    367                     testSuiteBuilder.includePackages("");
    368                 }
    369             }
    370         } else {
    371             parseTestClasses(testClassesArg, testSuiteBuilder);
    372         }
    373 
    374         testSuiteBuilder.addRequirements(getBuilderRequirements());
    375 
    376         mTestRunner = getAndroidTestRunner();
    377         mTestRunner.setContext(getTargetContext());
    378         mTestRunner.setInstrumentation(this);
    379         mTestRunner.setSkipExecution(logOnly);
    380         mTestRunner.setTest(testSuiteBuilder.build());
    381         mTestCount = mTestRunner.getTestCases().size();
    382         if (mSuiteAssignmentMode) {
    383             mTestRunner.addTestListener(new SuiteAssignmentPrinter());
    384         } else {
    385             WatcherResultPrinter resultPrinter = new WatcherResultPrinter(mTestCount);
    386             mTestRunner.addTestListener(new TestPrinter("TestRunner", false));
    387             mTestRunner.addTestListener(resultPrinter);
    388             mTestRunner.setPerformanceResultsWriter(resultPrinter);
    389         }
    390         start();
    391     }
    392 
    393     /**
    394      * Get the arguments passed to this instrumentation.
    395      *
    396      * @return the Bundle object
    397      */
    398     public Bundle getArguments() {
    399         return mArguments;
    400     }
    401 
    402     /**
    403      * Add a {@link TestListener}
    404      * @hide
    405      */
    406     protected void addTestListener(TestListener listener){
    407         if(mTestRunner!=null && listener!=null){
    408             mTestRunner.addTestListener(listener);
    409         }
    410     }
    411 
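            /**
             * Returns additional {@link Predicate} requirements applied to the {@link TestSuiteBuilder}.
             * The default implementation returns an empty list.
             */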
    412     List<Predicate<TestMethod>> getBuilderRequirements() {
    413         return new ArrayList<Predicate<TestMethod>>();
    414     }
    415 
    416     /**
    417      * Parses and loads the specified set of test classes
    418      *
    419      * @param testClassArg - comma-separated list of test classes and methods
    420      * @param testSuiteBuilder - builder to add tests to
    421      */
    422     private void parseTestClasses(String testClassArg, TestSuiteBuilder testSuiteBuilder) {
    423         String[] testClasses = testClassArg.split(",");
    424         for (String testClass : testClasses) {
    425             parseTestClass(testClass, testSuiteBuilder);
    426         }
    427     }
    428 
    429     /**
    430      * Parse and load the given test class and, optionally, method
    431      *
    432      * @param testClassName - fully qualified name of the test class and, optionally, a method to add.
    433      *        Expected format: com.android.TestClass#testMethod
    434      * @param testSuiteBuilder - builder to add tests to
    435      */
    436     private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) {
    437         int methodSeparatorIndex = testClassName.indexOf('#');
    438         String testMethodName = null;
    439 
    440         if (methodSeparatorIndex > 0) {
    441             testMethodName = testClassName.substring(methodSeparatorIndex + 1);
    442             testClassName = testClassName.substring(0, methodSeparatorIndex);
    443         }
    444         testSuiteBuilder.addTestClassByName(testClassName, testMethodName, getTargetContext());
    445     }
    446 
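            /**
             * Returns the {@link AndroidTestRunner} used to execute the test suite. Exposed so that
             * subclasses can provide a customized runner.
             */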
    447     protected AndroidTestRunner getAndroidTestRunner() {
    448         return new AndroidTestRunner();
    449     }
    450 
    451     private boolean getBooleanArgument(Bundle arguments, String tag) {
    452         String tagString = arguments.getString(tag);
    453         return tagString != null && Boolean.parseBoolean(tagString);
    454     }
    455 
    456     /*
    457      * Returns the size predicate object, corresponding to the "size" argument value.
    458      */
    459     private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) {
    460 
    461         if (SMALL_SUITE.equals(sizeArg)) {
    462             return TestPredicates.SELECT_SMALL;
    463         } else if (MEDIUM_SUITE.equals(sizeArg)) {
    464             return TestPredicates.SELECT_MEDIUM;
    465         } else if (LARGE_SUITE.equals(sizeArg)) {
    466             return TestPredicates.SELECT_LARGE;
    467         } else {
    468             return null;
    469         }
    470     }
    471 
    472     /**
    473      * Returns the test predicate object, corresponding to the annotation class value provided via
    474      * the {@link #ARGUMENT_ANNOTATION} argument.
    475      *
    476      * @return the predicate or <code>null</code>
    477      */
    478     private Predicate<TestMethod> getAnnotationPredicate(String annotationClassName) {
    479         Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
    480         if (annotationClass != null) {
    481             return new HasAnnotation(annotationClass);
    482         }
    483         return null;
    484     }
    485 
    486     /**
    487      * Returns the negative test predicate object, corresponding to the annotation class value
    488      * provided via the {@link #ARGUMENT_NOT_ANNOTATION} argument.
    489      *
    490      * @return the predicate or <code>null</code>
    491      */
    492     private Predicate<TestMethod> getNotAnnotationPredicate(String annotationClassName) {
    493         Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
    494         if (annotationClass != null) {
    495             return Predicates.not(new HasAnnotation(annotationClass));
    496         }
    497         return null;
    498     }
    499 
    500     /**
    501      * Helper method to return the annotation class with the specified name
    502      *
    503      * @param annotationClassName the fully qualified name of the class
    504      * @return the annotation class or <code>null</code>
    505      */
    506     private Class<? extends Annotation> getAnnotationClass(String annotationClassName) {
    507         if (annotationClassName == null) {
    508             return null;
    509         }
    510         try {
    511            Class<?> annotationClass = Class.forName(annotationClassName);
    512            if (annotationClass.isAnnotation()) {
    513                return (Class<? extends Annotation>)annotationClass;
    514            } else {
    515                Log.e(LOG_TAG, String.format("Provided annotation value %s is not an Annotation",
    516                        annotationClassName));
    517            }
    518         } catch (ClassNotFoundException e) {
    519             Log.e(LOG_TAG, String.format("Could not find class for specified annotation %s",
    520                     annotationClassName));
    521         }
    522         return null;
    523     }
    524 
    525     /**
    526      * Initialize the current thread as a looper.
    527      * <p/>
    528      * Exposed for unit testing.
    529      */
    530     void prepareLooper() {
    531         Looper.prepare();
    532     }
    533 
    534     @Override
    535     public void onStart() {
    536         prepareLooper();
    537 
    538         if (mJustCount) {
    539             mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
    540             mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount);
    541             finish(Activity.RESULT_OK, mResults);
    542         } else {
    543             if (mDebug) {
    544                 Debug.waitForDebugger();
    545             }
    546 
    547             ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    548             PrintStream writer = new PrintStream(byteArrayOutputStream);
    549             try {
    550                 StringResultPrinter resultPrinter = new StringResultPrinter(writer);
    551 
    552                 mTestRunner.addTestListener(resultPrinter);
    553 
    554                 long startTime = System.currentTimeMillis();
    555                 mTestRunner.runTest();
    556                 long runTime = System.currentTimeMillis() - startTime;
    557 
    558                 resultPrinter.printResult(mTestRunner.getTestResult(), runTime);
    559             } catch (Throwable t) {
    560                 // catch all exceptions so a more verbose error message can be output
    561                 writer.println(String.format("Test run aborted due to unexpected exception: %s",
    562                                 t.getMessage()));
    563                 t.printStackTrace(writer);
    564             } finally {
    565                 mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
    566                         String.format("\nTest results for %s=%s",
    567                         mTestRunner.getTestClassName(),
    568                         byteArrayOutputStream.toString()));
    569 
    570                 if (mCoverage) {
    571                     generateCoverageReport();
    572                 }
    573                 writer.close();
    574 
    575                 finish(Activity.RESULT_OK, mResults);
    576             }
    577         }
    578     }
    579 
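            /**
             * Returns the test suite to run, as required by {@link TestSuiteProvider}. The default
             * implementation delegates to {@link #getAllTests()}; subclasses may override this to
             * supply a suite directly.
             */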
    580     public TestSuite getTestSuite() {
    581         return getAllTests();
    582     }
    583 
    584     /**
    585      * Override this to define all of the tests to run in your package.
    586      */
    587     public TestSuite getAllTests() {
    588         return null;
    589     }
    590 
    591     /**
    592      * Override this to provide access to the class loader of your package.
    593      */
    594     public ClassLoader getLoader() {
    595         return null;
    596     }
    597 
    598     private void generateCoverageReport() {
    599         // Use reflection to call the EMMA dumpCoverageData method, to avoid
    600         // statically compiling against the EMMA jar.
    601         String coverageFilePath = getCoverageFilePath();
    602         java.io.File coverageFile = new java.io.File(coverageFilePath);
    603         try {
    604             Class<?> emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
    605             Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
    606                     coverageFile.getClass(), boolean.class, boolean.class);
    607 
    608             dumpCoverageMethod.invoke(null, coverageFile, false, false);
    609             // output path to generated coverage file so it can be parsed by a test harness if
    610             // needed
    611             mResults.putString(REPORT_KEY_COVERAGE_PATH, coverageFilePath);
    612             // also output a more user-friendly message
    613             final String currentStream = mResults.getString(
    614                     Instrumentation.REPORT_KEY_STREAMRESULT);
    615             mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
    616                 String.format("%s\nGenerated code coverage data to %s", currentStream,
    617                 coverageFilePath));
    618         } catch (ClassNotFoundException e) {
    619             reportEmmaError("Is emma jar on classpath?", e);
    620         } catch (SecurityException e) {
    621             reportEmmaError(e);
    622         } catch (NoSuchMethodException e) {
    623             reportEmmaError(e);
    624         } catch (IllegalArgumentException e) {
    625             reportEmmaError(e);
    626         } catch (IllegalAccessException e) {
    627             reportEmmaError(e);
    628         } catch (InvocationTargetException e) {
    629             reportEmmaError(e);
    630         }
    631     }
    632 
    633     private String getCoverageFilePath() {
    634         if (mCoverageFilePath == null) {
    635             return getTargetContext().getFilesDir().getAbsolutePath() + File.separator +
    636                    DEFAULT_COVERAGE_FILE_NAME;
    637         } else {
    638             return mCoverageFilePath;
    639         }
    640     }
    641 
    642     private void reportEmmaError(Exception e) {
    643         reportEmmaError("", e);
    644     }
    645 
    646     private void reportEmmaError(String hint, Exception e) {
    647         String msg = "Failed to generate emma coverage. " + hint;
    648         Log.e(LOG_TAG, msg, e);
    649         mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg);
    650     }
    651 
    652     // TODO kill this, use status() and prettyprint model for better output
    653     private class StringResultPrinter extends ResultPrinter {
    654 
    655         public StringResultPrinter(PrintStream writer) {
    656             super(writer);
    657         }
    658 
    659         public synchronized void printResult(TestResult result, long runTime) {
    660             printHeader(runTime);
    661             printFooter(result);
    662         }
    663     }
    664 
    665     /**
    666      * This class sends status reports back to the IInstrumentationWatcher about
    667      * which suite each test belongs to.
    668      */
    669     private class SuiteAssignmentPrinter implements TestListener {
    670 
    671         private Bundle mTestResult;
    672         private long mStartTime;
    673         private long mEndTime;
    674         private boolean mTimingValid;
    675 
    676         public SuiteAssignmentPrinter() {
    677         }
    678 
    679         /**
    680          * Send a status for the start of each test, so that long tests can be seen as "running".
    681          */
    682         public void startTest(Test test) {
    683             mTimingValid = true;
    684             mStartTime = System.currentTimeMillis();
    685         }
    686 
    687         /**
    688          * @see junit.framework.TestListener#addError(Test, Throwable)
    689          */
    690         public void addError(Test test, Throwable t) {
    691             mTimingValid = false;
    692         }
    693 
    694         /**
    695          * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
    696          */
    697         public void addFailure(Test test, AssertionFailedError t) {
    698             mTimingValid = false;
    699         }
    700 
    701         /**
    702          * @see junit.framework.TestListener#endTest(Test)
    703          */
    704         public void endTest(Test test) {
    705             float runTime;
    706             String assignmentSuite;
    707             mEndTime = System.currentTimeMillis();
    708             mTestResult = new Bundle();
    709 
    710             if (!mTimingValid || mStartTime < 0) {
    711                 assignmentSuite = "NA";
    712                 runTime = -1;
    713             } else {
    714                 runTime = mEndTime - mStartTime;
    715                 if (runTime < SMALL_SUITE_MAX_RUNTIME
    716                         && !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
    717                     assignmentSuite = SMALL_SUITE;
    718                 } else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) {
    719                     assignmentSuite = MEDIUM_SUITE;
    720                 } else {
    721                     assignmentSuite = LARGE_SUITE;
    722                 }
    723             }
    724             // Clear mStartTime so that we can verify that it gets set next time.
    725             mStartTime = -1;
    726 
    727             mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
    728                     test.getClass().getName() + "#" + ((TestCase) test).getName()
    729                     + "\nin " + assignmentSuite + " suite\nrunTime: "
    730                     + String.valueOf(runTime) + "\n");
    731             mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
    732             mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite);
    733 
    734             sendStatus(0, mTestResult);
    735         }
    736     }
    737 
    738     /**
    739      * This class sends status reports back to the IInstrumentationWatcher
    740      */
    741     private class WatcherResultPrinter implements TestListener, PerformanceResultsWriter {
    742         private final Bundle mResultTemplate;
    743         Bundle mTestResult;
    744         int mTestNum = 0;
    745         int mTestResultCode = 0;
    746         String mTestClass = null;
    747         PerformanceCollector mPerfCollector = new PerformanceCollector();
    748         boolean mIsTimedTest = false;
    749         boolean mIncludeDetailedStats = false;
    750 
    751         public WatcherResultPrinter(int numTests) {
    752             mResultTemplate = new Bundle();
    753             mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
    754             mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
    755         }
    756 
    757         /**
    758          * Send a status for the start of each test, so that long tests can be seen
    759          * as "running"
    760          */
    761         public void startTest(Test test) {
    762             String testClass = test.getClass().getName();
    763             String testName = ((TestCase)test).getName();
    764             mTestResult = new Bundle(mResultTemplate);
    765             mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
    766             mTestResult.putString(REPORT_KEY_NAME_TEST, testName);
    767             mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
    768             // pretty printing
    769             if (testClass != null && !testClass.equals(mTestClass)) {
    770                 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
    771                         String.format("\n%s:", testClass));
    772                 mTestClass = testClass;
    773             } else {
    774                 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "");
    775             }
    776 
    777             Method testMethod = null;
    778             try {
    779                 testMethod = test.getClass().getMethod(testName);
    780                 // Report total number of iterations, if test is repetitive
    781                 if (testMethod.isAnnotationPresent(RepetitiveTest.class)) {
    782                     int numIterations = testMethod.getAnnotation(
    783                         RepetitiveTest.class).numIterations();
    784                     mTestResult.putInt(REPORT_KEY_NUM_ITERATIONS, numIterations);
    785                 }
    786             } catch (NoSuchMethodException e) {
    787                 // ignore - the test with the given name does not exist; it will be handled during test
    788                 // execution
    789             }
    790 
    791             // The delay_msec parameter is normally used to provide buffers of idle time
    792             // for power measurement purposes. To make sure there is a delay before and after
    793             // every test in a suite, we delay *after* every test (see endTest below) and also
    794             // delay *before* the first test. So the sequence is: delay, test1, delay, test2, delay.
    795 
    796             try {
    797                 if (mTestNum == 1) Thread.sleep(mDelayMsec);
    798             } catch (InterruptedException e) {
    799                 throw new IllegalStateException(e);
    800             }
    801 
    802             sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
    803             mTestResultCode = 0;
    804 
    805             mIsTimedTest = false;
    806             mIncludeDetailedStats = false;
    807             try {
    808                 // Look for TimedTest annotation on both test class and test method
    809                 if (testMethod != null && testMethod.isAnnotationPresent(TimedTest.class)) {
    810                     mIsTimedTest = true;
    811                     mIncludeDetailedStats = testMethod.getAnnotation(
    812                             TimedTest.class).includeDetailedStats();
    813                 } else if (test.getClass().isAnnotationPresent(TimedTest.class)) {
    814                     mIsTimedTest = true;
    815                     mIncludeDetailedStats = test.getClass().getAnnotation(
    816                             TimedTest.class).includeDetailedStats();
    817                 }
    818             } catch (SecurityException e) {
    819                 // ignore - the test with the given name cannot be accessed; it will be handled during
    820                 // test execution
    821             }
    822 
    823             if (mIsTimedTest && mIncludeDetailedStats) {
    824                 mPerfCollector.beginSnapshot("");
    825             } else if (mIsTimedTest) {
    826                 mPerfCollector.startTiming("");
    827             }
    828         }
    829 
    830         /**
    831          * @see junit.framework.TestListener#addError(Test, Throwable)
    832          */
    833         public void addError(Test test, Throwable t) {
    834             mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
    835             mTestResultCode = REPORT_VALUE_RESULT_ERROR;
    836             // pretty printing
    837             mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
    838                 String.format("\nError in %s:\n%s",
    839                     ((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
    840         }
    841 
    842         /**
    843          * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
    844          */
    845         public void addFailure(Test test, AssertionFailedError t) {
    846             mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
    847             mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
    848             // pretty printing
    849             mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
    850                 String.format("\nFailure in %s:\n%s",
    851                     ((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
    852         }
    853 
    854         /**
    855          * @see junit.framework.TestListener#endTest(Test)
    856          */
    857         public void endTest(Test test) {
    858             if (mIsTimedTest && mIncludeDetailedStats) {
    859                 mTestResult.putAll(mPerfCollector.endSnapshot());
    860             } else if (mIsTimedTest) {
    861                 writeStopTiming(mPerfCollector.stopTiming(""));
    862             }
    863 
    864             if (mTestResultCode == 0) {
    865                 mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
    866             }
    867             sendStatus(mTestResultCode, mTestResult);
    868 
    869             try { // Sleep after every test, if specified
    870                 Thread.sleep(mDelayMsec);
    871             } catch (InterruptedException e) {
    872                 throw new IllegalStateException(e);
    873             }
    874         }
    875 
    876         public void writeBeginSnapshot(String label) {
    877             // Do nothing
    878         }
    879 
    880         public void writeEndSnapshot(Bundle results) {
    881             // Copy all snapshot data fields into mResults, which is output
    882             // via Instrumentation.finish
    883             mResults.putAll(results);
    884         }
    885 
    886         public void writeStartTiming(String label) {
    887             // Do nothing
    888         }
    889 
    890         public void writeStopTiming(Bundle results) {
    891             // Copy results into mTestResult by flattening list of iterations,
    892             // which is output via WatcherResultPrinter.endTest
    893             int i = 0;
    894             for (Parcelable p :
    895                     results.getParcelableArrayList(PerformanceCollector.METRIC_KEY_ITERATIONS)) {
    896                 Bundle iteration = (Bundle)p;
    897                 String index = "iteration" + i + ".";
    898                 mTestResult.putString(index + PerformanceCollector.METRIC_KEY_LABEL,
    899                         iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
    900                 mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_CPU_TIME,
    901                         iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME));
    902                 mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_EXECUTION_TIME,
    903                         iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
    904                 i++;
    905             }
    906         }
    907 
    908         public void writeMeasurement(String label, long value) {
    909             mTestResult.putLong(label, value);
    910         }
    911 
    912         public void writeMeasurement(String label, float value) {
    913             mTestResult.putFloat(label, value);
    914         }
    915 
    916         public void writeMeasurement(String label, String value) {
    917             mTestResult.putString(label, value);
    918         }
    919 
    920         // TODO report the end of the cycle
    921     }
    922 }
    923