/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.tradefed.testtype;

import com.android.ddmlib.FileListingService;
import com.android.ddmlib.Log;
import com.android.ddmlib.testrunner.ITestRunListener;
import com.android.tradefed.config.Option;
import com.android.tradefed.config.OptionClass;
import com.android.tradefed.device.DeviceNotAvailableException;
import com.android.tradefed.device.IFileEntry;
import com.android.tradefed.device.ITestDevice;
import com.android.tradefed.result.ITestInvocationListener;

import com.google.common.annotations.VisibleForTesting;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * A Test that runs a native benchmark test executable on given device.
 * <p/>
 * It uses {@link NativeBenchmarkTestParser} to parse out the average operation time vs delay
 * between operations, and reports those results to the {@link ITestInvocationListener}s.
42 */ 43 @OptionClass(alias = "native-benchmark") 44 public class NativeBenchmarkTest implements IDeviceTest, IRemoteTest { 45 46 private static final String LOG_TAG = "NativeStressTest"; 47 static final String DEFAULT_TEST_PATH = "data/nativebenchmark"; 48 49 // The metrics key names to report to listeners 50 static final String AVG_OP_TIME_KEY_PREFIX = "avg-operation-time"; 51 static final String ITERATION_KEY = "iterations"; 52 53 private ITestDevice mDevice = null; 54 55 @Option(name = "native-benchmark-device-path", 56 description="The path on the device where native stress tests are located.") 57 private String mDeviceTestPath = DEFAULT_TEST_PATH; 58 59 @Option(name = "benchmark-module-name", 60 description="The name of the native benchmark test module to run. " + 61 "If not specified all tests in --native-benchmark-device-path will be run.") 62 private String mTestModule = null; 63 64 @Option(name = "benchmark-run-name", 65 description="Optional name to pass to test reporters. If unspecified, will use" + 66 "--benchmark-module-name.") 67 private String mReportRunName = null; 68 69 @Option(name = "iterations", 70 description="The number of benchmark test iterations per run.") 71 private int mNumIterations = 1000; 72 73 @Option(name = "delay-per-run", 74 description="The delay between each benchmark iteration, in micro seconds." 
+ 75 "Multiple values may be given to specify multiple runs with different delay values.") 76 // TODO: change units to seconds for consistency with native benchmark module input 77 private Collection<Integer> mDelays = new ArrayList<Integer>(); 78 79 @Option(name = "max-run-time", description = 80 "The maximum time to allow for one benchmark run in ms.") 81 private int mMaxRunTime = 5 * 60 * 1000; 82 83 @Option(name = "server-cpu", 84 description="Optionally specify a server cpu.") 85 private int mServerCpu = 1; 86 87 @Option(name = "client-cpu", 88 description="Optionally specify a client cpu.") 89 private int mClientCpu = 1; 90 91 @Option(name = "max-cpu-freq", 92 description="Flag to force device cpu to run at maximum frequency.") 93 private boolean mMaxCpuFreq = false; 94 95 96 // TODO: consider sharing code with {@link GTest} and {@link NativeStressTest} 97 98 /** 99 * {@inheritDoc} 100 */ 101 @Override 102 public void setDevice(ITestDevice device) { 103 mDevice = device; 104 } 105 106 /** 107 * {@inheritDoc} 108 */ 109 @Override 110 public ITestDevice getDevice() { 111 return mDevice; 112 } 113 114 /** 115 * Set the Android native benchmark test module to run. 116 * 117 * @param moduleName The name of the native test module to run 118 */ 119 public void setModuleName(String moduleName) { 120 mTestModule = moduleName; 121 } 122 123 /** 124 * Get the Android native benchmark test module to run. 125 * 126 * @return the name of the native test module to run, or null if not set 127 */ 128 public String getModuleName() { 129 return mTestModule; 130 } 131 132 /** 133 * Set the number of iterations to execute per run 134 */ 135 void setNumIterations(int iterations) { 136 mNumIterations = iterations; 137 } 138 139 /** 140 * Set the delay values per run 141 */ 142 void addDelaysPerRun(Collection<Integer> delays) { 143 mDelays.addAll(delays); 144 } 145 146 /** 147 * Gets the path where native benchmark tests live on the device. 
148 * 149 * @return The path on the device where the native tests live. 150 */ 151 @VisibleForTesting 152 String getTestPath() { 153 StringBuilder testPath = new StringBuilder(mDeviceTestPath); 154 if (mTestModule != null) { 155 testPath.append(FileListingService.FILE_SEPARATOR); 156 testPath.append(mTestModule); 157 } 158 return testPath.toString(); 159 } 160 161 /** 162 * Executes all native benchmark tests in a folder as well as in all subfolders recursively. 163 * 164 * @param rootEntry The root folder to begin searching for native tests 165 * @param testDevice The device to run tests on 166 * @param listener the run listener 167 * @throws DeviceNotAvailableException 168 */ 169 @VisibleForTesting 170 void doRunAllTestsInSubdirectory(IFileEntry rootEntry, ITestDevice testDevice, 171 ITestRunListener listener) throws DeviceNotAvailableException { 172 173 if (rootEntry.isDirectory()) { 174 // recursively run tests in all subdirectories 175 for (IFileEntry childEntry : rootEntry.getChildren(true)) { 176 doRunAllTestsInSubdirectory(childEntry, testDevice, listener); 177 } 178 } else { 179 // assume every file is a valid benchmark test binary. 180 // use name of file as run name 181 String runName = (mReportRunName == null ? 
rootEntry.getName() : mReportRunName); 182 String fullPath = rootEntry.getFullEscapedPath(); 183 if (mDelays.size() == 0) { 184 // default to one run with no delay 185 mDelays.add(0); 186 } 187 188 // force file to be executable 189 testDevice.executeShellCommand(String.format("chmod 755 %s", fullPath)); 190 long startTime = System.currentTimeMillis(); 191 192 listener.testRunStarted(runName, 0); 193 Map<String, String> metricMap = new HashMap<String, String>(); 194 metricMap.put(ITERATION_KEY, Integer.toString(mNumIterations)); 195 try { 196 for (Integer delay : mDelays) { 197 NativeBenchmarkTestParser resultParser = createResultParser(runName); 198 // convert delay to seconds 199 double delayFloat = ((double)delay)/1000000; 200 Log.i(LOG_TAG, String.format("Running %s for %d iterations with delay %f", 201 rootEntry.getName(), mNumIterations, delayFloat)); 202 String cmd = String.format("%s -n %d -d %f -c %d -s %d", fullPath, 203 mNumIterations, delayFloat, mClientCpu, mServerCpu); 204 Log.i(LOG_TAG, String.format("Running native benchmark test on %s: %s", 205 mDevice.getSerialNumber(), cmd)); 206 testDevice.executeShellCommand(cmd, resultParser, 207 mMaxRunTime, TimeUnit.MILLISECONDS, 0); 208 addMetric(metricMap, resultParser, delay); 209 } 210 // TODO: is catching exceptions, and reporting testRunFailed necessary? 
211 } finally { 212 final long elapsedTime = System.currentTimeMillis() - startTime; 213 listener.testRunEnded(elapsedTime, metricMap); 214 } 215 } 216 } 217 218 /** 219 * Adds the operation time metric for a run with given delay 220 * 221 * @param metricMap 222 * @param resultParser 223 * @param delay 224 */ 225 private void addMetric(Map<String, String> metricMap, NativeBenchmarkTestParser resultParser, 226 Integer delay) { 227 String metricKey = String.format("%s-delay%d", AVG_OP_TIME_KEY_PREFIX, delay); 228 // temporarily convert seconds to microseconds, as some reporters cannot handle small values 229 metricMap.put(metricKey, Double.toString(resultParser.getAvgOperationTime()*1000000)); 230 } 231 232 /** 233 * Factory method for creating a {@link NativeBenchmarkTestParser} that parses test output 234 * <p/> 235 * Exposed so unit tests can mock. 236 * 237 * @param runName 238 * @return a {@link NativeBenchmarkTestParser} 239 */ 240 NativeBenchmarkTestParser createResultParser(String runName) { 241 return new NativeBenchmarkTestParser(runName); 242 } 243 244 /** 245 * {@inheritDoc} 246 */ 247 @Override 248 public void run(ITestInvocationListener listener) throws DeviceNotAvailableException { 249 if (mDevice == null) { 250 throw new IllegalArgumentException("Device has not been set"); 251 } 252 253 String testPath = getTestPath(); 254 IFileEntry nativeTestDirectory = mDevice.getFileEntry(testPath); 255 if (nativeTestDirectory == null) { 256 Log.w(LOG_TAG, String.format("Could not find native benchmark test directory %s in %s!", 257 testPath, mDevice.getSerialNumber())); 258 return; 259 } 260 if (mMaxCpuFreq) { 261 mDevice.executeShellCommand( 262 "cat /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq > " + 263 "/sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq"); 264 } 265 doRunAllTestsInSubdirectory(nativeTestDirectory, mDevice, listener); 266 if (mMaxCpuFreq) { 267 // revert to normal 268 mDevice.executeShellCommand( 269 "cat 
/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_min_freq > " + 270 "/sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq"); 271 } 272 273 } 274 } 275