/* * Copyright (C) 2007 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.test; import com.android.internal.util.Predicate; import com.android.internal.util.Predicates; import android.app.Activity; import android.app.Instrumentation; import android.os.Bundle; import android.os.Debug; import android.os.Looper; import android.os.Parcelable; import android.os.PerformanceCollector; import android.os.PerformanceCollector.PerformanceResultsWriter; import android.test.suitebuilder.TestMethod; import android.test.suitebuilder.TestPredicates; import android.test.suitebuilder.TestSuiteBuilder; import android.test.suitebuilder.annotation.HasAnnotation; import android.test.suitebuilder.annotation.LargeTest; import android.util.Log; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.PrintStream; import java.lang.annotation.Annotation; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.List; import junit.framework.AssertionFailedError; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestListener; import junit.framework.TestResult; import junit.framework.TestSuite; import junit.runner.BaseTestRunner; import junit.textui.ResultPrinter; /** * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against * an Android package (application). * *
For more information about application testing, read the * <a href="{@docRoot}guide/topics/testing/index.html">Testing</a> developer guide.
*null
*/
private Predicatenull
*/
private Predicatenull
*/
private Class extends Annotation> getAnnotationClass(String annotationClassName) {
if (annotationClassName == null) {
return null;
}
try {
Class> annotationClass = Class.forName(annotationClassName);
if (annotationClass.isAnnotation()) {
return (Class extends Annotation>)annotationClass;
} else {
Log.e(LOG_TAG, String.format("Provided annotation value %s is not an Annotation",
annotationClassName));
}
} catch (ClassNotFoundException e) {
Log.e(LOG_TAG, String.format("Could not find class for specified annotation %s",
annotationClassName));
}
return null;
}
/**
 * Initialize the current thread as a looper.
 *
 * Exposed for unit testing.
 */
void prepareLooper() {
Looper.prepare();
}
/**
 * Instrumentation entry point: either reports only the total test count
 * (when count-only mode was requested) or runs the tests, capturing the
 * textual results and reporting them via {@link #finish}.
 */
@Override
public void onStart() {
prepareLooper();
if (mJustCount) {
// Count-only mode: report the number of tests and exit immediately.
mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount);
finish(Activity.RESULT_OK, mResults);
} else {
if (mDebug) {
// Block until a debugger attaches before running any test.
Debug.waitForDebugger();
}
// Capture the textui printer's output in memory so it can be bundled.
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
PrintStream writer = new PrintStream(byteArrayOutputStream);
try {
StringResultPrinter resultPrinter = new StringResultPrinter(writer);
mTestRunner.addTestListener(resultPrinter);
long startTime = System.currentTimeMillis();
mTestRunner.runTest();
long runTime = System.currentTimeMillis() - startTime;
resultPrinter.printResult(mTestRunner.getTestResult(), runTime);
} catch (Throwable t) {
// catch all exceptions so a more verbose error message can be outputted
writer.println(String.format("Test run aborted due to unexpected exception: %s",
t.getMessage()));
t.printStackTrace(writer);
} finally {
// Always report whatever output was produced, even for an aborted run.
mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
String.format("\nTest results for %s=%s",
mTestRunner.getTestClassName(),
byteArrayOutputStream.toString()));
if (mCoverage) {
generateCoverageReport();
}
writer.close();
finish(Activity.RESULT_OK, mResults);
}
}
}
/**
 * Returns the suite of tests this runner executes, delegating to
 * {@link #getAllTests()}.
 *
 * @return the suite provided by the subclass, or {@code null} if none
 */
public TestSuite getTestSuite() {
    final TestSuite allTests = getAllTests();
    return allTests;
}
/**
 * Override this to define all of the tests to run in your package.
 *
 * @return the suite of tests to run; the base implementation returns
 *         {@code null} (no suite supplied)
 */
public TestSuite getAllTests() {
return null;
}
/**
 * Override this to provide access to the class loader of your package.
 *
 * @return the package's class loader; the base implementation returns
 *         {@code null} (no loader supplied)
 */
public ClassLoader getLoader() {
return null;
}
/**
 * Dumps EMMA code-coverage data to a file and records both the file path and
 * a user-friendly message in {@link #mResults}.
 *
 * Reflection is used to invoke {@code com.vladium.emma.rt.RT#dumpCoverageData}
 * so this class never statically compiles against the emma jar; any failure
 * is reported through {@link #reportEmmaError} rather than crashing the run.
 */
private void generateCoverageReport() {
    // use reflection to call emma dump coverage method, to avoid
    // always statically compiling against emma jar
    String coverageFilePath = getCoverageFilePath();
    // File is already imported at the top of this file; no need for the
    // fully-qualified java.io.File name.
    File coverageFile = new File(coverageFilePath);
    try {
        Class<?> emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
        // Signature: dumpCoverageData(File, boolean merge, boolean stopDataCollection).
        // File.class is clearer than (and identical to) coverageFile.getClass().
        Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
                File.class, boolean.class, boolean.class);
        dumpCoverageMethod.invoke(null, coverageFile, false, false);
        // output path to generated coverage file so it can be parsed by a test harness if
        // needed
        mResults.putString(REPORT_KEY_COVERAGE_PATH, coverageFilePath);
        // also output a more user friendly msg, appended to the existing stream result
        final String currentStream = mResults.getString(
                Instrumentation.REPORT_KEY_STREAMRESULT);
        mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("%s\nGenerated code coverage data to %s", currentStream,
                coverageFilePath));
    } catch (ClassNotFoundException e) {
        reportEmmaError("Is emma jar on classpath?", e);
    } catch (SecurityException e) {
        reportEmmaError(e);
    } catch (NoSuchMethodException e) {
        reportEmmaError(e);
    } catch (IllegalArgumentException e) {
        reportEmmaError(e);
    } catch (IllegalAccessException e) {
        reportEmmaError(e);
    } catch (InvocationTargetException e) {
        reportEmmaError(e);
    }
}
/**
 * Returns the path the EMMA coverage file is written to: the caller-supplied
 * path when one was set, otherwise a default file inside the target
 * application's private files directory.
 */
private String getCoverageFilePath() {
    if (mCoverageFilePath != null) {
        return mCoverageFilePath;
    }
    return getTargetContext().getFilesDir().getAbsolutePath() + File.separator
            + DEFAULT_COVERAGE_FILE_NAME;
}
/**
 * Reports an EMMA coverage failure with no additional hint text.
 *
 * @param e the exception raised while dumping coverage data
 */
private void reportEmmaError(Exception e) {
reportEmmaError("", e);
}
/**
 * Logs an EMMA coverage failure and records the error in the result bundle.
 *
 * @param hint extra context appended to the error message (may be empty)
 * @param e the exception raised while dumping coverage data
 */
private void reportEmmaError(String hint, Exception e) {
String msg = "Failed to generate emma coverage. " + hint;
Log.e(LOG_TAG, msg, e);
// NOTE(review): this overwrites any existing stream-result text instead of
// appending (generateCoverageReport appends) — confirm that is intended.
mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg);
}
// TODO kill this, use status() and prettyprint model for better output
/**
 * A {@link ResultPrinter} that writes only the run header (elapsed time) and
 * footer (pass/fail summary) to the supplied stream.
 */
private class StringResultPrinter extends ResultPrinter {
public StringResultPrinter(PrintStream writer) {
super(writer);
}
// Prints the summary header and footer for a completed test run.
public synchronized void printResult(TestResult result, long runTime) {
printHeader(runTime);
printFooter(result);
}
}
/**
 * This class sends status reports back to the IInstrumentationWatcher about
 * which suite each test belongs.
 */
private class SuiteAssignmentPrinter implements TestListener {
// Result bundle built and sent for each finished test.
private Bundle mTestResult;
// Wall-clock timestamps (ms) bracketing the current test.
private long mStartTime;
private long mEndTime;
// Cleared when an error/failure occurs, making the measured time meaningless.
private boolean mTimingValid;
public SuiteAssignmentPrinter() {
}
/**
 * send a status for the start of a each test, so long tests can be seen as "running"
 */
public void startTest(Test test) {
mTimingValid = true;
mStartTime = System.currentTimeMillis();
}
/**
 * Invalidates timing: an errored test's runtime cannot drive suite assignment.
 *
 * @see junit.framework.TestListener#addError(Test, Throwable)
 */
public void addError(Test test, Throwable t) {
mTimingValid = false;
}
/**
 * Invalidates timing: a failed test's runtime cannot drive suite assignment.
 *
 * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
 */
public void addFailure(Test test, AssertionFailedError t) {
mTimingValid = false;
}
/**
 * Assigns the finished test to a size suite based on its measured runtime
 * and sends the assignment to the instrumentation watcher.
 *
 * @see junit.framework.TestListener#endTest(Test)
 */
public void endTest(Test test) {
float runTime;
String assignmentSuite;
mEndTime = System.currentTimeMillis();
mTestResult = new Bundle();
if (!mTimingValid || mStartTime < 0) {
// Timing unusable: error/failure occurred, or startTest never ran.
assignmentSuite = "NA";
runTime = -1;
} else {
runTime = mEndTime - mStartTime;
// Note: InstrumentationTestCase subclasses are never assigned to the
// small suite, regardless of how fast they ran.
if (runTime < SMALL_SUITE_MAX_RUNTIME
&& !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
assignmentSuite = SMALL_SUITE;
} else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) {
assignmentSuite = MEDIUM_SUITE;
} else {
assignmentSuite = LARGE_SUITE;
}
}
// Clear mStartTime so that we can verify that it gets set next time.
mStartTime = -1;
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
test.getClass().getName() + "#" + ((TestCase) test).getName()
+ "\nin " + assignmentSuite + " suite\nrunTime: "
+ String.valueOf(runTime) + "\n");
mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite);
sendStatus(0, mTestResult);
}
}
/**
 * This class sends status reports back to the IInstrumentationWatcher
 */
private class WatcherResultPrinter implements TestListener, PerformanceResultsWriter {
// Template bundle copied as the starting point of each per-test report.
private final Bundle mResultTemplate;
// Bundle for the currently-running test; rebuilt in startTest.
Bundle mTestResult;
// 1-based index of the current test within the run.
int mTestNum = 0;
// 0 while passing; set to an error/failure code by addError/addFailure.
int mTestResultCode = 0;
// Class name of the previously reported test, used for pretty-printing.
String mTestClass = null;
// Collects timing/performance data for tests marked as timed tests.
PerformanceCollector mPerfCollector = new PerformanceCollector();
boolean mIsTimedTest = false;
boolean mIncludeDetailedStats = false;
public WatcherResultPrinter(int numTests) {
mResultTemplate = new Bundle();
mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
}
/**
 * send a status for the start of a each test, so long tests can be seen
 * as "running"
 */
public void startTest(Test test) {
String testClass = test.getClass().getName();
String testName = ((TestCase)test).getName();
mTestResult = new Bundle(mResultTemplate);
mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
mTestResult.putString(REPORT_KEY_NAME_TEST, testName);
mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
// pretty printing
if (testClass != null && !testClass.equals(mTestClass)) {
// First test of a new class: print the class name as a header.
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
String.format("\n%s:", testClass));
mTestClass = testClass;
} else {
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "");
}
Method testMethod = null;
try {
testMethod = test.getClass().getMethod(testName);
// Report total number of iterations, if test is repetitive
if (testMethod.isAnnotationPresent(RepetitiveTest.class)) {
int numIterations = testMethod.getAnnotation(
RepetitiveTest.class).numIterations();
mTestResult.putInt(REPORT_KEY_NUM_ITERATIONS, numIterations);
}
} catch (NoSuchMethodException e) {
// ignore- the test with given name does not exist. Will be handled during test
// execution
}
// The delay_msec parameter is normally used to provide buffers of idle time
// for power measurement purposes. To make sure there is a delay before and after
// every test in a suite, we delay *after* every test (see endTest below) and also
// delay *before* the first test. So, delay test1 delay test2 delay.
try {
if (mTestNum == 1) Thread.sleep(mDelayMsec);
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
mTestResultCode = 0;
mIsTimedTest = false;
mIncludeDetailedStats = false;
try {
// Look for TimedTest annotation on both test class and test method
if (testMethod != null && testMethod.isAnnotationPresent(TimedTest.class)) {
// Method-level annotation takes precedence over the class-level one.
mIsTimedTest = true;
mIncludeDetailedStats = testMethod.getAnnotation(
TimedTest.class).includeDetailedStats();
} else if (test.getClass().isAnnotationPresent(TimedTest.class)) {
mIsTimedTest = true;
mIncludeDetailedStats = test.getClass().getAnnotation(
TimedTest.class).includeDetailedStats();
}
} catch (SecurityException e) {
// ignore - the test with given name cannot be accessed. Will be handled during
// test execution
}
if (mIsTimedTest && mIncludeDetailedStats) {
mPerfCollector.beginSnapshot("");
} else if (mIsTimedTest) {
mPerfCollector.startTiming("");
}
}
/**
 * Records the error's stack trace and marks the test result as an error.
 *
 * @see junit.framework.TestListener#addError(Test, Throwable)
 */
public void addError(Test test, Throwable t) {
mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
mTestResultCode = REPORT_VALUE_RESULT_ERROR;
// pretty printing
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
String.format("\nError in %s:\n%s",
((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
}
/**
 * Records the failure's stack trace and marks the test result as a failure.
 *
 * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
 */
public void addFailure(Test test, AssertionFailedError t) {
mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
// pretty printing
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
String.format("\nFailure in %s:\n%s",
((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
}
/**
 * Finishes performance collection (for timed tests), sends the final status
 * for the test, then applies the configured post-test delay.
 *
 * @see junit.framework.TestListener#endTest(Test)
 */
public void endTest(Test test) {
if (mIsTimedTest && mIncludeDetailedStats) {
mTestResult.putAll(mPerfCollector.endSnapshot());
} else if (mIsTimedTest) {
writeStopTiming(mPerfCollector.stopTiming(""));
}
if (mTestResultCode == 0) {
// "." marks a passing test in the pretty-printed stream output.
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
}
sendStatus(mTestResultCode, mTestResult);
try { // Sleep after every test, if specified
Thread.sleep(mDelayMsec);
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
}
public void writeBeginSnapshot(String label) {
// Do nothing
}
public void writeEndSnapshot(Bundle results) {
// Copy all snapshot data fields into mResults, which is outputted
// via Instrumentation.finish
mResults.putAll(results);
}
public void writeStartTiming(String label) {
// Do nothing
}
public void writeStopTiming(Bundle results) {
// Copy results into mTestResult by flattening list of iterations,
// which is outputted via WatcherResultPrinter.endTest
int i = 0;
for (Parcelable p :
results.getParcelableArrayList(PerformanceCollector.METRIC_KEY_ITERATIONS)) {
Bundle iteration = (Bundle)p;
// Keys are prefixed "iteration<i>." so all iterations fit in one bundle.
String index = "iteration" + i + ".";
mTestResult.putString(index + PerformanceCollector.METRIC_KEY_LABEL,
iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_CPU_TIME,
iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME));
mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_EXECUTION_TIME,
iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
i++;
}
}
// Records a single long-valued measurement into the current test's result bundle.
public void writeMeasurement(String label, long value) {
mTestResult.putLong(label, value);
}
// Records a single float-valued measurement into the current test's result bundle.
public void writeMeasurement(String label, float value) {
mTestResult.putFloat(label, value);
}
// Records a single string-valued measurement into the current test's result bundle.
public void writeMeasurement(String label, String value) {
mTestResult.putString(label, value);
}
// TODO report the end of the cycle
}
}