Skip to content

Commit

Permalink
feat(testRunner): UnifiedTestRunner and friends (#107)
Browse files Browse the repository at this point in the history
* feat(testRunner): UnifiedTestRunner and friends

1. Setting up the UnifiedTestRunner that will rely on the JUnit
   Platform to auto-discover and execute tests across JUnit 5/4/3 and
   TestNG.
   a. Issuing separate code paths for test discovery (`containsExecutableTest`)
       and test execution (`getExecutableTest`).
   b. Using a different discovery path is useful to answer questions like:
       "Does the testClass have executable tests?", and as such,
       "Can we use the UnifiedTestRunner?"
   c. If the discovery path fails, then the UnifiedTest runner is not viable for
       a given test class.
   d. Handling unforeseen exceptions in UnifiedTestRunner.
   e. Optional test dryRuns: Setting up the bits necessary to start doing dry runs
       when figuring out if a testClass has actual *executable* tests.
2. We give the UnifiedTestRunner first dibs as part of the TestType discovery
     process in AbstractTestRunner.
3. UnifiedTestListenerAdapter is a TestExecutionListener (JUnit Platform)
     implementation that hosts an ITacocoTestListener implementation.
     This is the current approach to listening to test events from a variety
     of test listening implementations (JUnit X, TestNG, etc).
4. Updating pom.xml to pull in a compile-time dependency on JUnit Platform
    a. Also adding runtime dependencies on test engines that conform to the JUnit
        Platform spec.

Test Notes: 
Integration tests that run on spideruci/projects4testing
- bringing back integration-test in pom.xml
- resurrecting and making it aware of projects4testing
    - Cloning spideruci/projects4testing before_install
- updating pom.xml to: pull spideruci/projects4testing; run tacoco-diag.
- Adding spiderMath_JUnit4_single_module
- Disabling mvn -Panalyzer for now.
  • Loading branch information
VijayKrishna authored Apr 7, 2020
1 parent c44f5d3 commit 4851b12
Show file tree
Hide file tree
Showing 6 changed files with 323 additions and 18 deletions.
3 changes: 2 additions & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ matrix:
dist: bionic

before_install:
- git clone https://github.com/spideruci/primitive-hamcrest.git
- git clone https://github.com/spideruci/projects4testing.git
- git clone https://github.com/spideruci/primitive-hamcrest.git
- cd primitive-hamcrest
- mvn install
- cd ..
Expand Down
50 changes: 48 additions & 2 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<junit.jupiter.version>5.6.0</junit.jupiter.version>
<junit4.version>4.12</junit4.version>
</properties>

<name>tacoco the per testcase junit runner</name>
Expand Down Expand Up @@ -147,9 +149,37 @@
<dependency>
<groupId>org.apache.maven.shared</groupId>
<artifactId>maven-invoker</artifactId>
<version>3.0.1</version>
<version>3.0.1</version>
</dependency>
</dependencies>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-launcher</artifactId>
<version>1.6.0</version>
</dependency>

<!--
Runtime dependencies to inject Test Engines
to be used by the UnifiedTestRunner.
-->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit.jupiter.version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<version>${junit.jupiter.version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.github.testng-team</groupId>
<artifactId>testng-junit5</artifactId>
<version>0.0.1</version>
<scope>runtime</scope>
</dependency>
</dependencies>

<build>
<plugins>
Expand Down Expand Up @@ -200,6 +230,22 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.4.0</version>
<executions>
<execution>
<phase>integration-test</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${basedir}/tacoco-diagnoses</executable>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
package org.spideruci.tacoco.testlisteners;

import org.junit.platform.engine.TestExecutionResult;
import org.junit.platform.engine.TestExecutionResult.Status;
import org.junit.platform.launcher.TestExecutionListener;
import org.junit.platform.launcher.TestIdentifier;
import org.junit.platform.launcher.TestPlan;

/**
 * Adapts the JUnit Platform's {@link TestExecutionListener} callbacks onto a
 * Tacoco {@code ITacocoTestListener}, so a single Tacoco listener can observe
 * test events from any Platform-hosted engine (Jupiter, Vintage, TestNG, ...).
 */
public class UnifiedTestListenerAdapter implements TestExecutionListener {

    /** The Tacoco listener that receives the adapted events. */
    private final ITacocoTestListener listener;

    /**
     * @param listener the Tacoco listener to forward Platform events to.
     */
    public UnifiedTestListenerAdapter(final ITacocoTestListener listener) {
        this.listener = listener;
    }

    /**
     * Builds a test name that is unique across engines by combining the
     * human-readable display name with the Platform's unique id.
     */
    private String getUniqueTestName(final TestIdentifier testIdentifier) {
        final String testUid = testIdentifier.getUniqueId();
        final String testName = testIdentifier.getDisplayName();
        return String.format("%s.%s", testName, testUid);
    }

    @Override
    public void executionStarted(final TestIdentifier testIdentifier) {
        // NOTE(review): this fires for containers (classes, engines) as well as
        // individual tests; confirm ITacocoTestListener expects container-level
        // start events too.
        final String testUniqueName = getUniqueTestName(testIdentifier);
        listener.onTestStart(testUniqueName);
    }

    @Override
    public void executionSkipped(final TestIdentifier testIdentifier, final String reason) {
        listener.onTestSkipped();
    }

    @Override
    public void executionFinished(final TestIdentifier testIdentifier, final TestExecutionResult testExecutionResult) {
        final Status status = testExecutionResult.getStatus();

        switch (status) {
            case SUCCESSFUL:
                listener.onTestPassed();
                break;
            case FAILED:
            case ABORTED:
                // ABORTED (e.g. assumption failures) is reported as a failure.
                listener.onTestFailed();
                break;
            default:
                // Future-proofing: an unknown status is neither a pass nor a fail.
                break;
        }

        listener.onTestEnd();
    }

    // @Override was missing on the two test-plan hooks below; they do override
    // default methods of TestExecutionListener, and annotating them guards
    // against silent signature drift across Platform versions.
    @Override
    public void testPlanExecutionStarted(final TestPlan testPlan) {
        listener.onStart();
    }

    @Override
    public void testPlanExecutionFinished(final TestPlan testPlan) {
        listener.onEnd();
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,6 @@

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;

import org.spideruci.tacoco.analysis.AnalysisResults;
Expand All @@ -17,7 +12,7 @@

public abstract class AbstractTestRunner {

public static enum TestType {JUNIT, TESTNG, UNKNOWN};
public static enum TestType {JUNIT, TESTNG, UNIFIED, UNKNOWN};
public static boolean LOGGING = false;


Expand All @@ -31,39 +26,44 @@ public static enum TestType {JUNIT, TESTNG, UNKNOWN};
public abstract Callable<AnalysisResults> getExecutableTest(Class<?> test);
public abstract void printTestRunSummary(AnalysisResults results);

public static AbstractTestRunner getInstance(AbstractBuildProbe probe) {
for(String test : probe.getTestClasses()){
public static AbstractTestRunner getInstance(final AbstractBuildProbe probe) {
for(final String test : probe.getTestClasses()){
try {
switch(getTestType(Class.forName(test))){
case JUNIT:
return new JUnitRunner();
case TESTNG:
return new TestNGRunner();
case UNIFIED:
return new UnifiedTestRunner();
case UNKNOWN:
continue;
}
} catch (ClassNotFoundException e) {
} catch (final ClassNotFoundException e) {
e.printStackTrace();
}
}
return null;
}

private static TestType getTestType(Class<?> test){
private static TestType getTestType(final Class<?> test){

if(test == null) {
if(test == null || Modifier.isAbstract(test.getModifiers())) {
return TestType.UNKNOWN;
}

if(Modifier.isAbstract(test.getModifiers())) {
return TestType.UNKNOWN;

if (UnifiedTestRunner.containsExecutableTest(test)) {
// We are going to give the UnifiedTestRunner first dibs.
// If it is able to find any executable test, then we run with it
// else, fall back on the individual test types
return TestType.UNIFIED; // UnifiedTestRunner
}

if(junit.framework.TestCase.class.isAssignableFrom(test)) {
return TestType.JUNIT; //JUnit3
}

for(Method testMethod : test.getMethods()) {
for(final Method testMethod : test.getMethods()) {
if(testMethod.getAnnotation(org.junit.Test.class) != null){
return TestType.JUNIT; //JUnit4
}
Expand Down
153 changes: 153 additions & 0 deletions src/main/java/org/spideruci/tacoco/testrunners/UnifiedTestRunner.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,153 @@
package org.spideruci.tacoco.testrunners;

import static org.junit.platform.launcher.core.LauncherDiscoveryRequestBuilder.request;
import static org.junit.platform.engine.discovery.DiscoverySelectors.selectClass;

import java.util.concurrent.Callable;

import org.junit.platform.launcher.Launcher;
import org.junit.platform.launcher.LauncherDiscoveryRequest;
import org.junit.platform.launcher.TestPlan;
import org.junit.platform.launcher.core.LauncherFactory;
import org.junit.platform.launcher.listeners.SummaryGeneratingListener;
import org.junit.platform.launcher.listeners.TestExecutionSummary;
import org.junit.platform.launcher.listeners.TestExecutionSummary.Failure;
import org.spideruci.tacoco.analysis.AnalysisResults;
import org.spideruci.tacoco.testlisteners.ITacocoTestListener;
import org.spideruci.tacoco.testlisteners.UnifiedTestListenerAdapter;

/**
 * A test runner that delegates both discovery and execution to the JUnit
 * Platform {@link Launcher}, so a single runner can drive any engine that
 * conforms to the Platform spec (Jupiter, Vintage/JUnit 4, TestNG adapter).
 */
public class UnifiedTestRunner extends AbstractTestRunner {

    // Keys under which results are stored in AnalysisResults.
    private final static String TEST_CLASS_NAME = "test-class-name";
    private final static String TEST_SUMMARY = "test-summary";

    // Shared launcher for this runner instance. Listeners registered via
    // listenThrough() accumulate on this launcher for the runner's lifetime.
    final Launcher launcher = LauncherFactory.create();


    /**
     * Reports whether the JUnit Platform can discover any executable tests in
     * {@code test}. Uses a throwaway Launcher so the instance launcher's
     * registered listeners never see discovery/dry-run traffic.
     *
     * @param test the candidate test class.
     * @param doSanityDryRun when true, silently executes the discovered plan;
     *        any exception during that run disqualifies the class.
     * @return true if tests were discovered (and the optional dry run
     *         completed); false on no tests or on any exception.
     */
    public static boolean containsExecutableTest(final Class<?> test, final boolean doSanityDryRun) {
        try {
            final LauncherDiscoveryRequest discoveryRequest = request().selectors(selectClass(test)).build();
            final Launcher launcher = LauncherFactory.create();
            final TestPlan testplan = launcher.discover(discoveryRequest);
            final boolean containsTests = testplan.containsTests();

            if (containsTests && doSanityDryRun) {
                // the idea here is this: if this *silent* execute call
                // throws any exception then we will return false in the catch
                // block below. This is a silent execute call because it does
                // not register any listeners at any point.
                launcher.execute(testplan);
            }

            return containsTests;
        } catch (final Exception e) {
            // Deliberate best-effort: any failure here just means the
            // UnifiedTestRunner is not viable for this class.
            return false;
        }
    }

    /** Convenience overload: discovery only, no sanity dry run. */
    public static boolean containsExecutableTest(final Class<?> test) {
        return containsExecutableTest(test, false);
    }

    /**
     * Builds a discovery request selecting the single class {@code test}.
     * Returns null (after logging) if request construction fails; callers
     * must handle the null.
     */
    private LauncherDiscoveryRequest discoveryRequest(final Class<?> test) {
        try {
            final LauncherDiscoveryRequest discoveryRequest = request().selectors(selectClass(test)).build();
            return discoveryRequest;
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

    @Override
    public boolean shouldRun(final Class<?> test) {
        return UnifiedTestRunner.containsExecutableTest(test);
    }

    /**
     * Registers a Tacoco listener (wrapped in a Platform adapter) on the
     * instance launcher. Repeated calls add listeners; none are removed.
     */
    @Override
    public void listenThrough(final ITacocoTestListener listener) {
        final UnifiedTestListenerAdapter testListenerAdapter = new UnifiedTestListenerAdapter(listener);
        launcher.registerTestExecutionListeners(testListenerAdapter);
    }

    /**
     * Returns a deferred execution of {@code test} on the instance launcher.
     * The Callable yields an AnalysisResults holding the class name and the
     * Platform's execution summary, or null if the discovery request could
     * not be built or execution threw.
     */
    @Override
    public Callable<AnalysisResults> getExecutableTest(final Class<?> test) {
        // Captured eagerly so the Callable is self-contained when invoked later.
        final LauncherDiscoveryRequest discoveryRequest = this.discoveryRequest(test);
        final Launcher launcher = this.launcher;
        final String testClassName = test.getName();

        final Callable<AnalysisResults> execTest = new Callable<AnalysisResults>() {

            @Override
            public AnalysisResults call() throws Exception {
                try {
                    if (discoveryRequest == null) {
                        // discoveryRequest(test) already logged the failure.
                        return null;
                    }

                    final SummaryGeneratingListener sGeneratingListener = new SummaryGeneratingListener();

                    launcher.execute(discoveryRequest, sGeneratingListener);
                    final TestExecutionSummary summary = sGeneratingListener.getSummary();

                    final AnalysisResults results = new AnalysisResults();
                    results.put(TEST_CLASS_NAME, testClassName);
                    results.put(TEST_SUMMARY, summary);
                    return results;
                } catch(Exception e) {
                    // Unforeseen engine/runtime failures: log and signal with null.
                    e.printStackTrace();
                    return null;
                }
            }
        };

        return execTest;
    }

    /**
     * Prints a surefire-style one-line summary (plus failure details) for the
     * given results, and records counts/timing into the inherited fields
     * (testRunTime, executedTestCount, failedTestCount, ignoredTestCount —
     * presumably declared on AbstractTestRunner; not visible here).
     */
    @Override
    public void printTestRunSummary(final AnalysisResults results) {
        // NOTE(review): results.iterator() == null looks unreachable for a
        // well-formed AnalysisResults; kept as a defensive guard.
        if (results == null || results.iterator() == null || !results.iterator().hasNext()) {
            return;
        }

        final TestExecutionSummary summary = results.get(TEST_SUMMARY);
        if (summary == null) {
            return;
        }

        final String testName = results.get(TEST_CLASS_NAME);
        if (testName == null) {
            return;
        }

        try {
            // Summary timestamps are epoch millis, so /1000.0 yields seconds.
            this.testRunTime = (summary.getTimeFinished() - summary.getTimeStarted()) / 1000.0;
            this.executedTestCount = (int) summary.getTestsStartedCount();
            // Aborted tests are folded into the failure count here, matching
            // UnifiedTestListenerAdapter's treatment of ABORTED as a failure.
            this.failedTestCount = (int) (summary.getTestsFailedCount() + summary.getTestsAbortedCount());
            this.ignoredTestCount = (int) summary.getTestsSkippedCount();

            System.out.println("Finishing " + testName + " Tests run: " + executedTestCount + " Failures: "
                + failedTestCount + " Errors: " + summary.getTestsAbortedCount() + " Skipped: " + ignoredTestCount
                + " Time elapsed: " + testRunTime + "sec");

            if (this.failedTestCount != 0) {
                System.out.println("---------------------Failures--------------------");
                for (final Failure f : summary.getFailures()) {
                    System.out.println("Test Name: " + f.getTestIdentifier().getDisplayName());
                    System.out.println("Test Identifier: " + f.getTestIdentifier().getUniqueId());

                    System.out.println("Message: " + f.getException().getMessage());
                    System.out.println("Description: " + f.getException().getCause());
                    System.out.println("Trace: ");
                    f.getException().printStackTrace();
                }
            }
        } catch (Exception e) {
            System.err.println("---------------------Tacoco Error--------------------");
            System.err.printf("Failed to parse Analysis Results for testName: %s\n", testName);
            e.printStackTrace();
        }

    }

}
Loading

0 comments on commit 4851b12

Please sign in to comment.