Commit

Add IT for depclean-results.json creation (#83)
cesarsotovalero authored Apr 12, 2021
1 parent 9e79780 commit 0303e5a
Showing 7 changed files with 153 additions and 191 deletions.
ArtifactTypes.java
@@ -1,12 +1,14 @@
package se.kth.depclean.core.analysis;

import java.util.Set;
import lombok.AllArgsConstructor;
import lombok.Data;

/**
* POJO containing the types in an artifact.
*/
@Data
@AllArgsConstructor
public class ArtifactTypes {

/**
@@ -19,14 +21,4 @@ public class ArtifactTypes {
*/
private Set<String> usedTypes;

/**
* Ctor.
*
* @param allTypes All types in the artifact.
* @param usedTypes Thew used types in the artifact.
*/
public ArtifactTypes(Set<String> allTypes, Set<String> usedTypes) {
this.allTypes = allTypes;
this.usedTypes = usedTypes;
}
}
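
The constructor removed above is no longer needed: Lombok's @Data generates the getters, setters, equals/hashCode and toString for the two fields, and @AllArgsConstructor generates a constructor with the same (allTypes, usedTypes) signature, so existing call sites keep working. A minimal sketch of a call site, assuming it lives in the same package as ArtifactTypes; the example class and type names are illustrative only:

package se.kth.depclean.core.analysis;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

class ArtifactTypesExample {
  public static void main(String[] args) {
    Set<String> allTypes = new HashSet<>(Arrays.asList("a.b.Foo", "a.b.Bar"));
    Set<String> usedTypes = new HashSet<>(Arrays.asList("a.b.Foo"));
    // Same signature as the removed hand-written constructor, now generated by @AllArgsConstructor.
    ArtifactTypes types = new ArtifactTypes(allTypes, usedTypes);
    // Accessors generated by @Data.
    System.out.println(types.getUsedTypes().size() + " of "
        + types.getAllTypes().size() + " types are used");
  }
}
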
DefaultProjectDependencyAnalyzer.java
@@ -41,23 +41,31 @@
import se.kth.depclean.core.analysis.graph.DefaultCallGraph;

/**
* The principal class that perform the dependency analysis in a Maven project.
* This is principal class that perform the dependency analysis in a Maven project.
*/
@Slf4j
@Component(role = ProjectDependencyAnalyzer.class)
public class DefaultProjectDependencyAnalyzer implements ProjectDependencyAnalyzer {

/**
* If true, the project's classes in target/test-classes are not going to be analyzed.
*/
private final boolean isIgnoredTest;

@Requirement
private final ClassAnalyzer classAnalyzer = new DefaultClassAnalyzer();

@Requirement
private final DependencyAnalyzer dependencyAnalyzer = new ASMDependencyAnalyzer();

/**
* If true, the project's classes in target/test-classes are not going to be analyzed.
*/
private final boolean isIgnoredTest;

/**
* A map [artifact] -> [allTypes].
*/
private Map<Artifact, Set<String>> artifactClassesMap;

/**
* A map [artifact] -> [usedTypes].
*/
private final Map<Artifact, Set<String>> artifactUsedClassesMap = new HashMap<>();

/**
@@ -67,11 +75,6 @@ public DefaultProjectDependencyAnalyzer(boolean isIgnoredTest) {
this.isIgnoredTest = isIgnoredTest;
}

/**
* A map [dependency] -> [dependency classes].
*/
private Map<Artifact, Set<String>> artifactClassesMap;

/**
* Analyze the dependencies in a project.
*
@@ -108,7 +111,6 @@ public ProjectDependencyAnalysis analyze(MavenProject project) throws ProjectDep
);
Set<Artifact> usedArtifacts = collectUsedArtifactsFromProcessors(project, artifactClassesMap);


/* ******************** results as statically used at the bytecode *********************** */

// for the used dependencies, get the ones that are declared
@@ -127,12 +129,12 @@ public ProjectDependencyAnalysis analyze(MavenProject project) throws ProjectDep
} catch (IOException exception) {
throw new ProjectDependencyAnalyzerException("Cannot analyze dependencies", exception);
}

}

/**
* Maven processors are defined like this.
* <pre>
* {@code
* <pre>{@code
* <plugin>
* <groupId>org.bsc.maven</groupId>
* <artifactId>maven-processor-plugin</artifactId>
@@ -148,8 +150,7 @@ public ProjectDependencyAnalysis analyze(MavenProject project) throws ProjectDep
* </execution>
* </executions>
* </plugin>
* }
* </pre>
* }</pre>
*
* @param project the maven project
* @param artifactClassesMap previously built artifacts map
@@ -167,6 +168,7 @@ private Set<Artifact> collectUsedArtifactsFromProcessors(MavenProject project,
.forEach(processor -> findArtifactForClassName(artifactClassesMap, processor.getValue())
.ifPresent(artifact -> artifactUsedClassesMap.putIfAbsent(artifact, new HashSet<>()))
);

return artifactUsedClassesMap.keySet();
}

@@ -222,16 +224,31 @@ private void buildDependenciesDependencyClasses(MavenProject project) throws IOE
collectDependencyClasses(dependenciesDirectory);
}

private Set<Artifact> collectUsedArtifacts(Map<Artifact, Set<String>> artifactClassMap,
/**
* Determine the artifacts that are used.
*
* @param artifactClassMap A map of [artifact] -> [classes in the artifact].
* @param referencedClasses A set of classes that are detected as used.
* @return The set of used artifacts.
*/
private Set<Artifact> collectUsedArtifacts(
Map<Artifact, Set<String>> artifactClassMap,
Set<String> referencedClasses) {
// find for used members in each class in the dependency classes
Set<Artifact> usedArtifacts = new HashSet<>();
for (String clazz : referencedClasses) {
findArtifactForClassName(artifactClassMap, clazz)
.ifPresent(artifact -> artifactUsedClassesMap.putIfAbsent(artifact, new HashSet<>()));
Optional<Artifact> artifact = findArtifactForClassName(artifactClassMap, clazz);
if (artifact.isPresent()) {
if (!artifactUsedClassesMap.containsKey(artifact.get())) {
artifactUsedClassesMap.put(artifact.get(), new HashSet<>());
}
artifactUsedClassesMap.get(artifact.get()).add(clazz);
usedArtifacts.add(artifact.get());
}
}
return artifactUsedClassesMap.keySet();
return usedArtifacts;
}


private Optional<Artifact> findArtifactForClassName(Map<Artifact, Set<String>> artifactClassMap, String className) {
for (Map.Entry<Artifact, Set<String>> entry : artifactClassMap.entrySet()) {
if (entry.getValue().contains(className)) {
@@ -272,24 +289,26 @@ private Set<String> collectDependencyClasses(String path) throws IOException {
}

/**
* Computes a map of artifacts and their types.
* Computes a map of [artifact] -> [allTypes, usedTypes].
*
* @return A map of artifact -> classes
* @return A map of [artifact] -> [allTypes, usedTypes]
*/
public Map<String, ArtifactTypes> getArtifactClassesMap() {
Map<String, ArtifactTypes> output = new HashMap<>();
for (Map.Entry<Artifact, Set<String>> entry : artifactClassesMap.entrySet()) {
Artifact key = entry.getKey();
if (artifactUsedClassesMap.containsKey(key)) {
output.put(key.toString(), new ArtifactTypes(
artifactClassesMap.get(key), // get all the types
artifactUsedClassesMap.get(key) // get used types
));
output.put(key.toString(),
new ArtifactTypes(
artifactClassesMap.get(key), // get all the types
artifactUsedClassesMap.get(key) // get used types
));
} else {
output.put(key.toString(), new ArtifactTypes(
artifactClassesMap.get(key), // get all the types
new HashSet<>() // get used types
));
output.put(key.toString(),
new ArtifactTypes(
artifactClassesMap.get(key), // get all the types
new HashSet<>() // get used types
));
}
}
return output;
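
The reworked getArtifactClassesMap() now returns, per artifact, both the full type set and the types that were actually referenced (an empty set when nothing was used). A short sketch of how a caller could summarise usage per artifact; illustrative only, and the analyzer variable is assumed to be a DefaultProjectDependencyAnalyzer on which analyze(project) has already run:

// 'analyzer' has already executed analyze(project), so both internal maps are populated.
for (Map.Entry<String, ArtifactTypes> entry : analyzer.getArtifactClassesMap().entrySet()) {
  ArtifactTypes types = entry.getValue();
  // usedTypes is the empty set for artifacts whose classes were never referenced.
  System.out.println(entry.getKey() + ": " + types.getUsedTypes().size()
      + " of " + types.getAllTypes().size() + " types used");
}
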
DepCleanMojo.java
@@ -104,11 +104,19 @@ public class DepCleanMojo extends AbstractMojo {

/**
* If this is true, DepClean creates a JSON file with the result of the analysis. The file is called
* "debloat-result.json" and it is located in the root of the project.
* "debloat-result.json" and it is located in /target.
*/
@Parameter(property = "createResultJson", defaultValue = "false")
private boolean createResultJson;


/**
* If this is true, DepClean creates a CSV file with the result of the analysis with the columns:
* OriginClass,TargetClass,Dependency. The file is called "class-usage.csv" and it is located in /target.
*/
@Parameter(property = "createClassUsageCsv", defaultValue = "false")
private boolean createClassUsageCsv;
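
For illustration (not part of this diff), both report flags can be enabled from the command line, e.g. mvn depclean:depclean -DcreateResultJson=true -DcreateClassUsageCsv=true (the depclean goal and the plugin coordinates below are the usual DepClean ones, assumed here rather than shown in this commit), or in the plugin configuration of the pom.xml:

<plugin>
  <groupId>se.kth.castor</groupId>
  <artifactId>depclean-maven-plugin</artifactId>
  <configuration>
    <createResultJson>true</createResultJson>
    <createClassUsageCsv>true</createClassUsageCsv>
  </configuration>
</plugin>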

/**
* Add a list of dependencies, identified by their coordinates, to be ignored by DepClean during the analysis and
* considered as used dependencies. Useful to override incomplete result caused by bytecode-level analysis Dependency
@@ -167,6 +175,7 @@ public class DepCleanMojo extends AbstractMojo {
@Component(hint = "default")
private DependencyGraphBuilder dependencyGraphBuilder;


/**
* Write pom file to the filesystem.
*
@@ -392,7 +401,7 @@ public void execute() throws MojoExecutionException, MojoFailureException {
Iterator<File> iterator = FileUtils.iterateFiles(
new File(
project.getBuild().getDirectory() + File.separator
+ DIRECTORY_TO_COPY_DEPENDENCIES), new String[]{"jar"}, true);
+ DIRECTORY_TO_COPY_DEPENDENCIES), new String[] {"jar"}, true);
while (iterator.hasNext()) {
File file = iterator.next();
sizeOfDependencies.put(file.getName(), FileUtils.sizeOf(file));
@@ -411,8 +420,7 @@ public void execute() throws MojoExecutionException, MojoFailureException {

/* Analyze dependencies usage status */
ProjectDependencyAnalysis projectDependencyAnalysis;
DefaultProjectDependencyAnalyzer dependencyAnalyzer = new DefaultProjectDependencyAnalyzer(
ignoreTests);
DefaultProjectDependencyAnalyzer dependencyAnalyzer = new DefaultProjectDependencyAnalyzer(ignoreTests);
try {
projectDependencyAnalysis = dependencyAnalyzer.analyze(project);
} catch (ProjectDependencyAnalyzerException e) {
@@ -543,7 +551,7 @@ public void execute() throws MojoExecutionException, MojoFailureException {
}
}

/* Printing the results to the console */
/* Printing the results to the terminal */
printString(SEPARATOR);
printString(" D E P C L E A N A N A L Y S I S R E S U L T S");
printString(SEPARATOR);
@@ -652,7 +660,6 @@ public void execute() throws MojoExecutionException, MojoFailureException {
} catch (IOException e) {
throw new MojoExecutionException(e.getMessage(), e);
}

getLog().info("POM debloated successfully");
getLog().info("pom-debloated.xml file created in: " + pathToDebloatedPom);
}
@@ -661,10 +668,9 @@ public void execute() throws MojoExecutionException, MojoFailureException {
/* Writing the JSON file with the debloat results */
if (createResultJson) {
printString("Creating depclean-results.json, please wait...");
String pathToJsonFile =
project.getBasedir().getAbsolutePath() + File.separator + "depclean-results.json";
String treeFile = project.getBuild().getDirectory() + File.separator + "tree.txt";
/* Copy direct dependencies locally */
final File jsonFile = new File(project.getBuild().getDirectory() + File.separator + "depclean-results.json");
final File treeFile = new File(project.getBuild().getDirectory() + File.separator + "tree.txt");
final File classUsageFile = new File(project.getBuild().getDirectory() + File.separator + "class-usage.csv");
try {
MavenInvoker.runCommand("mvn dependency:tree -DoutputFile=" + treeFile + " -Dverbose=true");
} catch (IOException | InterruptedException e) {
@@ -673,13 +679,13 @@ public void execute() throws MojoExecutionException, MojoFailureException {
Thread.currentThread().interrupt();
return;
}
File classUsageFile = new File(
project.getBasedir().getAbsolutePath() + File.separator + "class-usage.csv");
try {
FileUtils.write(classUsageFile, "OriginClass,TargetClass,Dependency\n",
Charset.defaultCharset());
} catch (IOException e) {
getLog().error("Error writing the CSV header.");
if (createClassUsageCsv) {
printString("Creating class-usage.csv, please wait...");
try {
FileUtils.write(classUsageFile, "OriginClass,TargetClass,Dependency\n", Charset.defaultCharset());
} catch (IOException e) {
getLog().error("Error writing the CSV header.");
}
}
ParsedDependencies parsedDependencies = new ParsedDependencies(
treeFile,
@@ -691,15 +697,20 @@ public void execute() throws MojoExecutionException, MojoFailureException {
unusedDirectArtifactsCoordinates,
unusedInheritedArtifactsCoordinates,
unusedTransitiveArtifactsCoordinates,
classUsageFile
classUsageFile,
createClassUsageCsv
);
try {
FileUtils.write(new File(pathToJsonFile), parsedDependencies.parseTreeToJson(),
Charset.defaultCharset());
getLog().info("depclean-results.json file created in: " + pathToJsonFile);
FileUtils.write(jsonFile, parsedDependencies.parseTreeToJson(), Charset.defaultCharset());
} catch (ParseException | IOException e) {
getLog().error("Unable to generate JSON file.");
}
if (jsonFile.exists()) {
getLog().info("depclean-results.json file created in: " + jsonFile.getAbsolutePath());
}
if (classUsageFile.exists()) {
getLog().info("class-usage.csv file created in: " + classUsageFile.getAbsolutePath());
}
}
}
}
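
For reference (illustrative, not produced verbatim by this diff): with both flags enabled, the build now writes depclean-results.json, tree.txt and class-usage.csv under target/. Each class-usage.csv row pairs an origin class with the class it references and the dependency that owns the referenced class, following the header written above; the class names and dependency coordinates below are hypothetical:

OriginClass,TargetClass,Dependency
com.example.app.Main,org.apache.commons.io.FileUtils,commons-io:commons-io:2.8.0
com.example.app.Main,com.google.gson.Gson,com.google.code.gson:gson:2.8.6
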
(The remaining four changed files are not shown in this view.)
