misc pom/test/resource improvements #1142

Closed · wants to merge 11 commits
.gitignore: 3 changes (2 additions, 1 deletion)
@@ -5,10 +5,11 @@ adam*.jar
build
*~
#*
+ *.bak
*.bam*
*.adam*
*.log
.*.swp
.DS_Store

- *#*
+ *#*
adam-apis/pom.xml: 14 changes (7 additions, 7 deletions)
@@ -10,7 +10,7 @@

<artifactId>adam-apis_2.10</artifactId>
<packaging>jar</packaging>
<name>ADAM_2.10: APIs for Java</name>
<name>ADAM_${scala.version.prefix}: APIs for Java</name>
<build>
<plugins>
<!-- disable surefire -->
@@ -26,7 +26,7 @@
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
<configuration>
<reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
<reportsDirectory>${project.build.directory}/scalatest-reports</reportsDirectory>
<junitxml>.</junitxml>
<filereports>ADAMTestSuite.txt</filereports>
<!--
@@ -100,11 +100,11 @@
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<artifactId>spark-core_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.utils</groupId>
<artifactId>utils-misc_2.10</artifactId>
<artifactId>utils-misc_${scala.version.prefix}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
@@ -114,11 +114,11 @@
</dependency>
<dependency>
<groupId>org.bdgenomics.adam</groupId>
<artifactId>adam-core_2.10</artifactId>
<artifactId>adam-core_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.adam</groupId>
<artifactId>adam-core_2.10</artifactId>
<artifactId>adam-core_${scala.version.prefix}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
@@ -132,7 +132,7 @@
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_2.10</artifactId>
<artifactId>scalatest_${scala.version.prefix}</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
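Note on the recurring pom change: every hard-coded _2.10 artifact suffix becomes ${scala.version.prefix}. For that expression to resolve, the property has to be defined once in the parent pom; a minimal sketch of such a definition, assuming the property name used in these diffs and the current Scala 2.10 build (illustrative, not copied from this PR):

<!-- hypothetical parent-pom snippet; the value matches the suffixes being replaced -->
<properties>
  <scala.version.prefix>2.10</scala.version.prefix>
</properties>

With that in place, moving the whole build to another Scala binary version is a single edit in the parent pom instead of one per module. One caveat: Maven warns when an artifactId contains an expression, which is why some projects script the suffix rewrite instead.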
adam-assembly/pom.xml: 6 changes (3 additions, 3 deletions)
@@ -10,13 +10,13 @@

<artifactId>adam-assembly_2.10</artifactId>
<packaging>jar</packaging>
<name>ADAM_2.10: Assembly</name>
<name>ADAM_${scala.version.prefix}: Assembly</name>
<properties>
<timestamp>${maven.build.timestamp}</timestamp>
<maven.build.timestamp.format>yyyy-MM-dd</maven.build.timestamp.format>
</properties>
<build>
<finalName>adam_2.10-${project.version}</finalName>
<finalName>adam_${scala.version.prefix}-${project.version}</finalName>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
@@ -114,7 +114,7 @@
<dependencies>
<dependency>
<groupId>org.bdgenomics.adam</groupId>
<artifactId>adam-cli_2.10</artifactId>
<artifactId>adam-cli_${scala.version.prefix}</artifactId>
</dependency>
</dependencies>
</project>
adam-cli/pom.xml: 28 changes (14 additions, 14 deletions)
@@ -10,7 +10,7 @@

<artifactId>adam-cli_2.10</artifactId>
<packaging>jar</packaging>
<name>ADAM_2.10: CLI</name>
<name>ADAM_${scala.version.prefix}: CLI</name>
<properties>
<timestamp>${maven.build.timestamp}</timestamp>
<maven.build.timestamp.format>yyyy-MM-dd</maven.build.timestamp.format>
@@ -53,7 +53,7 @@
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
<configuration>
<reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
<reportsDirectory>${project.build.directory}/scalatest-reports</reportsDirectory>
<junitxml>.</junitxml>
<filereports>ADAMTestSuite.txt</filereports>
<!--
@@ -111,51 +111,51 @@
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<artifactId>spark-core_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.utils</groupId>
<artifactId>utils-misc_2.10</artifactId>
<artifactId>utils-misc_${scala.version.prefix}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.bdgenomics.utils</groupId>
<artifactId>utils-misc_2.10</artifactId>
<artifactId>utils-misc_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.utils</groupId>
<artifactId>utils-io_2.10</artifactId>
<artifactId>utils-io_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.utils</groupId>
<artifactId>utils-cli_2.10</artifactId>
<artifactId>utils-cli_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.utils</groupId>
<artifactId>utils-metrics_2.10</artifactId>
<artifactId>utils-metrics_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.bdg-formats</groupId>
<artifactId>bdg-formats</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.adam</groupId>
<artifactId>adam-core_2.10</artifactId>
<artifactId>adam-core_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.adam</groupId>
<artifactId>adam-core_2.10</artifactId>
<artifactId>adam-core_${scala.version.prefix}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.bdgenomics.adam</groupId>
<artifactId>adam-apis_2.10</artifactId>
<artifactId>adam-apis_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.adam</groupId>
<artifactId>adam-apis_2.10</artifactId>
<artifactId>adam-apis_${scala.version.prefix}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
@@ -173,12 +173,12 @@
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_2.10</artifactId>
<artifactId>scalatest_${scala.version.prefix}</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>net.codingwell</groupId>
<artifactId>scala-guice_2.10</artifactId>
<artifactId>scala-guice_${scala.version.prefix}</artifactId>
</dependency>
</dependencies>
</project>
@@ -27,7 +27,7 @@ import org.bdgenomics.formats.avro.AlignmentRecord
class Adam2FastqSuite extends ADAMFunSuite {

sparkTest("convert SAM to paired FASTQ") {
- val readsFilepath = resourcePath("bqsr1.sam")
+ val readsFilepath = testFile("bqsr1.sam")

// The following fastq files were generated by Picard's SamToFastq

@@ -45,8 +45,8 @@

// VALIDATION_STRINGENCY=SILENT is necessary since they are unpaired reads and this matches the ADAM default

- val fastq1Path = resourcePath("bqsr1-r1.fq")
- val fastq2Path = resourcePath("bqsr1-r2.fq")
+ val fastq1Path = testFile("bqsr1-r1.fq")
+ val fastq2Path = testFile("bqsr1-r2.fq")

val outputDir = Files.createTempDir()
val outputFastqR1File = outputDir.getAbsolutePath + "/bqsr1-r1.fq"
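The test changes throughout this PR rename resourcePath to testFile when resolving bundled test data. The helper is presumably provided by ADAMFunSuite, which all of these suites extend; a minimal sketch of what such a helper could look like, assuming it resolves a file under src/test/resources via the classpath (only the name testFile comes from the diff, the body is illustrative):

import java.io.File

trait TestFileHelper {
  // Resolve a test resource (e.g. "bqsr1.sam") to an absolute filesystem
  // path, failing fast if the resource is missing from the classpath.
  def testFile(name: String): String = {
    val url = Thread.currentThread().getContextClassLoader.getResource(name)
    require(url != null, s"test resource not found: $name")
    new File(url.toURI).getAbsolutePath
  }
}

Under that assumption, testFile("bqsr1.sam") behaves exactly like the old resourcePath("bqsr1.sam") call, and the rename is mechanical.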
@@ -30,7 +30,7 @@ class FlagStatSuite extends ADAMFunSuite {

sparkTest("Standard FlagStat test") {

- val inputpath = resourcePath("NA12878.sam")
+ val inputpath = testFile("NA12878.sam")
val argLine = "%s".format(inputpath).split("\\s+")

val args: FlagStatArgs = Args4j.apply[FlagStatArgs](argLine)
adam-core/pom.xml: 18 changes (9 additions, 9 deletions)
@@ -10,7 +10,7 @@

<artifactId>adam-core_2.10</artifactId>
<packaging>jar</packaging>
<name>ADAM_2.10: Core</name>
<name>ADAM_${scala.version.prefix}: Core</name>
<build>
<plugins>
<!-- disable surefire -->
@@ -26,7 +26,7 @@
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
<configuration>
<reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
<reportsDirectory>${project.build.directory}/scalatest-reports</reportsDirectory>
<junitxml>.</junitxml>
<filereports>ADAMTestSuite.txt</filereports>
<!--
@@ -92,25 +92,25 @@
<dependencies>
<dependency>
<groupId>org.bdgenomics.utils</groupId>
<artifactId>utils-misc_2.10</artifactId>
<artifactId>utils-misc_${scala.version.prefix}</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.bdgenomics.utils</groupId>
<artifactId>utils-metrics_2.10</artifactId>
<artifactId>utils-metrics_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.utils</groupId>
<artifactId>utils-io_2.10</artifactId>
<artifactId>utils-io_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.utils</groupId>
<artifactId>utils-cli_2.10</artifactId>
<artifactId>utils-cli_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>org.bdgenomics.utils</groupId>
<artifactId>utils-intervalrdd_2.10</artifactId>
<artifactId>utils-intervalrdd_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>com.esotericsoftware.kryo</groupId>
@@ -134,7 +134,7 @@
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<artifactId>spark-core_${scala.version.prefix}</artifactId>
</dependency>
<dependency>
<groupId>it.unimi.dsi</groupId>
@@ -166,7 +166,7 @@
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_2.10</artifactId>
<artifactId>scalatest_${scala.version.prefix}</artifactId>
<scope>test</scope>
</dependency>
<dependency>
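A note on the reportsDirectory change repeated in each pom above: surefire is disabled in these modules (see the "disable surefire" comments), yet scalatest was writing its JUnit-style XML into surefire-reports, which misleadingly suggested surefire produced it. Pointing scalatest at its own scalatest-reports directory makes the origin of the reports explicit; any CI job that collects target/surefire-reports/*.xml would presumably need to pick up target/scalatest-reports/*.xml after this change.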
@@ -37,7 +37,7 @@ import scala.transient
* @param sc Spark context to use.
*/
private[adam] class ConsensusGeneratorFromKnowns(file: String,
- @transient sc: SparkContext) extends ConsensusGenerator {
+ sc: SparkContext) extends ConsensusGenerator {

private val indelTable = sc.broadcast(IndelTable(file, sc))

@@ -26,7 +26,7 @@ object SAMFileHeaderWritable {
}
}

- class SAMFileHeaderWritable(@transient hdr: SAMFileHeader) extends Serializable {
+ class SAMFileHeaderWritable(hdr: SAMFileHeader) extends Serializable {
// extract fields that are needed in order to recreate the SAMFileHeader
protected val text = {
val txt: String = hdr.getTextHeader
@@ -29,7 +29,7 @@ sealed trait ShuffleRegionJoin[T, U, RT, RU] extends RegionJoin[T, U, RT, RU] {

val sd: SequenceDictionary
val partitionSize: Long
- @transient val sc: SparkContext
+ val sc: SparkContext

// Create the set of bins across the genome for parallel processing
protected val seqLengths = Map(sd.records.toSeq.map(rec => (rec.name, rec.length)): _*)
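The three @transient removals above look like cleanups rather than behavior changes: when a constructor parameter is only read during construction, scalac never emits a backing field for it, so there is nothing for @transient to mark. ConsensusGeneratorFromKnowns is the clearest case, because the SparkContext is consumed immediately to build a broadcast; a small sketch of that pattern (the class and value names here are hypothetical):

import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast

// The SparkContext is used once, at construction time; only the Broadcast
// handle becomes a field, and Broadcast values are designed to be captured
// in serialized closures, so no @transient marker is needed.
class KnownSitesTable(file: String, sc: SparkContext) extends Serializable {
  val sites: Broadcast[Seq[String]] =
    sc.broadcast(sc.textFile(file).collect().toSeq)
}

SAMFileHeaderWritable follows the same shape: its constructor decomposes the header into serializable fields, so the SAMFileHeader parameter never needs to survive serialization.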

This file was deleted.

@@ -28,7 +28,7 @@ class ConsensusGeneratorFromReadsSuite extends ADAMFunSuite {
val cg = new ConsensusGeneratorFromReads

def artificial_reads: RDD[AlignmentRecord] = {
val path = resourcePath("artificial.sam")
val path = testFile("artificial.sam")
sc.loadAlignments(path).rdd
}

@@ -206,7 +206,7 @@ class FastaConverterSuite extends ADAMFunSuite {
assert(convertedFragmentSequence2 === fastaFragmentSequence2)
}

- val chr1File = resourcePath("human_g1k_v37_chr1_59kb.fasta")
+ val chr1File = testFile("human_g1k_v37_chr1_59kb.fasta")

sparkTest("convert reference fasta file") {
//Loading "human_g1k_v37_chr1_59kb.fasta"
@@ -28,7 +28,7 @@ import scala.collection.JavaConversions._

class VariantContextConverterSuite extends ADAMFunSuite {
val dictionary = {
val path = resourcePath("dict_with_accession.dict")
val path = testFile("dict_with_accession.dict")
SequenceDictionary(SAMFileReader.getSequenceDictionary(new File(path)))
}

@@ -40,7 +40,7 @@ class SequenceDictionarySuite extends ADAMFunSuite {
}

test("Convert from SAM sequence dictionary file (with extra fields)") {
val path = resourcePath("dict_with_accession.dict")
val path = testFile("dict_with_accession.dict")
val ssd = SAMFileReader.getSequenceDictionary(new File(path))

val chr1 = ssd.getSequence("1") // Validate that extra fields are parsed
@@ -54,7 +54,7 @@
}

test("merge into existing dictionary") {
val path = resourcePath("dict_with_accession.dict")
val path = testFile("dict_with_accession.dict")
val ssd = SAMFileReader.getSequenceDictionary(new File(path))

val asd = SequenceDictionary(ssd)
@@ -67,7 +67,7 @@
}

test("Convert from SAM sequence dictionary and back") {
val path = resourcePath("dict_with_accession.dict")
val path = testFile("dict_with_accession.dict")
val ssd = SAMFileReader.getSequenceDictionary(new File(path))
val asd = SequenceDictionary(ssd)
ssd.assertSameDictionary(SequenceDictionary.toSAMSequenceDictionary(asd))
@@ -211,7 +211,7 @@
}

test("load sequence dictionary from VCF file") {
val path = resourcePath("small.vcf")
val path = testFile("small.vcf")
val fileReader = new VCFFileReader(new File(path), false)
val sd = SequenceDictionary.fromVCFHeader(fileReader.getFileHeader)

@@ -34,7 +34,7 @@ class FieldEnumerationSuite extends ADAMFunSuite {
}

sparkTest("Simple projection on Read works") {
- val readsFilepath = resourcePath("reads12.sam")
+ val readsFilepath = testFile("reads12.sam")
val readsParquetFilepath = tmpFile("reads12.adam")

// Convert the reads12.sam file into a parquet file