fix some deprecation/style things and rename a pkg
ryan-williams committed Mar 28, 2016
1 parent 2e843de commit 33eb79f
Showing 4 changed files with 12 additions and 12 deletions.
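
Most of the changes below replace bare field access on the Avro-generated record classes with their getter methods (getContig, getFragmentNumber, and so on), switch format strings to s-interpolation, and drop the empty parameter list from side-effect-free methods such as toReads and getCallsetSamples. A minimal sketch of that last convention, using hypothetical names rather than the ADAM classes:

class Fragment(val name: String, val sequence: String) {
  // Side-effect-free accessor: declared and called without parentheses.
  def description: String = s"$name (${sequence.length} bases)"

  // Side-effecting method keeps its parentheses by convention.
  def printDescription(): Unit = println(description)
}

object StyleSketch {
  def main(args: Array[String]): Unit = {
    val f = new Fragment("chr1-frag0", "ACGTACGT")
    println(f.description) // no parentheses at the call site either
    f.printDescription()
  }
}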
@@ -46,7 +46,7 @@ class NucleotideContigFragmentRDDFunctions(rdd: RDD[NucleotideContigFragment]) e
   *
   * @return Returns an RDD of reads.
   */
-  def toReads(): RDD[AlignmentRecord] = {
+  def toReads: RDD[AlignmentRecord] = {
    FragmentConverter.convertRdd(rdd)
  }

@@ -59,18 +59,18 @@ class NucleotideContigFragmentRDDFunctions(rdd: RDD[NucleotideContigFragment]) e
  def saveAsFasta(fileName: String, lineWidth: Int = 60) = {

    def isFragment(record: NucleotideContigFragment): Boolean = {
-      Option(record.fragmentNumber).isDefined && Option(record.numberOfFragmentsInContig).fold(false)(_ > 1)
+      Option(record.getFragmentNumber).isDefined && Option(record.getNumberOfFragmentsInContig).fold(false)(_ > 1)
    }

    def toFasta(record: NucleotideContigFragment): String = {
      val sb = new StringBuilder()
      sb.append(">")
-      sb.append(record.contig.contigName)
-      Option(record.description).foreach(n => sb.append(" ").append(n))
+      sb.append(record.getContig.getContigName)
+      Option(record.getDescription).foreach(n => sb.append(" ").append(n))
      if (isFragment(record)) {
-        sb.append(" fragment %d of %d".format(record.fragmentNumber + 1, record.numberOfFragmentsInContig))
+        sb.append(s" fragment ${record.getFragmentNumber + 1} of ${record.getNumberOfFragmentsInContig}")
      }
-      for (line <- Splitter.fixedLength(lineWidth).split(record.fragmentSequence)) {
+      for (line <- Splitter.fixedLength(lineWidth).split(record.getFragmentSequence)) {
        sb.append("\n")
        sb.append(line)
      }
@@ -90,15 +90,15 @@ class NucleotideContigFragmentRDDFunctions(rdd: RDD[NucleotideContigFragment]) e
        .setFragmentNumber(null)
        .setFragmentStartPosition(null)
        .setNumberOfFragmentsInContig(null)
-        .setFragmentSequence(first.fragmentSequence + second.fragmentSequence)
+        .setFragmentSequence(first.getFragmentSequence + second.getFragmentSequence)
        .build

      merged
    }

    rdd
-      .sortBy(fragment => (fragment.contig.contigName, Option(fragment.fragmentNumber).map(_.toInt).getOrElse(-1)))
-      .map(fragment => (fragment.contig.contigName, fragment))
+      .sortBy(fragment => (fragment.getContig.getContigName, Option(fragment.getFragmentNumber).map(_.toInt).getOrElse(-1)))
+      .map(fragment => (fragment.getContig.getContigName, fragment))
      .reduceByKey(merge)
      .values
  }
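
The toFasta helper above wraps each fragment sequence to lineWidth columns with Guava's Splitter. A standalone sketch of that wrapping logic, assuming a hypothetical contig name and sequence in place of the Avro record fields:

import com.google.common.base.Splitter
import scala.collection.JavaConverters._

object FastaWrapSketch {
  def main(args: Array[String]): Unit = {
    val lineWidth = 60
    // Hypothetical stand-ins for record.getContig.getContigName and record.getFragmentSequence.
    val contigName = "chr1"
    val sequence = "ACGT" * 40 // 160 bases, so it wraps onto three lines

    val sb = new StringBuilder()
    sb.append(">").append(contigName)
    // Splitter.fixedLength chunks the sequence into lineWidth-sized pieces.
    for (line <- Splitter.fixedLength(lineWidth).split(sequence).asScala) {
      sb.append("\n").append(line)
    }
    println(sb.toString())
  }
}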
@@ -51,7 +51,7 @@ class VariantContextRDDFunctions(rdd: RDD[VariantContext]) extends ADAMSequenceD

  }

-  def getCallsetSamples(): List[String] = {
+  def getCallsetSamples: List[String] = {
    rdd.flatMap(c => c.genotypes.map(_.getSampleId).toSeq.distinct)
      .distinct
      .collect()
@@ -80,7 +80,7 @@ class VariantContextRDDFunctions(rdd: RDD[VariantContext]) extends ADAMSequenceD
    log.info(s"Writing $vcfFormat file to $filePath")

    // Initialize global header object required by Hadoop VCF Writer
-    val header = getCallsetSamples()
+    val header = getCallsetSamples
    val bcastHeader = rdd.context.broadcast(header)
    val mp = rdd.mapPartitionsWithIndex((idx, iter) => {
      log.info(s"Setting header for partition $idx")
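
The hunk above broadcasts the sample header once and reads it back inside mapPartitionsWithIndex. A minimal local-mode sketch of that pattern, with hypothetical sample names and records standing in for the ADAM types:

import org.apache.spark.{SparkConf, SparkContext}

object BroadcastHeaderSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("broadcast-header").setMaster("local[2]"))

    // Hypothetical stand-in for the getCallsetSamples result.
    val header = List("me", "you")
    val bcastHeader = sc.broadcast(header)

    val records = sc.parallelize(Seq("rec0", "rec1", "rec2"))
    val mp = records.mapPartitionsWithIndex((idx, iter) => {
      // Each partition reads the broadcast value instead of re-shipping the header with every task.
      val samples = bcastHeader.value
      iter.map(r => s"partition $idx, samples $samples: $r")
    })
    mp.collect().foreach(println)
    sc.stop()
  }
}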
@@ -50,7 +50,7 @@ class ADAMVariationRDDFunctionsSuite extends ADAMFunSuite {
      .build()

    val vc = VariantContext.buildFromGenotypes(List(genotype0, genotype1))
-    val samples = sc.parallelize(List(vc)).getCallsetSamples()
+    val samples = sc.parallelize(List(vc)).getCallsetSamples

    assert(samples.count(_ == "you") === 1)
    assert(samples.count(_ == "me") === 1)
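
The suite above checks that getCallsetSamples returns each sample id exactly once. A standalone sketch of the same flatMap / distinct / collect shape over hypothetical per-record sample lists:

import org.apache.spark.{SparkConf, SparkContext}

object CallsetSamplesSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("callset-samples").setMaster("local[2]"))

    // Hypothetical variant contexts, reduced to the sample ids carried by their genotypes.
    val contexts = sc.parallelize(Seq(Seq("me", "you"), Seq("me")))

    // Same shape as getCallsetSamples: flatten per-record ids, then deduplicate globally.
    val samples = contexts.flatMap(_.distinct).distinct().collect().toList

    assert(samples.count(_ == "me") == 1)
    assert(samples.count(_ == "you") == 1)
    sc.stop()
  }
}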
