Commit

Get rid of ancient, unused option to track unique reads in a LIBS
davidbenjamin committed Jan 24, 2022
1 parent caa48f9 commit 101cdcf
Showing 14 changed files with 38 additions and 275 deletions.
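The edit is the same at every call site: the boolean "track unique reads" flag is dropped from the LocusIteratorByState constructor and from AlignmentContextIteratorBuilder, and because no caller ever turned the option on, iteration behavior is unchanged. As a minimal before/after sketch distilled from the hunks below (variable names here are illustrative rather than taken from any single file):

    // Before: the third argument asked LIBS to keep a unique-read list that nothing consumed.
    LocusIteratorByState libs = new LocusIteratorByState(
            reads.iterator(), DownsamplingMethod.NONE, false,
            ReadUtils.getSamplesFromHeader(header), header, true);

    // After: identical behavior, one argument fewer.
    LocusIteratorByState libs = new LocusIteratorByState(
            reads.iterator(), DownsamplingMethod.NONE,
            ReadUtils.getSamplesFromHeader(header), header, true);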

@@ -60,7 +60,7 @@ public ActivityProfileStateIterator(final MultiIntervalShard<GATKRead> readShard

// We wrap our LocusIteratorByState inside an IntervalAlignmentContextIterator so that we get empty loci
// for uncovered locations. This is critical for reproducing GATK 3.x behavior!
- LocusIteratorByState libs = new LocusIteratorByState(readShard.iterator(), DownsamplingMethod.NONE, false, ReadUtils.getSamplesFromHeader(readHeader), readHeader, true);
+ LocusIteratorByState libs = new LocusIteratorByState(readShard.iterator(), DownsamplingMethod.NONE, ReadUtils.getSamplesFromHeader(readHeader), readHeader, true);
final IntervalLocusIterator intervalLocusIterator = new IntervalLocusIterator(readShard.getIntervals().iterator());
this.locusIterator = new IntervalAlignmentContextIterator(libs, intervalLocusIterator, readHeader.getSequenceDictionary());
}

@@ -89,7 +89,7 @@ public AssemblyRegionIterator(final MultiIntervalShard<GATKRead> readShard,

// We wrap our LocusIteratorByState inside an IntervalAlignmentContextIterator so that we get empty loci
// for uncovered locations. This is critical for reproducing GATK 3.x behavior!
- this.libs = new LocusIteratorByState(readCachingIterator, DownsamplingMethod.NONE, false, ReadUtils.getSamplesFromHeader(readHeader), readHeader, true);
+ this.libs = new LocusIteratorByState(readCachingIterator, DownsamplingMethod.NONE, ReadUtils.getSamplesFromHeader(readHeader), readHeader, true);
final IntervalLocusIterator intervalLocusIterator = new IntervalLocusIterator(readShard.getIntervals().iterator());
this.locusIterator = new IntervalAlignmentContextIterator(libs, intervalLocusIterator, readHeader.getSequenceDictionary());


@@ -8,7 +8,6 @@
import org.broadinstitute.hellbender.engine.filters.ReadFilter;
import org.broadinstitute.hellbender.engine.filters.ReadFilterLibrary;
import org.broadinstitute.hellbender.engine.filters.WellformedReadFilter;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.SimpleInterval;
import org.broadinstitute.hellbender.utils.locusiterator.AlignmentContextIteratorBuilder;
import org.broadinstitute.hellbender.utils.locusiterator.LIBSDownsamplingInfo;
@@ -43,13 +42,6 @@ public abstract class LocusWalker extends WalkerBase {
@Argument(fullName = MAX_DEPTH_PER_SAMPLE_NAME, shortName = MAX_DEPTH_PER_SAMPLE_NAME, doc = "Maximum number of reads to retain per sample per locus. Reads above this threshold will be downsampled. Set to 0 to disable.", optional = true)
protected int maxDepthPerSample = defaultMaxDepthPerSample();

- /**
- * Should the LIBS keep unique reads? Tools that do should override to return {@code true}.
- */
- protected boolean keepUniqueReadListInLibs() {
-     return false;
- }

/**
* LocusWalkers requires read sources
*/
@@ -185,7 +177,6 @@ final Iterator<AlignmentContext> getAlignmentContextIterator(final CountingReadF
alignmentContextIteratorBuilder.setDownsamplingInfo(getDownsamplingInfo());
alignmentContextIteratorBuilder.setEmitEmptyLoci(emitEmptyLoci());
alignmentContextIteratorBuilder.setIncludeDeletions(includeDeletions());
- alignmentContextIteratorBuilder.setKeepUniqueReadListInLibs(keepUniqueReadListInLibs());
alignmentContextIteratorBuilder.setIncludeNs(includeNs());

return alignmentContextIteratorBuilder.build(
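For tool authors nothing changes: the protected keepUniqueReadListInLibs() hook always returned false and, per the commit message, was unused, so LocusWalker subclasses are untouched. A minimal hypothetical walker for illustration (ExampleLocusWalker is invented here, and the apply(...) signature is assumed from the LocusWalker API rather than shown in this diff):

    // Hypothetical tool, not part of this commit; it only illustrates that removing
    // the hook leaves subclasses exactly as they were.
    public final class ExampleLocusWalker extends LocusWalker {
        @Override
        public void apply(final AlignmentContext alignmentContext,
                          final ReferenceContext referenceContext,
                          final FeatureContext featureContext) {
            // Per-locus work goes here; there is no keepUniqueReadListInLibs() to override anymore.
        }
    }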

@@ -10,7 +10,6 @@
import org.broadinstitute.barclay.argparser.Argument;
import org.broadinstitute.barclay.argparser.CommandLineException;
import org.broadinstitute.hellbender.engine.*;
import org.broadinstitute.hellbender.engine.spark.datasources.ReferenceMultiSparkSource;
import org.broadinstitute.hellbender.utils.IntervalUtils;
import org.broadinstitute.hellbender.utils.SimpleInterval;
import org.broadinstitute.hellbender.utils.io.IOUtils;
@@ -19,7 +18,10 @@
import org.broadinstitute.hellbender.utils.locusiterator.LocusIteratorByState;
import org.broadinstitute.hellbender.utils.read.GATKRead;

- import java.util.*;
+ import java.util.Collections;
+ import java.util.Iterator;
+ import java.util.List;
+ import java.util.Spliterators;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

@@ -116,7 +118,6 @@ private static FlatMapFunction<Shard<GATKRead>, LocusWalkerContext> getAlignment
final AlignmentContextIteratorBuilder alignmentContextIteratorBuilder = new AlignmentContextIteratorBuilder();
alignmentContextIteratorBuilder.setDownsamplingInfo(downsamplingInfo);
alignmentContextIteratorBuilder.setEmitEmptyLoci(isEmitEmptyLoci);
- alignmentContextIteratorBuilder.setKeepUniqueReadListInLibs(false);
alignmentContextIteratorBuilder.setIncludeNs(false);

final Iterator<AlignmentContext> alignmentContextIterator = alignmentContextIteratorBuilder.build(

@@ -454,8 +454,7 @@ public static List<ReadPileup> getPileupsOverReference(final SAMFileHeader reads
final List<GATKRead> reads = new ArrayList<>(readLikelihoods.sampleEvidence(0));
reads.sort(new ReadCoordinateComparator(readsHeader)); //because we updated the reads based on the local realignments we have to re-sort or the pileups will be... unpredictable

- final LocusIteratorByState libs = new LocusIteratorByState(reads.iterator(), LocusIteratorByState.NO_DOWNSAMPLING,
-     false, samples.asSetOfSamples(), readsHeader, true);
+ final LocusIteratorByState libs = new LocusIteratorByState(reads.iterator(), LocusIteratorByState.NO_DOWNSAMPLING, samples.asSetOfSamples(), readsHeader, true);

final int startPos = activeRegionSpan.getStart();
final List<ReadPileup> pileups = new ArrayList<>(activeRegionSpan.getEnd() - startPos);

@@ -38,8 +38,7 @@ public PileupReadErrorCorrector(final double logOddsThreshold, final SAMFileHead
public final List<GATKRead> correctReads(final Collection<GATKRead> originalReads) {
final List<GATKRead> reads = originalReads.stream().map(GATKRead::deepCopy).collect(Collectors.toList());

- final Iterator<AlignmentContext> locusIterator = new LocusIteratorByState(reads.iterator(), DownsamplingMethod.NONE,
-     false, ReadUtils.getSamplesFromHeader(header), header, false);
+ final Iterator<AlignmentContext> locusIterator = new LocusIteratorByState(reads.iterator(), DownsamplingMethod.NONE, ReadUtils.getSamplesFromHeader(header), header, false);

final Map<GATKRead, List<Pair<Integer, Byte>>> potentialCorrections = reads.stream().collect(Collectors.toMap(read -> read, read -> new ArrayList<>()));


@@ -225,7 +225,7 @@ public void apply(List<VariantContext> variantContexts, ReferenceContext referen
readLikelihoods.changeEvidence(readRealignments);
writeBamOutput(assemblyResult, readLikelihoods, new HashSet<>(readLikelihoods.alleles()), regionForGenotyping.getSpan());

- final LocusIteratorByState libs = new LocusIteratorByState(regionForGenotyping.getReads().iterator(), DownsamplingMethod.NONE, false, samplesList.asListOfSamples(), bamHeader, true);
+ final LocusIteratorByState libs = new LocusIteratorByState(regionForGenotyping.getReads().iterator(), DownsamplingMethod.NONE, samplesList.asListOfSamples(), bamHeader, true);

final List<byte[]> unitigs = getUnitigs(libs);


@@ -27,7 +27,6 @@ public class AlignmentContextIteratorBuilder {
protected static final Logger logger = LogManager.getLogger(AlignmentContextIteratorBuilder.class);

private boolean isEmitEmptyLoci;
- private boolean isKeepUniqueReadListInLibs;
private boolean isIncludeDeletions;
private boolean isIncludeNs;
private LIBSDownsamplingInfo downsamplingInfo;
@@ -36,10 +35,6 @@ public void setEmitEmptyLoci(boolean emitEmptyLoci) {
isEmitEmptyLoci = emitEmptyLoci;
}

- public void setKeepUniqueReadListInLibs(boolean keepUniqueReadListInLibs) {
-     isKeepUniqueReadListInLibs = keepUniqueReadListInLibs;
- }

public void setIncludeDeletions(boolean includeDeletions) {
isIncludeDeletions = includeDeletions;
}
@@ -54,7 +49,6 @@ public void setDownsamplingInfo(LIBSDownsamplingInfo downsamplingInfo) {

public AlignmentContextIteratorBuilder() {
isEmitEmptyLoci = false;
- isKeepUniqueReadListInLibs = false;
isIncludeDeletions = true;
isIncludeNs = false;
downsamplingInfo = LocusIteratorByState.NO_DOWNSAMPLING;
@@ -75,7 +69,7 @@ public Iterator<AlignmentContext> build(final Iterator<GATKRead> readIterator, f
Utils.nonNull(readIterator, "Read iterator cannot be null");
final boolean isDefinitelyReference = (dictionary != null) && isReference ;
return createAlignmentContextIterator(intervalsForTraversal, header, readIterator, dictionary, downsamplingInfo,
-     isDefinitelyReference, isEmitEmptyLoci, isKeepUniqueReadListInLibs, isIncludeDeletions, isIncludeNs);
+     isDefinitelyReference, isEmitEmptyLoci, isIncludeDeletions, isIncludeNs);
}

/**
@@ -90,8 +84,6 @@ public Iterator<AlignmentContext> build(final Iterator<GATKRead> readIterator, f
* @param downsamplingInfo how to downsample (for {@link LocusIteratorByState})
* @param isReference the dictionary specified above is a reference, {@code false} if no reference being used or it is not a reference.
* @param emitEmptyLoci whether loci with no coverage should be emitted. In this case, the AlignmentContext will be empty (not null).
- * @param isKeepUniqueReadListInLibs if true, we will keep the unique reads from the samIterator and make them
- *                                   available via the transferReadsFromAllPreviousPileups interface (this parameter is specific to {@link LocusIteratorByState})
* @param isIncludeDeletions include reads with deletion on the loci in question
* @param isIncludeNs include reads with N on the loci in question
* @return iterator that produces AlignmentContexts ready for consumption (e.g. by a {@link org.broadinstitute.hellbender.engine.LocusWalker})
@@ -103,7 +95,6 @@ private static Iterator<AlignmentContext> createAlignmentContextIterator(final L
final LIBSDownsamplingInfo downsamplingInfo,
final boolean isReference,
boolean emitEmptyLoci,
- boolean isKeepUniqueReadListInLibs,
boolean isIncludeDeletions,
boolean isIncludeNs) {

@@ -113,7 +104,7 @@ private static Iterator<AlignmentContext> createAlignmentContextIterator(final L
.collect(Collectors.toSet());

// get the LIBS
- final LocusIteratorByState libs = new LocusIteratorByState(readIterator, downsamplingInfo, isKeepUniqueReadListInLibs, samples, header, isIncludeDeletions, isIncludeNs);
+ final LocusIteratorByState libs = new LocusIteratorByState(readIterator, downsamplingInfo, samples, header, isIncludeDeletions, isIncludeNs);

List<SimpleInterval> finalIntervals = intervalsForTraversal;
validateEmitEmptyLociParameters(emitEmptyLoci, dictionary, intervalsForTraversal, isReference);
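With the flag gone, AlignmentContextIteratorBuilder carries three boolean switches plus the downsampling info. A hedged usage sketch (a fragment only; the full parameter list of build(...) is truncated in the hunk above, so the call itself is left as a comment):

    // Sketch of builder configuration after this commit; the chosen values are illustrative.
    final AlignmentContextIteratorBuilder builder = new AlignmentContextIteratorBuilder();
    builder.setDownsamplingInfo(LocusIteratorByState.NO_DOWNSAMPLING);
    builder.setEmitEmptyLoci(true);     // emit an empty (not null) AlignmentContext at uncovered loci
    builder.setIncludeDeletions(true);  // include reads with a deletion at the locus
    builder.setIncludeNs(false);        // exclude reads with an N at the locus
    // builder.build(readIterator, ...) then yields the Iterator<AlignmentContext>; the remaining
    // arguments are not reproduced here because the signature is cut off in the diff above.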
