Skip to content
This repository was archived by the owner on Nov 9, 2019. It is now read-only.

Commit 2b64845

Browse files
committed
Using Utils.stream and Utils.validate
1 parent 455b790 commit 2b64845

File tree

40 files changed

+127
-272
lines changed

40 files changed

+127
-272
lines changed

src/main/java/org/broadinstitute/hellbender/engine/AbstractConcordanceWalker.java

Lines changed: 3 additions & 7 deletions
Original file line number · Diff line number · Diff line change
@@ -3,14 +3,11 @@
33
import htsjdk.samtools.SAMSequenceDictionary;
44
import htsjdk.samtools.util.Locatable;
55
import htsjdk.samtools.util.PeekableIterator;
6-
import htsjdk.samtools.util.SequenceUtil;
76
import htsjdk.variant.variantcontext.VariantContext;
87
import htsjdk.variant.variantcontext.VariantContextComparator;
98
import htsjdk.variant.vcf.VCFHeader;
109
import org.apache.commons.collections4.Predicate;
1110
import org.apache.commons.collections4.iterators.FilterIterator;
12-
import org.apache.commons.lang3.tuple.ImmutablePair;
13-
import org.apache.commons.lang3.tuple.Pair;
1411
import org.broadinstitute.barclay.argparser.Argument;
1512
import org.broadinstitute.hellbender.exceptions.GATKException;
1613
import org.broadinstitute.hellbender.tools.walkers.validation.ConcordanceState;
@@ -23,7 +20,6 @@
2320
import java.util.Optional;
2421
import java.util.Spliterator;
2522
import java.util.Spliterators;
26-
import java.util.stream.StreamSupport;
2723

2824
/**
2925
* Base class for concordance walkers, which process one variant at a time from one or more sources of variants,
@@ -88,10 +84,10 @@ protected Predicate<VariantContext> makeVariantFilter() {
8884
return vc -> !vc.isFiltered();
8985
}
9086

91-
private Spliterator<TruthVersusEval> getSpliteratorForDrivingVariants() {
87+
private Iterator<TruthVersusEval> getIteratorForDrivingVariants() {
9288
final Iterator<VariantContext> truthIterator = new FilterIterator<>(truthVariants.iterator(), makeVariantFilter());
9389
final Iterator<VariantContext> evalIterator = new FilterIterator<>(evalVariants.iterator(), makeVariantFilter());
94-
return new ConcordanceIterator(truthIterator, evalIterator).spliterator();
90+
return new ConcordanceIterator(truthIterator, evalIterator);
9591
}
9692

9793
// ********** The basic traversal structure of GATKTool
@@ -118,7 +114,7 @@ protected final void onStartup() {
118114
@Override
119115
public final void traverse() {
120116
// Process each variant in the input stream.
121-
StreamSupport.stream(getSpliteratorForDrivingVariants(), false)
117+
Utils.stream(getIteratorForDrivingVariants())
122118
.forEach(truthVersusEval -> {
123119
final SimpleInterval variantInterval = new SimpleInterval(truthVersusEval);
124120
apply(truthVersusEval, new ReadsContext(reads, variantInterval), new ReferenceContext(reference, variantInterval));

src/main/java/org/broadinstitute/hellbender/tools/HaplotypeCallerSpark.java

Lines changed: 3 additions & 10 deletions
Original file line number · Diff line number · Diff line change
@@ -45,7 +45,6 @@
4545
import java.util.function.Function;
4646
import java.util.stream.Collectors;
4747
import java.util.stream.Stream;
48-
import java.util.stream.StreamSupport;
4948

5049
/**
5150
* Call germline SNPs and indels via local re-assembly of haplotypes
@@ -178,15 +177,10 @@ private static FlatMapFunction<Iterator<Tuple2<AssemblyRegion, SimpleInterval>>,
178177
//HaplotypeCallerEngine isn't serializable but is expensive to instantiate, so construct and reuse one for every partition
179178
final ReferenceMultiSourceAdapter referenceReader = new ReferenceMultiSourceAdapter(referenceBroadcast.getValue(), authHolder);
180179
final HaplotypeCallerEngine hcEngine = new HaplotypeCallerEngine(hcArgsBroadcast.value(), header, referenceReader);
181-
return iteratorToStream(regionAndIntervals).flatMap(regionToVariants(hcEngine)).iterator();
180+
return Utils.stream(regionAndIntervals).flatMap(regionToVariants(hcEngine)).iterator();
182181
};
183182
}
184183

185-
private static <T> Stream<T> iteratorToStream(Iterator<T> iterator) {
186-
Iterable<T> regionsIterable = () -> iterator;
187-
return StreamSupport.stream(regionsIterable.spliterator(), false);
188-
}
189-
190184
private static Function<Tuple2<AssemblyRegion, SimpleInterval>, Stream<? extends VariantContext>> regionToVariants(HaplotypeCallerEngine hcEngine) {
191185
return regionAndInterval -> {
192186
final List<VariantContext> variantContexts = hcEngine.callRegion(regionAndInterval._1(), new FeatureContext());
@@ -258,7 +252,7 @@ private static FlatMapFunction<Iterator<Shard<GATKRead>>, Tuple2<AssemblyRegion,
258252
final ReferenceMultiSourceAdapter referenceSource = new ReferenceMultiSourceAdapter(referenceMultiSource, authHolder);
259253
final HaplotypeCallerEngine hcEngine = new HaplotypeCallerEngine(hcArgsBroadcast.value(), header, referenceSource);
260254

261-
return iteratorToStream(shards).flatMap(shardToRegion(assemblyArgs, header, referenceSource, hcEngine)).iterator();
255+
return Utils.stream(shards).flatMap(shardToRegion(assemblyArgs, header, referenceSource, hcEngine)).iterator();
262256
};
263257
}
264258

@@ -279,8 +273,7 @@ private static Function<Shard<GATKRead>, Stream<? extends Tuple2<AssemblyRegion,
279273
assemblyArgs.assemblyRegionPadding, assemblyArgs.activeProbThreshold,
280274
assemblyArgs.maxProbPropagationDistance);
281275

282-
return StreamSupport.stream(assemblyRegions.spliterator(), false)
283-
.map(a -> new Tuple2<>(a, shard.getInterval()));
276+
return Utils.stream(assemblyRegions).map(a -> new Tuple2<>(a, shard.getInterval()));
284277
};
285278
}
286279

src/main/java/org/broadinstitute/hellbender/tools/exome/CombineReadCounts.java

Lines changed: 2 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -9,6 +9,7 @@
99
import org.broadinstitute.hellbender.exceptions.GATKException;
1010
import org.broadinstitute.hellbender.exceptions.UserException;
1111
import org.broadinstitute.hellbender.utils.SimpleInterval;
12+
import org.broadinstitute.hellbender.utils.Utils;
1213
import org.broadinstitute.hellbender.utils.tsv.DataLine;
1314
import org.broadinstitute.hellbender.utils.tsv.TableColumnCollection;
1415
import org.broadinstitute.hellbender.utils.tsv.TableReader;
@@ -504,9 +505,7 @@ private void composeCountColumnNamesAndSourceIndexMapping() {
504505
for (final TableReader<ReadCountRecord> reader : readers) {
505506
unsortedCountColumnNames.addAll(readCountColumnNames(reader.columns()));
506507
}
507-
if (unsortedCountColumnNames.isEmpty()) {
508-
throw new IllegalStateException("there must be at least one count column");
509-
}
508+
Utils.validate(!unsortedCountColumnNames.isEmpty(), "there must be at least one count column");
510509
countColumnSourceIndexMap = IntStream.range(0, unsortedCountColumnNames.size()).boxed()
511510
.sorted(Comparator.comparing(unsortedCountColumnNames::get))
512511
.mapToInt(Integer::intValue).toArray();

src/main/java/org/broadinstitute/hellbender/tools/exome/ReadCountCollectionUtils.java

Lines changed: 1 addition & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -151,9 +151,7 @@ public static TableWriter<ReadCountRecord> writerWithIntervals(final Writer writ
151151
@Override
152152
protected void composeLine(final ReadCountRecord record, final DataLine dataLine) {
153153
final SimpleInterval interval = record.getTarget().getInterval();
154-
if (interval == null) {
155-
throw new IllegalStateException("invalid combination of targets with and without intervals defined");
156-
}
154+
Utils.validate(interval != null, "invalid combination of targets with and without intervals defined");
157155
dataLine.append(interval.getContig())
158156
.append(interval.getStart())
159157
.append(interval.getEnd())

src/main/java/org/broadinstitute/hellbender/tools/exome/Target.java

Lines changed: 3 additions & 9 deletions
Original file line number · Diff line number · Diff line change
@@ -98,25 +98,19 @@ public String getName() {
9898
// Three methods to implement Locatable
9999
@Override
100100
public String getContig() {
101-
if (interval == null) {
102-
throw new IllegalStateException("the target does not have an interval assigned");
103-
}
101+
Utils.validate(interval != null, "the target does not have an interval assigned");
104102
return interval.getContig();
105103
}
106104

107105
@Override
108106
public int getStart() {
109-
if (interval == null) {
110-
throw new IllegalStateException("the target does not have an interval assigned");
111-
}
107+
Utils.validate(interval != null, "the target does not have an interval assigned");
112108
return interval.getStart();
113109
}
114110

115111
@Override
116112
public int getEnd() {
117-
if (interval == null) {
118-
throw new IllegalStateException("the target does not have an interval assigned");
119-
}
113+
Utils.validate(interval != null, "the target does not have an interval assigned");
120114
return interval.getEnd();
121115
}
122116

src/main/java/org/broadinstitute/hellbender/tools/exome/convertbed/ConvertBedToTargetFile.java

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -14,11 +14,11 @@
1414
import org.broadinstitute.hellbender.tools.exome.Target;
1515
import org.broadinstitute.hellbender.tools.exome.TargetWriter;
1616
import org.broadinstitute.hellbender.utils.SimpleInterval;
17+
import org.broadinstitute.hellbender.utils.Utils;
1718

1819
import java.io.File;
1920
import java.util.List;
2021
import java.util.stream.Collectors;
21-
import java.util.stream.StreamSupport;
2222

2323
@CommandLineProgramProperties(
2424
summary = "Converts a target bed file to the target file format. Empty files will probably fail. Drops bed file columns other than the first four.",
@@ -55,7 +55,7 @@ protected Object doWork() {
5555
final Class<? extends Feature> featureType = codec.getFeatureType();
5656
if (BEDFeature.class.isAssignableFrom(featureType)) {
5757
final FeatureDataSource<? extends BEDFeature> source = new FeatureDataSource<>(inputBedFile);
58-
final List<Target> targets = StreamSupport.stream(source.spliterator(), false).map(ConvertBedToTargetFile::createTargetFromBEDFeature)
58+
final List<Target> targets = Utils.stream(source).map(ConvertBedToTargetFile::createTargetFromBEDFeature)
5959
.collect(Collectors.toList());
6060
TargetWriter.writeTargetsToFile(outFile, targets);
6161
} else {

src/main/java/org/broadinstitute/hellbender/tools/exome/eval/EvaluateCopyNumberTriStateCalls.java

Lines changed: 1 addition & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -33,7 +33,6 @@
3333
import java.util.function.Function;
3434
import java.util.stream.Collectors;
3535
import java.util.stream.Stream;
36-
import java.util.stream.StreamSupport;
3736

3837
/**
3938
* Tool to evaluate the output of {@link XHMMSegmentCaller}.
@@ -651,8 +650,7 @@ private CopyNumberTriStateAllele copyNumberToTrueAllele(final int cn) {
651650
* @return never {@code null}, potentially immutably
652651
*/
653652
private List<VariantContext> variantQueryToList(final VCFFileReader reader, final Locatable interval) {
654-
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(reader.query(interval.getContig(),
655-
interval.getStart(), interval.getEnd()), Spliterator.NONNULL),false)
653+
return Utils.stream(reader.query(interval.getContig(), interval.getStart(), interval.getEnd()))
656654
.sorted(IntervalUtils.LEXICOGRAPHICAL_ORDER_COMPARATOR)
657655
.collect(Collectors.toList());
658656
}

src/main/java/org/broadinstitute/hellbender/tools/exome/eval/VariantEvaluationContext.java

Lines changed: 2 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -69,9 +69,8 @@ private double[] getAlleleDoubleArrayFromAlternativeAlleleArrayAttribute(final S
6969
final double[] alternativeAllelesFrequencies = GATKProtectedVariantContextUtils.getAttributeAsDoubleArray(this, key,
7070
() -> { return new double[alleles.size() - 1]; }, missingValue);
7171
final double[] result = new double[alleles.size()];
72-
if (alternativeAllelesFrequencies.length != alleles.size() - 1) {
73-
throw new IllegalStateException(String.format("We expect the %s Info annotation to contain an array of %d elements (alt. allele count)", key, alleles.size() - 1));
74-
}
72+
Utils.validate(alternativeAllelesFrequencies.length == alleles.size() - 1,
73+
() -> String.format("We expect the %s Info annotation to contain an array of %d elements (alt. allele count)", key, alleles.size() - 1));
7574
System.arraycopy(alternativeAllelesFrequencies, 0, result, 1, alternativeAllelesFrequencies.length);
7675
final double nonRefSum = MathUtils.sum(result);
7776
Utils.validateArg(nonRefSum <= 1.0, () -> String.format("The sum of element on annotation %s cannot greater than 1.0: %g", key, nonRefSum));

src/main/java/org/broadinstitute/hellbender/tools/exome/eval/VariantEvaluationContextBuilder.java

Lines changed: 1 addition & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -81,9 +81,7 @@ public VariantEvaluationContextBuilder alleles(final String... alleleStrings) {
8181
@Override
8282
public List<Allele> getAlleles() {
8383
final List<Allele> result = super.getAlleles();
84-
if (result == null) {
85-
throw new IllegalStateException("you must set the alleles before calling getAlleles");
86-
}
84+
Utils.validate(result != null, "you must set the alleles before calling getAlleles");
8785
return result;
8886
}
8987

src/main/java/org/broadinstitute/hellbender/tools/exome/sexgenotyper/SexGenotypeData.java

Lines changed: 5 additions & 10 deletions
Original file line number · Diff line number · Diff line change
@@ -1,6 +1,7 @@
11
package org.broadinstitute.hellbender.tools.exome.sexgenotyper;
22

33
import org.broadinstitute.hellbender.exceptions.UserException;
4+
import org.broadinstitute.hellbender.utils.Utils;
45
import org.broadinstitute.hellbender.utils.param.ParamUtils;
56

67
import javax.annotation.Nonnull;
@@ -60,11 +61,8 @@ public SexGenotypeData(@Nonnull final String sampleName,
6061
* @return set of genotype string identifiers
6162
*/
6263
public Set<String> getSexGenotypesSet() {
63-
if (logLikelihoodMap != null) {
64-
return logLikelihoodMap.keySet();
65-
} else {
66-
throw new IllegalStateException("Genotyping inference data is not available");
67-
}
64+
Utils.validate(logLikelihoodMap != null, "Genotyping inference data is not available");
65+
return logLikelihoodMap.keySet();
6866
}
6967

7068
/**
@@ -73,11 +71,8 @@ public Set<String> getSexGenotypesSet() {
7371
* @return log likelihood
7472
*/
7573
public double getLogLikelihoodPerGenotype(@Nonnull final String genotype) {
76-
if (logLikelihoodMap != null) {
77-
return logLikelihoodMap.get(genotype);
78-
} else {
79-
throw new IllegalStateException("Genotyping inference data is not available");
80-
}
74+
Utils.validate(logLikelihoodMap != null, "Genotyping inference data is not available");
75+
return logLikelihoodMap.get(genotype);
8176
}
8277

8378
/**

0 commit comments

Comments (0)