pretty code format
Schaudge committed Oct 16, 2024
1 parent e162299 commit d79b7cf
Showing 4 changed files with 28 additions and 36 deletions.
VariantAnnotatorEngine.java
@@ -4,14 +4,12 @@
import htsjdk.variant.vcf.*;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
- import org.broadinstitute.hellbender.cmdline.StandardArgumentDefinitions;
import org.broadinstitute.hellbender.engine.FeatureContext;
import org.broadinstitute.hellbender.engine.FeatureDataSource;
import org.broadinstitute.hellbender.engine.FeatureInput;
import org.broadinstitute.hellbender.engine.ReferenceContext;
import org.broadinstitute.hellbender.exceptions.GATKException;
import org.broadinstitute.hellbender.exceptions.UserException;
- import org.broadinstitute.hellbender.tools.walkers.GenotypeGVCFsAnnotationArgumentCollection;
import org.broadinstitute.hellbender.tools.walkers.annotator.allelespecific.ReducibleAnnotation;
import org.broadinstitute.hellbender.tools.walkers.annotator.allelespecific.ReducibleAnnotationData;
import org.broadinstitute.hellbender.utils.IntervalUtils;
@@ -26,7 +24,6 @@

import java.util.*;
import java.util.function.Predicate;
- import java.util.stream.Collectors;

/**
* The class responsible for computing annotations for variants.
@@ -39,8 +36,8 @@ public final class VariantAnnotatorEngine {
private final List<GenotypeAnnotation> genotypeAnnotations;
private final List<JumboInfoAnnotation> jumboInfoAnnotations;
private final List<JumboGenotypeAnnotation> jumboGenotypeAnnotations;
- private Set<String> reducibleKeys;
- private List<VAExpression> expressions = new ArrayList<>();
+ private final Set<String> reducibleKeys;
+ private final List<VAExpression> expressions = new ArrayList<>();

private final VariantOverlapAnnotator variantOverlapAnnotator;
private boolean expressionAlleleConcordance;
@@ -99,9 +96,7 @@ public VariantAnnotatorEngine(final Collection<Annotation> annotationList,
}
for (InfoFieldAnnotation annot : infoAnnotations) {
if (annot instanceof ReducibleAnnotation) {
- for (final String rawKey : ((ReducibleAnnotation) annot).getRawKeyNames()) {
-     reducibleKeys.add(rawKey);
- }
+ reducibleKeys.addAll(((ReducibleAnnotation) annot).getRawKeyNames());
}
}
}
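The loop-to-`addAll` change above is behavior-preserving: `Collection.addAll` performs the same element-by-element insertion. A minimal, self-contained sketch (the `RawKeyed` interface is a hypothetical stand-in for `ReducibleAnnotation`):

```java
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class AddAllDemo {
    // Hypothetical stand-in for ReducibleAnnotation#getRawKeyNames
    interface RawKeyed {
        List<String> getRawKeyNames();
    }

    public static void main(String[] args) {
        RawKeyed annot = () -> List.of("AS_RAW_MQ", "AS_RAW_ReadPosRankSum");

        // Before: copy element by element
        Set<String> loopCopy = new HashSet<>();
        for (String rawKey : annot.getRawKeyNames()) {
            loopCopy.add(rawKey);
        }

        // After: one bulk call, same result
        Set<String> bulkCopy = new HashSet<>();
        bulkCopy.addAll(annot.getRawKeyNames());

        System.out.println(loopCopy.equals(bulkCopy)); // true
    }
}
```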
@@ -119,7 +114,7 @@ private VariantOverlapAnnotator initializeOverlapAnnotator(final FeatureInput<Va
for ( final FeatureInput<VariantContext> fi : featureInputs) {
overlaps.put(fi, fi.getName());
}
- if (overlaps.values().contains(VCFConstants.DBSNP_KEY)){
+ if (overlaps.containsValue(VCFConstants.DBSNP_KEY)){
throw new GATKException("The map of overlaps must not contain " + VCFConstants.DBSNP_KEY);
}
if (dbSNPInput != null) {
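`overlaps.values().contains(...)` and `overlaps.containsValue(...)` are equivalent for the standard `Map` implementations; the direct call simply states the intent. A throwaway illustration:

```java
import java.util.HashMap;
import java.util.Map;

public class ContainsValueDemo {
    public static void main(String[] args) {
        Map<String, String> overlaps = new HashMap<>();
        overlaps.put("comp", "dbsnp");

        // Identical results; the second reads more directly
        System.out.println(overlaps.values().contains("dbsnp")); // true
        System.out.println(overlaps.containsValue("dbsnp"));     // true
    }
}
```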
@@ -245,8 +240,7 @@ public Map<String, Object> combineAnnotations(final List<Allele> allelesList, Ma

// go through all the requested reducible info annotationTypes
for (final InfoFieldAnnotation annotationType : infoAnnotations) {
- if (annotationType instanceof ReducibleAnnotation) {
-     ReducibleAnnotation currentASannotation = (ReducibleAnnotation) annotationType;
+ if (annotationType instanceof ReducibleAnnotation currentASannotation) {
for (final String rawKey : currentASannotation.getRawKeyNames()) {
//here we're assuming that each annotation combines data corresponding to its primary raw key, which is index zero
//AS_QD only needs to be combined if it's relying on its primary raw key
Expand All @@ -258,7 +252,7 @@ public Map<String, Object> combineAnnotations(final List<Allele> allelesList, Ma
combinedAnnotations.putAll(annotationsFromCurrentType);
}
//remove all the raw keys for the annotation because we already used all of them in combineRawData
- annotationMap.keySet().removeAll(currentASannotation.getRawKeyNames());
+ currentASannotation.getRawKeyNames().forEach(annotationMap.keySet()::remove);
}
}
}
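Two idioms appear in this hunk. `instanceof ReducibleAnnotation currentASannotation` uses Java 16 pattern matching to fuse the type test, the cast, and the variable binding. The switch from `keySet().removeAll(list)` to `list.forEach(keySet()::remove)` also sidesteps a long-standing quirk of `AbstractSet.removeAll`, which iterates whichever side is smaller: a `List` argument larger than the set degenerates into repeated linear `List.contains` calls. A self-contained sketch of both, with hypothetical stand-in types:

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class PatternAndRemoveDemo {
    // Hypothetical stand-ins shaped like the annotation hierarchy
    interface Annotation {}
    record Reducible(List<String> rawKeyNames) implements Annotation {}
    record Plain() implements Annotation {}

    public static void main(String[] args) {
        Map<String, Object> annotationMap = new HashMap<>();
        annotationMap.put("AS_RAW_MQ", 1);
        annotationMap.put("AS_RAW_ReadPosRankSum", 2);
        annotationMap.put("DP", 30);

        List<Annotation> annotations =
                List.of(new Reducible(List.of("AS_RAW_MQ", "AS_RAW_ReadPosRankSum")), new Plain());

        for (Annotation annot : annotations) {
            // Java 16 pattern matching: test, cast, and bind in one step
            if (annot instanceof Reducible reducible) {
                // Remove each raw key individually: O(#keys) removals, independent
                // of the relative sizes of the key list and the map's key set
                reducible.rawKeyNames().forEach(annotationMap.keySet()::remove);
            }
        }
        System.out.println(annotationMap); // {DP=30}
    }
}
```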
@@ -285,9 +279,7 @@ public VariantContext finalizeAnnotations(VariantContext vc, VariantContext orig

// go through all the requested info annotationTypes
for (final InfoFieldAnnotation annotationType : infoAnnotations) {
- if (annotationType instanceof ReducibleAnnotation) {
-
-     ReducibleAnnotation currentASannotation = (ReducibleAnnotation) annotationType;
+ if (annotationType instanceof ReducibleAnnotation currentASannotation) {

final Map<String, Object> annotationsFromCurrentType = currentASannotation.finalizeRawData(vc, originalVC);
if (annotationsFromCurrentType != null) {
@@ -317,8 +309,7 @@
final VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(variantAnnotations);

// annotate genotypes, creating another new VC in the process
- final VariantContext annotated = builder.make();
- return annotated;
+ return builder.make();
}

/**
@@ -401,8 +392,7 @@ private Map<String, Object> addInfoAnnotations(VariantContext vc, FeatureContext
//TODO see #7543. This spiderweb of cases should be addressed as part of a more comprehensive refactor of the annotation code with JumboAnnotations.
if ((fragmentLikelihoods.isPresent() && haplotypeLikelihoods.isPresent()) || readHaplotypeAlleleLikelihoods.isPresent()) {
jumboInfoAnnotations.stream()
- .map(annot -> annot.annotate(ref, features, vc, likelihoods,
-     fragmentLikelihoods.isPresent()? fragmentLikelihoods.get() : null,
+ .map(annot -> annot.annotate(ref, features, vc, likelihoods, fragmentLikelihoods.orElse(null),
haplotypeLikelihoods.isPresent()? haplotypeLikelihoods.get(): readHaplotypeAlleleLikelihoods.get()))
.forEach(infoAnnotMap::putAll);
}
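`fragmentLikelihoods.isPresent() ? fragmentLikelihoods.get() : null` is exactly what `Optional.orElse(null)` does, so the rewrite is behavior-preserving. A compact illustration:

```java
import java.util.Optional;

public class OrElseDemo {
    public static void main(String[] args) {
        Optional<String> present = Optional.of("likelihoods");
        Optional<String> absent = Optional.empty();

        // The ternary and orElse(null) are equivalent
        System.out.println(present.isPresent() ? present.get() : null); // likelihoods
        System.out.println(present.orElse(null));                       // likelihoods
        System.out.println(absent.orElse(null));                        // null
    }
}
```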
@@ -416,7 +406,7 @@ private GenotypesContext annotateGenotypes(final ReferenceContext ref,
final Optional<AlleleLikelihoods<Fragment, Allele>> fragmentLikelihoods,
final Optional<AlleleLikelihoods<Fragment, Haplotype>> haplotypeLikelihoods,
final Predicate<VariantAnnotation> addAnnot) {
- if (!jumboGenotypeAnnotations.isEmpty() && (!fragmentLikelihoods.isPresent() || !haplotypeLikelihoods.isPresent())) {
+ if (!jumboGenotypeAnnotations.isEmpty() && (fragmentLikelihoods.isEmpty() || haplotypeLikelihoods.isEmpty())) {
jumboAnnotationsLogger.warn("Jumbo genotype annotations requested but fragment likelihoods or haplotype likelihoods were not given.");
}
if ( genotypeAnnotations.isEmpty() && jumboGenotypeAnnotations.isEmpty()) {
@@ -471,7 +461,7 @@ public VAExpression(String fullExpression, List<FeatureInput<VariantContext>> da

final String bindingName = fullExpression.substring(0, indexOfDot);
Optional<FeatureInput<VariantContext>> binding = dataSourceList.stream().filter(ds -> ds.getName().equals(bindingName)).findFirst();
- if (!binding.isPresent()) {
+ if (binding.isEmpty()) {
throw new UserException.BadInput("The requested expression '"+fullExpression+"' is invalid, could not find vcf input file");
}
this.binding = binding.get();
@@ -482,7 +472,7 @@ public void sethInfo(VCFInfoHeaderLine hInfo) {
}
}

- protected List<VAExpression> getRequestedExpressions() { return expressions; }
+ private List<VAExpression> getRequestedExpressions() { return expressions; }

// select specific expressions to use
public void addExpressions(Set<String> expressionsToUse, List<FeatureInput<VariantContext>> dataSources, boolean expressionAlleleConcordance) {//, Set<VCFHeaderLines>) {
@@ -524,7 +514,7 @@ private void annotateExpressions(final VariantContext vc,
} else if (expression.fieldName.equals("ALT")) {
attributes.put(expression.fullName, expressionVC.getAlternateAllele(0).getDisplayString());
} else if (expression.fieldName.equals("FILTER")) {
- final String filterString = expressionVC.isFiltered() ? expressionVC.getFilters().stream().collect(Collectors.joining(",")) : "PASS";
+ final String filterString = expressionVC.isFiltered() ? String.join(",", expressionVC.getFilters()) : "PASS";
attributes.put(expression.fullName, filterString);
} else if (expressionVC.hasAttribute(expression.fieldName)) {
// find the info field
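The FILTER change above swaps a stream pipeline for `String.join`, which accepts any `Iterable` of `CharSequence` directly; that is also what let the now-unused `java.util.stream.Collectors` import be dropped from this file. A minimal sketch (the filter names are arbitrary examples):

```java
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;

public class JoinDemo {
    public static void main(String[] args) {
        // TreeSet gives a deterministic iteration order for this demo
        Set<String> filters = new TreeSet<>(Set.of("germline_risk", "weak_evidence"));

        String viaStream = filters.stream().collect(Collectors.joining(","));
        String viaJoin = String.join(",", filters);

        System.out.println(viaStream); // germline_risk,weak_evidence
        System.out.println(viaJoin);   // germline_risk,weak_evidence
    }
}
```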
Mutect2Engine.java
@@ -123,8 +123,8 @@ public final class Mutect2Engine implements AssemblyRegionEvaluator, AutoCloseab

private final Optional<F1R2CountsCollector> f1R2CountsCollector;

- private PileupQualBuffer tumorPileupQualBuffer;
- private PileupQualBuffer normalPileupQualBuffer;
+ private final PileupQualBuffer tumorPileupQualBuffer;
+ private final PileupQualBuffer normalPileupQualBuffer;

/**
* Create and initialize a new HaplotypeCallerEngine given a collection of HaplotypeCaller arguments, a reads header,
@@ -454,7 +454,7 @@ public void close() {
likelihoodCalculationEngine.close();
aligner.close();
haplotypeBAMWriter.ifPresent(HaplotypeBAMWriter::close);
- assembledEventMapVcfOutputWriter.ifPresent(writer -> {assembledEventMapVariants.get().forEach(writer::add); writer.close();});
+ assembledEventMapVcfOutputWriter.ifPresent(writer -> {assembledEventMapVariants.orElseThrow().forEach(writer::add); writer.close();});
referenceReader.close();
genotypingEngine.close();
}
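`orElseThrow()` (Java 10) is an exact synonym for the no-argument `Optional.get()`: both return the value or throw `NoSuchElementException`, but the name announces the failure mode at the call site. A sketch:

```java
import java.util.NoSuchElementException;
import java.util.Optional;

public class OrElseThrowDemo {
    public static void main(String[] args) {
        Optional<String> payload = Optional.of("assembled-events.vcf");

        // Same semantics, clearer name
        System.out.println(payload.get());         // assembled-events.vcf
        System.out.println(payload.orElseThrow()); // assembled-events.vcf

        try {
            Optional.empty().orElseThrow();
        } catch (NoSuchElementException e) {
            System.out.println("empty Optional throws: " + e.getMessage());
        }
    }
}
```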
@@ -493,7 +493,7 @@ public ActivityProfileState isActive(final AlignmentContext context, final Refer
final ReadPileup normalPileup = pileup.makeFilteredPileup(pe -> isNormalSample(ReadUtils.getSampleName(pe.getRead(), header)));
normalPileupQualBuffer.accumulateQuals(normalPileup, refBase, MTAC.pcrSnvQual);
final Pair<Integer, ByteArrayList> bestNormalAltAllele = normalPileupQualBuffer.likeliestIndexAndQuals();
- if (bestNormalAltAllele.getLeft() == bestNormalAltAllele.getLeft()) {
+ if (Objects.equals(bestNormalAltAllele.getLeft(), bestNormalAltAllele.getLeft())) {
final int normalAltCount = bestNormalAltAllele.getRight().size();
final double normalQualSum = normalPileupQualBuffer.qualSum(bestNormalAltAllele.getLeft());
if (normalAltCount > normalPileup.size() * MAX_ALT_FRACTION_IN_NORMAL && normalQualSum > MAX_NORMAL_QUAL_SUM) {
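One caveat on this hunk: both the old and the new condition compare `bestNormalAltAllele.getLeft()` with itself, so the test is always true; the intended comparison presumably involves the tumor buffer's best index, but that is not visible in the excerpt. What `Objects.equals` genuinely fixes is the boxed-`Integer` trap: `==` on two `Integer` objects compares references, which only coincidentally works inside the small-value cache. A sketch with made-up variable names:

```java
import java.util.Objects;

public class BoxedCompareDemo {
    public static void main(String[] args) {
        Integer tumorBest = 200;  // outside the default Integer cache (-128..127)
        Integer normalBest = 200;

        // false on default JVM settings: reference (identity) comparison
        System.out.println(tumorBest == normalBest);
        // true: null-safe value comparison
        System.out.println(Objects.equals(tumorBest, normalBest));

        Integer small = 100, alsoSmall = 100;
        System.out.println(small == alsoSmall); // true, but only via the cache
    }
}
```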
@@ -549,7 +549,7 @@ public static List<Double> getAttributeAsDoubleList(final VariantContext vc, fin
return ((Number) x).doubleValue();
} else {
String string = (String) x;
- return string.equals(VCFConstants.MISSING_VALUE_v4) ? defaultValue : Double.valueOf(string); // throws an exception if this isn't a string
+ return string.equals(VCFConstants.MISSING_VALUE_v4) ? defaultValue : Double.parseDouble(string); // throws an exception if this isn't a string
}
}).collect(Collectors.toList());
}
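`Double.parseDouble` yields a primitive `double`, while `Double.valueOf` creates a boxed `Double` that would immediately be unboxed here; the numeric result is identical. Illustration:

```java
public class ParseDoubleDemo {
    public static void main(String[] args) {
        String s = "2.5";

        double viaParse = Double.parseDouble(s); // primitive, no boxing
        double viaValueOf = Double.valueOf(s);   // boxes, then auto-unboxes

        System.out.println(viaParse == viaValueOf); // true
    }
}
```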
SomaticGenotypingEngine.java
@@ -178,7 +178,7 @@ public CalledHaplotypes callMutations(
continue;
}

- final List<Allele> allAllelesToEmit = ListUtils.union(Arrays.asList(mergedVC.getReference()), tumorAltAlleles);
+ final List<Allele> allAllelesToEmit = ListUtils.union(Collections.singletonList(mergedVC.getReference()), tumorAltAlleles);

final Map<String, Object> negativeLogPopulationAFAnnotation =
getNegativeLogPopulationAFAnnotation(featureContext.getValues(MTAC.germlineResource, loc),
@@ -191,7 +191,7 @@

if (hasNormal) {
callVcb.attribute(GATKVCFConstants.NORMAL_ARTIFACT_LOG_10_ODDS_KEY,
- Arrays.stream(normalArtifactLogOdds.asDoubleArray(tumorAltAlleles)).map(x->MathUtils.logToLog10(x)).toArray());
+ Arrays.stream(normalArtifactLogOdds.asDoubleArray(tumorAltAlleles)).map(MathUtils::logToLog10).toArray());
callVcb.attribute(GATKVCFConstants.NORMAL_LOG_10_ODDS_KEY,
Arrays.stream(normalLogOdds.asDoubleArray(tumorAltAlleles)).map(MathUtils::logToLog10).toArray());
}
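`x -> MathUtils.logToLog10(x)` and `MathUtils::logToLog10` compile to the same call; the method reference just removes lambda boilerplate. A standalone version with a local stand-in for GATK's `MathUtils.logToLog10` (natural log to log10):

```java
import java.util.Arrays;

public class MethodRefDemo {
    // Stand-in for MathUtils.logToLog10: convert a natural log to log10
    static double logToLog10(double ln) {
        return ln * Math.log10(Math.E);
    }

    public static void main(String[] args) {
        double[] logOdds = {0.0, Math.log(10), Math.log(100)};

        double[] viaLambda = Arrays.stream(logOdds).map(x -> logToLog10(x)).toArray();
        double[] viaRef = Arrays.stream(logOdds).map(MethodRefDemo::logToLog10).toArray();

        System.out.println(Arrays.equals(viaLambda, viaRef)); // true: both are ~[0.0, 1.0, 2.0]
    }
}
```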
@@ -250,7 +250,7 @@ public CalledHaplotypes callMutations(
// this annotation is irrelevant
if (haplotypesByEvent.containsKey(event)) {
final Haplotype bestHaplotype = haplotypesByEvent.get(event).stream().
- max(Comparator.comparingInt(h -> haplotypeSupportCounts.getOrDefault(h, new MutableInt(0)).intValue())).get();
+ max(Comparator.comparingInt(h -> haplotypeSupportCounts.getOrDefault(h, new MutableInt(0)).intValue())).orElseThrow();

eventCountAnnotations.computeIfAbsent(outputCall, vc -> new ArrayList<>())
.add((int) bestHaplotype.getEventMap().getEvents().stream().filter(potentialSomaticEventsInRegion::contains).count());
@@ -365,7 +365,7 @@ public double visit(int row, int column, double value) {
}

public static <EVIDENCE extends Locatable> LikelihoodMatrix<EVIDENCE, Allele> combinedLikelihoodMatrix(final List<LikelihoodMatrix<EVIDENCE, Allele>> matrices, final AlleleList<Allele> alleleList) {
- final List<EVIDENCE> reads = matrices.stream().flatMap(m -> m.evidence().stream()).collect(Collectors.toList());
+ final List<EVIDENCE> reads = matrices.stream().flatMap(m -> m.evidence().stream()).toList();
final AlleleLikelihoods<EVIDENCE, Allele> combinedLikelihoods = new AlleleLikelihoods<>(SampleList.singletonSampleList("COMBINED"), alleleList, ImmutableMap.of("COMBINED", reads));

int combinedReadIndex = 0;
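A semantic caveat on the `.toList()` change above: `Stream.toList()` (Java 16) returns an unmodifiable list, whereas `Collectors.toList()` makes no such promise and in practice returns an `ArrayList`. That is safe here only if the combined evidence list is never mutated downstream; it is a real behavioral difference, not just a shorter spelling:

```java
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ToListDemo {
    public static void main(String[] args) {
        List<String> mutable = Stream.of("read1", "read2").collect(Collectors.toList());
        List<String> unmodifiable = Stream.of("read1", "read2").toList();

        mutable.add("read3"); // fine: Collectors.toList() currently yields an ArrayList

        try {
            unmodifiable.add("read3"); // throws: toList() result is unmodifiable
        } catch (UnsupportedOperationException e) {
            System.out.println("Stream.toList() result rejects mutation");
        }
    }
}
```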
@@ -394,7 +394,7 @@ private <E> Optional<E> getForNormal(final Supplier<E> supplier) {
need the ref allele in case the germline resource has a more or less parsimonious representation
For example, ref = A, alt = C; germline ref = AT, germline alt = CT
* @param afOfAllelesNotInGermlineResource default value of germline AF annotation
- @return
+ @return AF Annotation
*/
private static Map<String, Object> getNegativeLogPopulationAFAnnotation(List<VariantContext> germlineResourceVariants,
final List<Allele> allAlleles,
@@ -411,7 +411,7 @@ private static Map<String, Object> getNegativeLogPopulationAFAnnotation(List<Var
For example, ref = A, alt = C; germline ref = AT, germline alt = CT
* @param germlineVCs Germline resource variant contexts from which AF INFO field is drawn
* @param afOfAllelesNotInGermlineResource Default value of population AF annotation
- @return
+ @return double array
*/
@VisibleForTesting
static double[] getGermlineAltAlleleFrequencies(final List<Allele> allAlleles, final List<VariantContext> germlineVCs, final double afOfAllelesNotInGermlineResource) {
@@ -446,6 +446,6 @@
*/
@Override
public void close() {
- mutect3DatasetEngine.ifPresent(engine -> engine.close());
+ mutect3DatasetEngine.ifPresent(Mutect3DatasetEngine::close);
}
}
EventMap.java
@@ -14,6 +14,7 @@
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.param.ParamUtils;

+ import java.io.Serial;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
@@ -22,6 +23,7 @@
* Extract simple VariantContext events from a single haplotype
*/
public final class EventMap extends TreeMap<Integer, Event> {
+ @Serial
private static final long serialVersionUID = 1L;

private static final Logger logger = LogManager.getLogger(EventMap.class);
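`@Serial` (Java 14, `java.io.Serial`) has no runtime effect: like `@Override`, it asks the compiler to verify that the annotated member really is one of the serialization-machinery declarations, catching typos that would otherwise be silently ignored. A minimal sketch on a `TreeMap` subclass shaped like `EventMap`:

```java
import java.io.Serial;
import java.util.TreeMap;

// Hypothetical stand-in shaped like EventMap (a TreeMap subclass)
public class SerialDemo extends TreeMap<Integer, String> {
    // The compiler checks that this really is the serialVersionUID field;
    // a misspelled name would now be flagged instead of silently ignored
    @Serial
    private static final long serialVersionUID = 1L;

    public static void main(String[] args) {
        SerialDemo map = new SerialDemo();
        map.put(117, "SNP C->T");
        System.out.println(map); // {117=SNP C->T}
    }
}
```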
@@ -173,7 +175,7 @@ public void replaceEvent(final Event newEvent) {
* e1 can be a SNP, and e2 can then be either an insertion or deletion.
* If e1 is an indel, then e2 must be the opposite type (e1 deletion => e2 must be an insertion)
*/
- protected static Event makeCompoundEvents(final Event e1, final Event e2) {
+ static Event makeCompoundEvents(final Event e1, final Event e2) {
Utils.validateArg( e1.getStart() == e2.getStart(), "e1 and e2 must have the same start");

if ( e1.isSNP() || e2.isSNP()) {
