update htsjdk to 2.19.0 #5812

Merged · 1 commit · Mar 27, 2019
build.gradle — 6 changes: 3 additions & 3 deletions
@@ -58,12 +58,12 @@ repositories {
}

final requiredJavaVersion = "8"
-final htsjdkVersion = System.getProperty('htsjdk.version','2.18.2')
-final picardVersion = System.getProperty('picard.version','2.18.25')
+final htsjdkVersion = System.getProperty('htsjdk.version','2.19.0')
+final picardVersion = System.getProperty('picard.version','2.19.0')
final barclayVersion = System.getProperty('barclay.version','2.1.0')
final sparkVersion = System.getProperty('spark.version', '2.2.0')
final hadoopVersion = System.getProperty('hadoop.version', '2.8.2')
-final disqVersion = System.getProperty('disq.version','0.2.0')
+final disqVersion = System.getProperty('disq.version','0.3.0')
final genomicsdbVersion = System.getProperty('genomicsdb.version','1.0.0-rc2')
final testNGVersion = '6.11'
// Using the shaded version to avoid conflicts between its protobuf dependency
@@ -125,7 +125,7 @@ private static void createBaiAndSplittingIndex(final File inputBam, final File i
assertBamIsCoordinateSorted(header);
final SBIIndexWriter indexer = new SBIIndexWriter(out, granularity);

-final BAMIndexer bamIndexer = new BAMIndexer(IOUtils.replaceExtension(index, BAMIndex.BAMIndexSuffix), header);
+final BAMIndexer bamIndexer = new BAMIndexer(IOUtils.replaceExtension(index, BAMIndex.BAI_INDEX_SUFFIX), header);
BAMFileSpan lastFilePointer = null;
for(final SAMRecord read : reader){
BAMFileSpan filePointer = (BAMFileSpan) read.getFileSource().getFilePointer();
@@ -149,7 +149,7 @@ private static void createBaiAndSplittingIndex(final File inputBam, final File i

private static void assertBamIsCoordinateSorted(final SAMFileHeader header) {
if( header.getSortOrder() != SAMFileHeader.SortOrder.coordinate) {
-throw new UserException.BadInput("Cannot create a " + BAMIndex.BAMIndexSuffix + " index for a file " +
+throw new UserException.BadInput("Cannot create a " + BAMIndex.BAI_INDEX_SUFFIX + " index for a file " +
"that isn't coordinate sorted.");
}
}
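As an aside on the constant rename above (not part of the diff): a minimal standalone sketch, assuming only that htsjdk 2.19.0 exposes BAMIndex.BAI_INDEX_SUFFIX in place of the older BAMIndex.BAMIndexSuffix, with both expanding to the ".bai" extension.

```java
import htsjdk.samtools.BAMIndex;

public class BaiSuffixSketch {
    public static void main(String[] args) {
        // Hypothetical BAM file name, used only to illustrate the renamed constant.
        final String bamName = "reads.bam";
        // Derive the conventional index name by swapping the .bam extension for the BAI suffix.
        final String baiName = bamName.replaceAll("\\.bam$", BAMIndex.BAI_INDEX_SUFFIX);
        System.out.println(baiName); // expected: reads.bai
    }
}
```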
@@ -1,23 +1,18 @@
package org.broadinstitute.hellbender.tools.spark.sv.utils;

import htsjdk.samtools.SAMFlag;
+import htsjdk.samtools.reference.FastaReferenceWriter;
import org.broadinstitute.hellbender.exceptions.GATKException;
import org.broadinstitute.hellbender.tools.spark.sv.discovery.alignment.AlignedContig;
import org.broadinstitute.hellbender.tools.spark.sv.discovery.alignment.AlignmentInterval;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.bwa.BwaMemAligner;
import org.broadinstitute.hellbender.utils.bwa.BwaMemAlignment;
import org.broadinstitute.hellbender.utils.bwa.BwaMemIndex;
-import org.broadinstitute.hellbender.utils.reference.FastaReferenceWriter;

import java.io.File;
import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Predicate;
@@ -1,6 +1,8 @@
package org.broadinstitute.hellbender.tools.walkers.fasta;

import com.google.common.primitives.Bytes;
+import htsjdk.samtools.reference.FastaReferenceWriter;
+import htsjdk.samtools.reference.FastaReferenceWriterBuilder;
import it.unimi.dsi.fastutil.bytes.ByteArrayList;
import org.broadinstitute.barclay.argparser.Argument;
import org.broadinstitute.barclay.argparser.CommandLineProgramProperties;
@@ -13,7 +15,6 @@
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.SimpleInterval;
import org.broadinstitute.hellbender.utils.io.IOUtils;
-import org.broadinstitute.hellbender.utils.reference.FastaReferenceWriter;
import picard.cmdline.programgroups.ReferenceProgramGroup;

import java.io.IOException;
@@ -82,7 +83,10 @@ public class FastaReferenceMaker extends ReferenceWalker {
public void onTraversalStart() {
final Path path = IOUtils.getPath(output);
try {
-writer = new FastaReferenceWriter(path, basesPerLine, true, true);
Collaborator: Is the behavior previously controlled by the two true booleans here being preserved in the new FastaReferenceWriterBuilder call?

Collaborator: Looks like these were makeFai and makeDict, both of which are true by default in the new builder.

+writer = new FastaReferenceWriterBuilder()
+    .setFastaFile(path)
+    .setBasesPerLine(basesPerLine)
+    .build();
} catch (IOException e) {
throw new UserException.CouldNotCreateOutputFile("Couldn't create " + output + ", encountered exception: " + e.getMessage(), e);
}
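To make the review exchange above concrete, here is a minimal sketch (not from this PR) of restating the two booleans from the old FastaReferenceWriter constructor explicitly on the builder. The setMakeFaiOutput/setMakeDictOutput names, and the startSequence/appendBases calls, are assumptions about the htsjdk 2.19.0 API rather than code from the PR; if, as the reviewer notes, both default to true, the two setters are redundant but document intent.

```java
import htsjdk.samtools.reference.FastaReferenceWriter;
import htsjdk.samtools.reference.FastaReferenceWriterBuilder;

import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

public class FastaWriterBuilderSketch {
    public static void main(String[] args) throws IOException {
        final Path out = Paths.get("example.fasta"); // hypothetical output path
        final int basesPerLine = 60;                 // hypothetical line width

        // Old form: new FastaReferenceWriter(path, basesPerLine, true, true)
        // New form: the .fai and .dict sidecar outputs are requested via explicit toggles
        // (setter names assumed here), which the reviewer says default to true anyway.
        try (FastaReferenceWriter writer = new FastaReferenceWriterBuilder()
                .setFastaFile(out)
                .setBasesPerLine(basesPerLine)
                .setMakeFaiOutput(true)
                .setMakeDictOutput(true)
                .build()) {
            // Write one toy contig so close() has something to index (startSequence/appendBases
            // are assumed unchanged from the pre-move FastaReferenceWriter API).
            writer.startSequence("chrExample");
            writer.appendBases("ACGTACGTACGTACGT".getBytes());
        }
    }
}
```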
@@ -384,8 +384,7 @@ private OutputAlleleSubset calculateOutputAlleleSubset(final AFCalculationResult
} else {
// we want to keep the NON_REF symbolic allele but only in the absence of a non-symbolic allele, e.g.
// if we combined a ref / NON_REF gVCF with a ref / alt gVCF
-final boolean isNonRefWhichIsLoneAltAllele = alternativeAlleleCount == 1 && allele.equals(
-        Allele.NON_REF_ALLELE);
+final boolean isNonRefWhichIsLoneAltAllele = alternativeAlleleCount == 1 && allele.equals(Allele.NON_REF_ALLELE);
final boolean isPlausible = afCalculationResult.isPolymorphicPhredScaledQual(allele, configuration.genotypeArgs.STANDARD_CONFIDENCE_FOR_CALLING);

siteIsMonomorphic &= !isPlausible;
@@ -54,7 +54,7 @@ static void runHooks() {
for (Path path : toBeDeleted) {
try {
IOUtils.deleteRecursively(path);
-} catch (IOException | SecurityException e) {
+} catch (SecurityException e) {
// do nothing if cannot be deleted, because it is a shutdown hook
}
}
@@ -5,6 +5,7 @@
import htsjdk.samtools.BamFileIoUtils;
import htsjdk.samtools.cram.build.CramIO;
import htsjdk.samtools.util.BlockCompressedInputStream;
+import htsjdk.samtools.util.IOUtil;
import htsjdk.tribble.Tribble;
import htsjdk.tribble.util.TabixUtils;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
@@ -702,7 +703,7 @@ public static Path createTempPath(String name, String extension) {
final String filename = path.getFileName().toString();
IOUtils.deleteOnExit(path.resolveSibling(filename + Tribble.STANDARD_INDEX_EXTENSION));
IOUtils.deleteOnExit(path.resolveSibling(filename + TabixUtils.STANDARD_INDEX_EXTENSION));
-IOUtils.deleteOnExit(path.resolveSibling(filename + BAMIndex.BAMIndexSuffix));
+IOUtils.deleteOnExit(path.resolveSibling(filename + BAMIndex.BAI_INDEX_SUFFIX));
IOUtils.deleteOnExit(path.resolveSibling(filename.replaceAll(extension + "$", ".bai")));
IOUtils.deleteOnExit(path.resolveSibling(filename + ".md5"));

@@ -1021,14 +1022,8 @@ public static void deleteOnExit(final Path fileToDelete){
* Delete rootPath recursively
* @param rootPath is the file/directory to be deleted
*/
-public static void deleteRecursively(final Path rootPath) throws IOException {
-final List<Path> pathsToDelete = Files.walk(rootPath)
-        .sorted(Comparator.reverseOrder())
-        .collect(Collectors.toList());
-
-for (Path path : pathsToDelete) {
-    Files.deleteIfExists(path);
-}
+public static void deleteRecursively(final Path rootPath) {
+IOUtil.recursiveDelete(rootPath);
Member (author): New implementation in htsjdk. We might want to just remove this one actually.

}
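For reference on the simplification above (and the author's suggestion that the wrapper could be dropped entirely): a minimal standalone sketch, not from this PR, of calling htsjdk's IOUtil.recursiveDelete directly. As the diff shows, it does not declare a checked IOException, which is also why the runHooks catch block earlier in this PR no longer catches one.

```java
import htsjdk.samtools.util.IOUtil;

import java.nio.file.Files;
import java.nio.file.Path;

public class RecursiveDeleteSketch {
    public static void main(String[] args) throws Exception {
        // Build a small hypothetical directory tree to delete.
        final Path root = Files.createTempDirectory("recursive-delete-demo");
        Files.createFile(root.resolve("a.txt"));
        Files.createFile(Files.createDirectory(root.resolve("sub")).resolve("b.txt"));

        // htsjdk's IOUtil.recursiveDelete removes the directory and everything under it;
        // unlike the old GATK implementation it does not declare a checked IOException.
        IOUtil.recursiveDelete(root);

        System.out.println("Still exists? " + Files.exists(root)); // expected: false
    }
}
```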

/**