/**
 * Computes the merged file-coordinate array covering all given query intervals.
 * Each interval is resolved to a {@link BAMFileSpan} via the index, the spans are
 * merged, and the merged span is flattened to a coordinate (chunk-boundary) array.
 *
 * @param index   the BAM index used to resolve intervals to file spans
 * @param queries the query intervals to cover; must be non-null
 * @return chunk-boundary coordinate pairs for the merged span
 */
private static long[] coordinatesFromQueryIntervals(BAMIndex index, QueryInterval[] queries) {
    // Pre-size to the known count (original used capacity 1 and grew repeatedly).
    final ArrayList<BAMFileSpan> spans = new ArrayList<>(queries.length);
    for (final QueryInterval qi : queries) {
        spans.add(index.getSpanOverlapping(qi.referenceIndex, qi.start, qi.end));
    }
    // toArray replaces the original hand-rolled element-by-element copy.
    return BAMFileSpan.merge(spans.toArray(new BAMFileSpan[0])).toCoordinateArray();
}
// Seek the compressed stream to the start of the index's last linear bin;
// -1 is the sentinel for "no linear bins present". NOTE(review): fragment —
// the enclosing method and the body of this if are not visible in this chunk.
final long startOfLastLinearBin = getIndex().getStartOfLastLinearBin(); if (startOfLastLinearBin != -1) { mCompressedInputStream.seek(startOfLastLinearBin);
/**
 * Closes the underlying stream and index and clears all references.
 * The index is closed (and fields nulled) even if closing the stream throws,
 * so a failure on one resource cannot leak the other.
 */
public void close() {
    try {
        if (mStream != null) {
            mStream.close();
        }
    } finally {
        // Runs regardless of a throw above: never leak the index or keep stale refs.
        if (mIndex != null) {
            mIndex.close();
        }
        mStream = null;
        mFileHeader = null;
        mIndex = null;
    }
}
// Accumulate the aligned-record count reported by the index metadata for each
// of the first numRefs reference sequences. NOTE(review): fragment — the loop
// close and the declarations of numRefs/bai/count are not visible in this chunk.
for (int i = 0; i < numRefs; i++) { BAMIndexMetaData meta = bai.getMetaData(i); count += meta.getAlignedRecordCount();
/**
 * Sums the aligned-record counts from the BAM index metadata across all
 * reference sequences in the file's dictionary.
 *
 * @param bam local path to a BAM file, or a URL (a sibling {@code .bai} URL is assumed)
 * @return total number of aligned records reported by the index
 * @throws IOException if the reader cannot be opened or closed
 */
public static long getAlignedReadCount(String bam) throws IOException {
    // One factory for both branches so the SILENT stringency applies uniformly
    // (the original URL branch used makeDefault() and ignored the configured factory).
    final SamReaderFactory factory = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT);
    final UrlValidator urlValidator = new UrlValidator();
    // try-with-resources guarantees the reader is closed even if an index lookup throws.
    try (final SamReader samReader = urlValidator.isValid(bam)
            ? factory.open(SamInputResource.of(new URL(bam)).index(new URL(bam + ".bai")))
            : factory.open(new File(bam))) {
        long alnCount = 0;
        for (final SAMSequenceRecord seq : samReader.getFileHeader().getSequenceDictionary().getSequences()) {
            alnCount += samReader.indexing().getIndex().getMetaData(seq.getSequenceIndex()).getAlignedRecordCount();
        }
        return alnCount;
    }
}
/**
 * Resolves each query interval to a file span via the index, merges the spans,
 * and returns the merged span as a coordinate (chunk-boundary) array.
 *
 * @param index   BAM index used for span lookups
 * @param queries intervals to resolve
 * @return coordinate pairs of the merged span
 */
private static long[] coordinatesFromQueryIntervals(BAMIndex index, QueryInterval[] queries) {
    final ArrayList<BAMFileSpan> collected = new ArrayList<>(1);
    for (final QueryInterval interval : Arrays.asList(queries)) {
        collected.add(index.getSpanOverlapping(interval.referenceIndex, interval.start, interval.end));
    }
    final BAMFileSpan[] asArray = new BAMFileSpan[collected.size()];
    int pos = 0;
    for (final BAMFileSpan span : collected) {
        asArray[pos++] = span;
    }
    return BAMFileSpan.merge(asArray).toCoordinateArray();
}
// Position the compressed input at the last linear bin recorded in the index
// (-1 means the index contains none). NOTE(review): fragment — the enclosing
// method and the rest of this if-body are outside this chunk.
final long startOfLastLinearBin = getIndex().getStartOfLastLinearBin(); if (startOfLastLinearBin != -1) { mCompressedInputStream.seek(startOfLastLinearBin);
// Test assertions: the index metadata for references 0, 1, 2 must report
// 1, 3, and 2 aligned records respectively. NOTE(review): fragment — the
// enclosing test method is not visible in this chunk.
final BAMIndexMetaData metaData_0 = index.getMetaData(0); Assert.assertNotNull(metaData_0); Assert.assertEquals(metaData_0.getAlignedRecordCount(), 1); final BAMIndexMetaData metaData_1 = index.getMetaData(1); Assert.assertNotNull(metaData_1); Assert.assertEquals(metaData_1.getAlignedRecordCount(), 3); final BAMIndexMetaData metaData_2 = index.getMetaData(2); Assert.assertNotNull(metaData_2); Assert.assertEquals(metaData_2.getAlignedRecordCount(), 2);
/**
 * Closes the compressed input stream, the underlying stream, and the index,
 * then clears all references. Each resource is closed in its own try/finally
 * level so a failure closing one cannot leak the others.
 *
 * @throws RuntimeIOException if closing the compressed input stream fails
 */
@Override
public void close() {
    try {
        if (mCompressedInputStream != null) {
            try {
                mCompressedInputStream.close();
            } catch (IOException e) {
                throw new RuntimeIOException("Exception closing compressed input stream.", e);
            }
        }
    } finally {
        try {
            if (mStream != null) {
                mStream.close();
            }
        } finally {
            // Always runs: the index is closed and the fields cleared even if
            // either close above threw.
            if (mIndex != null) {
                mIndex.close();
            }
            mStream = null;
            mFileHeader = null;
            mIndex = null;
        }
    }
}
/**
 * Uses the index to determine the chunk boundaries for the required intervals.
 *
 * @param intervals the intervals to restrict reads to
 * @param fileIndex the BAM index to use
 * @return the merged file span for the intervals, or null when no intervals are given
 */
public static BAMFileSpan getFileSpan(QueryInterval[] intervals, BAMIndex fileIndex) {
    if (intervals.length == 0) {
        // Preserves the original contract: no intervals yields null, not an empty span.
        return null;
    }
    final BAMFileSpan[] spans = new BAMFileSpan[intervals.length];
    int pos = 0;
    for (final QueryInterval interval : intervals) {
        spans[pos++] = fileIndex.getSpanOverlapping(interval.referenceIndex, interval.start, interval.end);
    }
    return BAMFileSpan.merge(spans);
}
// Seek to the index's last linear bin offset when one exists (-1 = none).
// NOTE(review): fragment — enclosing method and if-body continuation are
// outside this chunk.
final long startOfLastLinearBin = getIndex().getStartOfLastLinearBin(); if (startOfLastLinearBin != -1) { mCompressedInputStream.seek(startOfLastLinearBin);
/**
 * Counts all records (aligned + unaligned) recorded in the index metadata of
 * an indexed SAM/BAM file.
 *
 * @param samFile an indexed SAM/BAM file
 * @return total aligned plus unaligned record count across all reference sequences
 */
public static long countSamTotalRecord(final File samFile) {
    long total = 0;
    // try-with-resources: the original never closed the reader (resource leak).
    try (final SamReader reader = SamReaderFactory.make().open(samFile)) {
        assert reader.hasIndex();
        final int numRefs = reader.getFileHeader().getSequenceDictionary().size();
        for (int i = 0; i < numRefs; i++) {
            total += reader.indexing().getIndex().getMetaData(i).getAlignedRecordCount();
            total += reader.indexing().getIndex().getMetaData(i).getUnalignedRecordCount();
        }
    } catch (final java.io.IOException e) {
        // close() declares IOException; surface it unchecked to keep the signature.
        throw new RuntimeException("Failed to close reader for " + samFile, e);
    }
    return total;
} }
// Closes the compressed input stream (wrapping any IOException as an unchecked
// RuntimeIOException), then the underlying stream and index, and clears all
// references. NOTE(review): if an earlier close throws, the later resources
// are not closed and the fields are not nulled — a throw here leaks mIndex.
@Override public void close() { if (mCompressedInputStream != null) { try { mCompressedInputStream.close(); } catch (IOException e) { throw new RuntimeIOException("Exception closing compressed input stream.", e); } } if (mStream != null) { mStream.close(); } if (mIndex != null) { mIndex.close(); } mStream = null; mFileHeader = null; mIndex = null; }
/** * Use the index to determine the chunk boundaries for the required intervals. * @param intervals the intervals to restrict reads to * @param fileIndex the BAM index to use * @return file pointer pairs corresponding to chunk boundaries, or null when intervals is empty */ public static BAMFileSpan getFileSpan(QueryInterval[] intervals, BAMIndex fileIndex) { final BAMFileSpan[] inputSpans = new BAMFileSpan[intervals.length]; for (int i = 0; i < intervals.length; ++i) { final QueryInterval interval = intervals[i]; final BAMFileSpan span = fileIndex.getSpanOverlapping(interval.referenceIndex, interval.start, interval.end); inputSpans[i] = span; } final BAMFileSpan span; if (inputSpans.length > 0) { span = BAMFileSpan.merge(inputSpans); } else { /* no intervals: callers must handle a null span */ span = null; } return span; }
// When the index reports a last linear bin (-1 = absent), seek the compressed
// stream to it. NOTE(review): fragment — the enclosing method is not visible
// in this chunk.
final long startOfLastLinearBin = getIndex().getStartOfLastLinearBin(); if (startOfLastLinearBin != -1) { mCompressedInputStream.seek(startOfLastLinearBin);
// Test assertions: the reader must be indexed, and the index metadata for
// contig "11" must report all reads minus the unmapped ones as aligned.
// NOTE(review): fragment — the enclosing test method and the EXPECTED_*
// constants are not visible in this chunk.
Assert.assertTrue(reader.hasIndex()); final int sequenceIndex = reader.getFileHeader().getSequenceIndex("11"); BAMIndexMetaData metaData = reader.indexing().getIndex().getMetaData(sequenceIndex); Assert.assertEquals(EXPECTED_ALL_READS - EXPECTED_UNMAPPED_READS, metaData.getAlignedRecordCount());
/**
 * Releases all resources held by this reader: the compressed input stream,
 * the underlying stream, and the index. References are cleared afterwards.
 * An IOException from the compressed stream is rethrown unchecked.
 */
@Override
public void close() {
    if (mCompressedInputStream != null) {
        try {
            mCompressedInputStream.close();
        } catch (IOException ioe) {
            throw new RuntimeIOException("Exception closing compressed input stream.", ioe);
        }
    }
    if (mStream != null) {
        mStream.close();
    }
    if (mIndex != null) {
        mIndex.close();
    }
    // Drop references so the closed resources can be collected.
    mStream = null;
    mFileHeader = null;
    mIndex = null;
}
/**
 * Uses the index to map the requested intervals onto chunk boundaries.
 *
 * @param intervals the intervals to restrict reads to
 * @param fileIndex the BAM index to use
 * @return the merged span for all intervals, or null if intervals is empty
 */
public static BAMFileSpan getFileSpan(QueryInterval[] intervals, BAMIndex fileIndex) {
    final BAMFileSpan[] perInterval = new BAMFileSpan[intervals.length];
    for (int idx = 0; idx < perInterval.length; idx++) {
        final QueryInterval q = intervals[idx];
        perInterval[idx] = fileIndex.getSpanOverlapping(q.referenceIndex, q.start, q.end);
    }
    // Empty input yields null rather than an empty span, per the original contract.
    return perInterval.length > 0 ? BAMFileSpan.merge(perInterval) : null;
}
// Proceed only when the index has a last linear bin (-1 = none) AND records
// with no coordinate exist; the cast suggests noCoordinateCount is only on
// AbstractBAMFileIndex. NOTE(review): fragment — the if-body and enclosing
// method are not visible in this chunk.
long startOfLastLinearBin = idx.getStartOfLastLinearBin(); long noCoordinateCount = ((AbstractBAMFileIndex) idx).getNoCoordinateCount(); if (startOfLastLinearBin != -1 && noCoordinateCount > 0) {
/**
 * Looks up the BAM file-pointer coordinate pairs overlapping the given region.
 *
 * @param indexFile      the BAM index file to query
 * @param dictionary     sequence dictionary used to resolve the sequence name
 * @param sequenceName   name of the reference sequence
 * @param alignmentStart start of the region (1-based, per QueryInterval conventions elsewhere in this file)
 * @param alignmentEnd   end of the region
 * @return coordinate pairs for overlapping chunks; empty array when the sequence
 *         name is unknown or no span overlaps (never null)
 */
public long[] getBAMIndexPointers(File indexFile, SAMSequenceDictionary dictionary, String sequenceName, int alignmentStart, int alignmentEnd) {
    long[] filePointers = new long[0];
    final int referenceIndex = dictionary.getSequenceIndex(sequenceName);
    if (referenceIndex != -1) {
        final BAMIndex fileIndex = BAMIndexFactory.SHARED_INSTANCE.createCachingIndex(indexFile, dictionary);
        final BAMFileSpan fileSpan = fileIndex.getSpanOverlapping(referenceIndex, alignmentStart, alignmentEnd);
        // Return an empty array (not null) when nothing overlaps, matching the
        // method's own empty-array default; the original returned null here,
        // contradicting that default and forcing callers into a null check.
        filePointers = fileSpan != null ? fileSpan.toCoordinateArray() : new long[0];
    }
    return filePointers;
} }