/**
 * Reports whether this codec can decode the given input file.
 *
 * @param potentialInput path of the file to probe
 * @return true iff the file starts with the VCF v3 magic header line
 */
@Override
public boolean canDecode(final String potentialInput) {
    // Delegate the header probe to the shared helper, keyed on the v3 magic line.
    final boolean decodable = canDecodeFile(potentialInput, VCF3_MAGIC_HEADER);
    return decodable;
}
}
// NOTE(review): incomplete fragment — dispatches on the header "fileformat" value: anything
// starting with "vcfv3" gets the legacy VCF3Codec, everything else the current VCFCodec.
// NOTE(review): toLowerCase() uses the default locale; consider toLowerCase(Locale.ROOT)
// for locale-independent matching (e.g. the Turkish-I problem) — verify against callers.
String version = tmp[1].toLowerCase(); if (version.startsWith("vcfv3")) { return new VCF3Codec(); } else { return new VCFCodec();
/** Both the current and the legacy v3 codec must report the standard VCF tabix format. */
@Test
public void testGetTabixFormat() {
    final TabixFormat expected = TabixFormat.VCF;
    Assert.assertEquals(new VCFCodec().getTabixFormat(), expected);
    Assert.assertEquals(new VCF3Codec().getTabixFormat(), expected);
}
/** * parse the filter string, first checking to see if we already have parsed it in a previous attempt * @param filterString the string to parse * @return a set of the filters applied */ protected List<String> parseFilters(String filterString) { // null for unfiltered if ( filterString.equals(VCFConstants.UNFILTERED) ) return null; // empty set for passes filters List<String> fFields = new ArrayList<String>(); if ( filterString.equals(VCFConstants.PASSES_FILTERS_v3) ) return new ArrayList<String>(fFields); if ( filterString.length() == 0 ) generateException("The VCF specification requires a valid filter status"); // do we have the filter string cached? if ( filterHash.containsKey(filterString) ) return new ArrayList<String>(filterHash.get(filterString)); // otherwise we have to parse and cache the value if ( filterString.indexOf(VCFConstants.FILTER_CODE_SEPARATOR) == -1 ) fFields.add(filterString); else fFields.addAll(Arrays.asList(filterString.split(VCFConstants.FILTER_CODE_SEPARATOR))); filterHash.put(filterString, fFields); return fFields; }
/**
 * A spanning-deletion ALT ("*") is a legal allele string, so parsing a record
 * with REF "A" and ALT "*" must complete without throwing.
 */
@Test
public void TestSpanDelParseAlleles() {
    // The unused local was removed: success is simply the absence of an exception.
    VCF3Codec.parseAlleles("A", Allele.SPAN_DEL_STRING, 0);
}
// NOTE(review): incomplete fragment — wraps the legacy v3 codec so it can be used
// through the genome-aware wrapper codec; enclosing method not visible here.
return new VCFWrapperCodec(new VCF3Codec(), genome);
/** * parse the filter string, first checking to see if we already have parsed it in a previous attempt * @param filterString the string to parse * @return a set of the filters applied */ @Override protected List<String> parseFilters(String filterString) { // null for unfiltered if ( filterString.equals(VCFConstants.UNFILTERED) ) return null; // empty set for passes filters List<String> fFields = new ArrayList<String>(); if ( filterString.equals(VCFConstants.PASSES_FILTERS_v3) ) return new ArrayList<String>(fFields); if (filterString.isEmpty()) generateException("The VCF specification requires a valid filter status"); // do we have the filter string cached? if ( filterHash.containsKey(filterString) ) return new ArrayList<String>(filterHash.get(filterString)); // otherwise we have to parse and cache the value if ( filterString.indexOf(VCFConstants.FILTER_CODE_SEPARATOR) == -1 ) fFields.add(filterString); else fFields.addAll(Arrays.asList(filterString.split(VCFConstants.FILTER_CODE_SEPARATOR))); filterHash.put(filterString, fFields); return fFields; }
/**
 * A spanning deletion ("*") is not a legal REF allele, so parsing must throw
 * a TribbleException.
 */
@Test(expectedExceptions = TribbleException.class)
public void TestSpanDelParseAllelesException() {
    // The unused local was removed: the test passes only if parseAlleles throws.
    VCF3Codec.parseAlleles(Allele.SPAN_DEL_STRING, "A", 0);
}
/**
 * Builds and exercises an interval index over a VCF 3.3 test file.
 * Disabled by default because it requires the largedata bundle.
 */
@Test
@Ignore("Requires largedata bundle")
public void testIntervalIndex33() throws Exception {
    final String testFile = TestUtils.LARGE_DATA_DIR + "CEU.SRP000032.2010_03_v3.3.genotypes.head.vcf";
    // Declare the concrete codec type instead of the raw FeatureCodec reference
    // (raw types defeat generic type checking; VCF3Codec is still assignable to
    // whatever FeatureCodec parameter tstIntervalIndex declares).
    final VCF3Codec codec = new VCF3Codec();
    tstIntervalIndex(testFile, codec);
}
// Returns true iff the candidate file begins with the VCF v3 magic header line;
// the actual probe is delegated to the shared canDecodeFile helper.
@Override public boolean canDecode(final String potentialInput) { return canDecodeFile(potentialInput, VCF3_MAGIC_HEADER); } }
/** * parse the filter string, first checking to see if we already have parsed it in a previous attempt * @param filterString the string to parse * @return a set of the filters applied */ @Override protected List<String> parseFilters(String filterString) { // null for unfiltered if ( filterString.equals(VCFConstants.UNFILTERED) ) return null; // empty set for passes filters List<String> fFields = new ArrayList<String>(); if ( filterString.equals(VCFConstants.PASSES_FILTERS_v3) ) return new ArrayList<String>(fFields); if (filterString.isEmpty()) generateException("The VCF specification requires a valid filter status"); // do we have the filter string cached? if ( filterHash.containsKey(filterString) ) return new ArrayList<String>(filterHash.get(filterString)); // otherwise we have to parse and cache the value if ( filterString.indexOf(VCFConstants.FILTER_CODE_SEPARATOR) == -1 ) fFields.add(filterString); else fFields.addAll(Arrays.asList(filterString.split(VCFConstants.FILTER_CODE_SEPARATOR))); filterHash.put(filterString, fFields); return fFields; }
@Test public void simpleTest() throws Exception { final VCF3Codec codec = new VCF3Codec(); final FeatureReader<VariantContext> reader = AbstractFeatureReader.getFeatureReader(SMALL_VCF.getAbsolutePath(), codec, false); final VCFHeader headerFromFile = (VCFHeader)reader.getHeader(); final File vcf = File.createTempFile("TabixOnTheFlyIndexCreationTest.", IOUtil.COMPRESSED_VCF_FILE_EXTENSION); final File tabix = new File(vcf.getAbsolutePath() + TabixUtils.STANDARD_INDEX_EXTENSION); vcf.deleteOnExit(); tabix.deleteOnExit(); final VariantContextWriter vcfWriter = new VariantContextWriterBuilder() .setOutputFile(vcf) .setReferenceDictionary(headerFromFile.getSequenceDictionary()) .setOptions(EnumSet.of(Options.INDEX_ON_THE_FLY, Options.ALLOW_MISSING_FIELDS_IN_HEADER)) .build(); vcfWriter.writeHeader(headerFromFile); final CloseableTribbleIterator<VariantContext> it = reader.iterator(); while (it.hasNext()) { vcfWriter.add(it.next()); } it.close(); vcfWriter.close(); // Hard to validate, so just confirm that index can be read. new TabixIndex(tabix); } }
// Returns true iff the candidate file begins with the VCF v3 magic header line;
// the actual probe is delegated to the shared canDecodeFile helper.
@Override public boolean canDecode(final String potentialInput) { return canDecodeFile(potentialInput, VCF3_MAGIC_HEADER); } }