/**
 * Writes the given VCF header by delegating to the wrapped writer.
 *
 * NOTE(review): the original summary said "initialize this VCF header",
 * but the method only forwards the write; the doc is corrected here.
 *
 * @param header the header to write
 */
public void writeHeader(VCFHeader header) {
    writer.writeHeader(header);
}
/**
 * Broadcasts the header to every wrapped writer.
 *
 * @param header the VCF header to write to each delegate
 */
@Override
public void writeHeader(final VCFHeader header) {
    for (final VariantContextWriter delegate : writers) {
        delegate.writeHeader(header);
    }
}
/**
 * Writes {@code header} by delegating to the wrapped inner writer.
 *
 * @param header the VCF header to emit
 */
@Override
public void writeHeader(VCFHeader header) {
    innerWriter.writeHeader(header);
}
/**
 * Writes {@code header} by delegating to the wrapped inner writer.
 *
 * @param header the VCF header to emit
 */
@Override
public void writeHeader(VCFHeader header) {
    innerWriter.writeHeader(header);
}
/**
 * Writes {@code header} by delegating to the wrapped inner writer.
 *
 * @param header the VCF header to emit
 */
@Override
public void writeHeader(VCFHeader header) {
    innerWriter.writeHeader(header);
}
/**
 * Writes {@code header} by delegating to the underlying writer.
 *
 * @param header the VCF header to emit
 */
@Override
public void writeHeader(final VCFHeader header) {
    this.underlyingWriter.writeHeader(header);
}
public void writeHeader(VCFHeader header) { if (useSingleSample != null) { // only want to output context for one sample Set<String> singSampSet = new TreeSet<String>(); singSampSet.add(useSingleSample); header = new VCFHeader(header.getMetaDataInSortedOrder(), singSampSet); } innerWriter.writeHeader(header); }
/**
 * Creates the underlying BCF writer and primes it with {@code header}.
 *
 * The stoppable stream starts in the "stopped" state when
 * {@code writeHeader} is false, so the header bytes emitted by
 * {@code writer.writeHeader} are discarded; output is re-enabled
 * immediately afterwards. The header must still be passed through the
 * writer so its internal state is initialized either way.
 *
 * @param output      destination stream
 * @param header      header used to initialize the writer
 * @param writeHeader whether the header bytes should actually reach {@code output}
 * @param ctx         task context supplying the configuration; may be null
 * @throws IOException if the writer cannot be created
 */
private void init(
    OutputStream output, VCFHeader header, final boolean writeHeader, TaskAttemptContext ctx)
    throws IOException {
    final BCFStoppableOutputStream stopOut = new BCFStoppableOutputStream(!writeHeader, output);
    writer = createVariantContextWriter(ctx == null ? null : ctx.getConfiguration(), stopOut);
    // always write the header through the writer, even when its bytes are
    // being swallowed by stopOut, so the writer's state is set up
    writer.writeHeader(header);
    stopOut.stopped = false; // resume real output for subsequent records
    setInputHeader(header);
}
/**
 * Creates the underlying BCF writer and primes it with {@code header}.
 *
 * When {@code writeHeader} is false the stream begins "stopped" so the
 * header bytes produced by {@code writer.writeHeader} are discarded;
 * output is switched back on right after. The header write itself is
 * never skipped because the writer needs it to initialize.
 *
 * @param output      destination stream
 * @param header      header used to initialize the writer
 * @param writeHeader whether the header bytes should actually reach {@code output}
 * @param ctx         task context supplying the configuration; may be null
 * @throws IOException if the writer cannot be created
 */
private void init(
    OutputStream output, VCFHeader header, final boolean writeHeader, TaskAttemptContext ctx)
    throws IOException {
    final BCFStoppableOutputStream stopOut = new BCFStoppableOutputStream(!writeHeader, output);
    writer = createVariantContextWriter(ctx == null ? null : ctx.getConfiguration(), stopOut);
    writer.writeHeader(header);
    stopOut.stopped = false; // resume real output for subsequent records
    setInputHeader(header);
}
/**
 * Creates the underlying VCF writer and primes it with {@code header}.
 *
 * When {@code writeHeader} is false the stoppable stream starts
 * "stopped", so the header bytes emitted by {@code writer.writeHeader}
 * are dropped; output is re-enabled immediately afterwards. The header
 * is always pushed through the writer so its state is initialized.
 *
 * @param output      destination stream
 * @param header      header used to initialize the writer
 * @param writeHeader whether the header bytes should actually reach {@code output}
 * @param ctx         task context supplying the configuration; may be null
 * @throws IOException if the writer cannot be created
 */
private void init(
    OutputStream output, VCFHeader header, boolean writeHeader, TaskAttemptContext ctx)
    throws IOException {
    final StoppableOutputStream stopOut = new StoppableOutputStream(!writeHeader, output);
    writer = createVariantContextWriter(ctx == null ? null : ctx.getConfiguration(), stopOut);
    writer.writeHeader(header);
    stopOut.stopped = false; // resume real output for subsequent records
    setInputHeader(header);
}
/**
 * Prepares the VCF export: builds the header from the first study's
 * metadata, writes it, and creates the record converter.
 *
 * NOTE(review): only {@code metadata.getStudies().get(0)} is used —
 * presumably the metadata holds exactly one study; confirm with callers.
 *
 * @return true always, signalling successful initialization
 */
@Override
public boolean pre() {
    String study = metadata.getStudies().get(0).getId();
    // flatten every individual's samples into a single sample-name list
    List<String> samples = metadata.getStudies().get(0).getIndividuals().stream()
            .flatMap(individual -> individual.getSamples().stream()).map(Sample::getId).collect(Collectors.toList());
    VCFHeader vcfHeader = new VariantStudyMetadataToVCFHeaderConverter().convert(metadata.getStudies().get(0), annotations);
    // missing header fields are tolerated so records with extra annotations still serialize
    variantContextWriter = VcfUtils.createVariantContextWriter(outputStream,
            vcfHeader.getSequenceDictionary(), Options.ALLOW_MISSING_FIELDS_IN_HEADER);
    variantContextWriter.writeHeader(vcfHeader);
    converter = newConverter(study, samples, annotations);
    return true;
}
/**
 * Opens this exporter on the given stream: builds the Avro-to-VariantContext
 * converter, creates the variant context writer, and writes the VCF header.
 *
 * @param os            destination stream; must not be null
 * @param writerOptions options forwarded to the writer builder
 */
public void open(OutputStream os, Options... writerOptions) {
    variantConverter = new VariantAvroToVariantContextConverter(metadata.getId(),
            sampleNames, formats, annotations);
    // create the variant context writer over the (non-null) output stream
    outputStream = Objects.requireNonNull(os);
    writer = VcfUtils.createVariantContextWriter(outputStream,
            vcfHeader.getSequenceDictionary(), writerOptions);
    // write VCF header before any records
    writer.writeHeader(vcfHeader);
}
/**
 * Initializes the walker: pulls the header of the input variant track,
 * writes it (restricted to the track's samples), and wraps the base
 * writer in a sorting writer.
 *
 * The sorting window of 200 records allows emitted variants to be
 * locally out of order by up to that many records.
 */
public void initialize() {
    final String trackName = variantCollection.variants.getName();
    final Set<String> samples = SampleUtils.getSampleListWithVCFHeader(getToolkit(), Arrays.asList(trackName));
    final Map<String, VCFHeader> vcfHeaders = GATKVCFUtils.getVCFHeadersFromRods(getToolkit(), Arrays.asList(trackName));
    final Set<VCFHeaderLine> headerLines = vcfHeaders.get(trackName).getMetaDataInSortedOrder();
    baseWriter.writeHeader(new VCFHeader(headerLines, samples));
    // buffer up to 200 records so slightly out-of-order output is re-sorted
    vcfWriter = new SortingVariantContextWriter(baseWriter, 200);
}
private static void writeVCsToFile(final VariantContextWriter writer, final VCFHeader header, final Iterable<VariantContext> vcs) { // write writer.writeHeader(header); for ( VariantContext vc : vcs ) if (vc != null) writer.add(vc); writer.close(); }
/**
 * Builds the underlying VCF writer over a stoppable stream and primes
 * it with {@code header}.
 *
 * When {@code writeHeader} is false the stream starts "stopped", so the
 * header bytes are discarded; output resumes immediately afterwards.
 * The header is still written so the writer initializes its state.
 *
 * @param output      destination stream
 * @param header      header used to initialize the writer
 * @param writeHeader whether the header bytes should actually reach {@code output}
 * @throws IOException if the writer cannot be created
 */
private void init(
    OutputStream output, VCFHeader header, boolean writeHeader) throws IOException {
    final StoppableOutputStream stopOut = new StoppableOutputStream(!writeHeader, output);
    // clearOptions: no indexing or other extras; plain stream output
    writer = new VariantContextWriterBuilder().clearOptions()
        .setOutputStream(stopOut).build();
    writer.writeHeader(header);
    stopOut.stopped = false; // resume real output for subsequent records
    setInputHeader(header);
}
/**
 * Initializes the walker: merges the headers of all input VCF rods into
 * a single header (requiring unique sample names), adds the chromosome
 * count and depth annotations this tool emits, and writes the result.
 */
@Override
public void initialize() {
    vcfRods = GATKVCFUtils.getVCFHeadersFromRods(getToolkit());
    // REQUIRE_UNIQUE: fail rather than silently merge duplicate sample names
    final Set<String> samples = SampleUtils.getSampleList(vcfRods, GATKVariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE);
    final Set<VCFHeaderLine> headerLines = VCFUtils.smartMergeHeaders(vcfRods.values(), true);
    // declare the annotations this tool adds to each output record
    headerLines.addAll(Arrays.asList(ChromosomeCountConstants.descriptions));
    headerLines.add(VCFStandardHeaderLines.getInfoLine(VCFConstants.DEPTH_KEY));
    final VCFHeader vcfHeader = new VCFHeader(headerLines, samples);
    vcfWriter.writeHeader(vcfHeader);
}
/**
 * Builds the underlying BCF writer (FORCE_BCF) over a stoppable stream
 * and primes it with {@code header}.
 *
 * When {@code writeHeader} is false the stream starts "stopped", so the
 * header bytes are discarded; output resumes immediately afterwards.
 * The header is always written so the writer initializes its state.
 *
 * @param output      destination stream
 * @param header      header used to initialize the writer
 * @param writeHeader whether the header bytes should actually reach {@code output}
 * @throws IOException if the writer cannot be created
 */
private void init(
    OutputStream output, VCFHeader header, final boolean writeHeader) throws IOException {
    final BCFStoppableOutputStream stopOut = new BCFStoppableOutputStream(!writeHeader, output);
    writer = new VariantContextWriterBuilder().clearOptions()
        .setOption(Options.FORCE_BCF)
        .setOutputBCFStream(stopOut).build();
    writer.writeHeader(header);
    stopOut.stopped = false; // resume real output for subsequent records
    setInputHeader(header);
}
/**
 * Writes a VCF with an empty header to the named FIFO and closes it.
 *
 * @param pathToFifo filesystem path of the FIFO to write to
 */
private void innerWriteToFifo(String pathToFifo) {
    // Do not enable INDEX_ON_THE_FLY because on-the-fly indexing needs a
    // seekable output and is not compatible with writing to a pipe.
    // (NOTE(review): original comment said "INDEX_OF_THE_FLY" — typo fixed.)
    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
        .clearOptions()
        .setReferenceDictionary(dictionary);
    Path vcfPath = Paths.get(pathToFifo);
    VariantContextWriter writer = builder.setOutputPath(vcfPath).build();
    writer.writeHeader(new VCFHeader());
    writer.close();
}
/**
 * Initializes the walker: requires explicit -L intervals, sets up the
 * per-interval coverage map and interval iterator, writes the VCF
 * header with all samples from the input BAMs, and loads the coverage
 * threshold plugins once up front (plugin loading is costly).
 *
 * @throws UserException if no intervals were provided
 */
@Override
public void initialize() {
    super.initialize();
    if (getToolkit().getIntervals() == null || getToolkit().getIntervals().isEmpty())
        throw new UserException("This tool only works if you provide one or more intervals (use the -L argument). If you want to run whole genome, use -T DepthOfCoverage instead.");
    intervalMap = new LinkedHashMap<>(INITIAL_HASH_SIZE);
    intervalListIterator = new PeekableIterator<>(getToolkit().getIntervals().iterator());
    // get all of the unique sample names for the VCF Header
    samples = ReadUtils.getSAMFileSamples(getToolkit().getSAMFileHeader());
    vcfWriter.writeHeader(new VCFHeader(getHeaderInfo(), samples));
    // pre load all the statistics classes because it is costly to operate on the JVM and we only want to do it once.
    loadAllPlugins(thresholds);
}
/**
 * Round-trips a VCF header: reads it from {@code inpath}, writes it with
 * the writer under test, re-reads it from {@code outFile}, and asserts
 * the two headers are equal.
 *
 * FIX(review): the original leaked both {@code AbstractFeatureReader}s
 * (they are Closeable and hold open file handles) and left the writer
 * open on assertion failure; all three are now closed deterministically.
 */
@Test
public void testWriteHeader() throws Exception {
    FeatureCodec codec = CodecFactory.getCodec(inpath, genome);

    final VCFHeader header0;
    try (AbstractFeatureReader<Feature, ?> bfs =
            AbstractFeatureReader.getFeatureReader(inpath, codec, false)) {
        header0 = (VCFHeader) bfs.getHeader();
    }

    VariantContextWriter writer = getWriter();
    try {
        writer.writeHeader(header0);
    } finally {
        writer.close();
    }

    final VCFHeader header1;
    try (AbstractFeatureReader<Feature, ?> bfs1 =
            AbstractFeatureReader.getFeatureReader(outFile.getAbsolutePath(), codec, false)) {
        header1 = (VCFHeader) bfs1.getHeader();
    }
    assertHeadersEquals(header0, header1);
}