@Override
public int doWork(String[] args) {
    boolean compressed = false;
    int maxRecordsInRAM = 100000;
    long count = -1L;
    File fileout = null;

    // parse command-line options (GNU getopt style)
    com.github.lindenb.jvarkit.util.cli.GetOpt opt = new com.github.lindenb.jvarkit.util.cli.GetOpt();
    int c;
    while ((c = opt.getopt(args, getGetOptDefault() + "o:n:N:T:b")) != -1) {
        switch (c) {
            case 'b': compressed = true; break;
            case 'N': maxRecordsInRAM = Integer.parseInt(opt.getOptArg()); break;
            case 'n': count = Long.parseLong(opt.getOptArg()); break;
            case 'o': fileout = new File(opt.getOptArg()); break;
            case 'T': this.addTmpDirectory(new File(opt.getOptArg())); break;
            default: {
                switch (handleOtherOptions(c, opt, null)) {
                    case EXIT_FAILURE: return -1;
                    case EXIT_SUCCESS: return 0;
                    default: break;
                }
            }
        }
    }

    if (count < -1L) // -1 == infinite
    {
        error("Bad count:" + count);
        return -1;
    }

    SamReader samReader = null;
    SAMRecordIterator iter = null;
    SAMFileWriter samWriter = null;
    Random random = new Random();
    CloseableIterator<RandSamRecord> iter2 = null;
    try {
        SamFileReaderFactory.setDefaultValidationStringency(ValidationStringency.SILENT);
        // open input: stdin if no argument, otherwise a single SAM/BAM file
        if (opt.getOptInd() == args.length) {
            info("Reading from stdin");
            samReader = SamFileReaderFactory.mewInstance().openStdin();
        } else if (opt.getOptInd() + 1 == args.length) {
            File filename = new File(args[opt.getOptInd()]);
            info("Reading from " + filename);
            samReader = SamFileReaderFactory.mewInstance().open(filename);
        } else {
            error("Illegal number of arguments.");
            return -1;
        }

        SAMFileHeader header = samReader.getFileHeader();
        header = header.clone();
        header.setSortOrder(SortOrder.unsorted);
        header.addComment("Processed with " + getProgramName() + " : " + getProgramCommandLine());

        SAMFileWriterFactory sfw = new SAMFileWriterFactory();
        sfw.setCreateIndex(false);
        sfw.setCreateMd5File(false);

        // open output: stdout (SAM or BAM) or the file given with -o
        if (fileout == null) {
            if (compressed) {
                samWriter = sfw.makeBAMWriter(header, true, System.out);
            } else {
                samWriter = sfw.makeSAMWriter(header, true, System.out);
            }
        } else {
            samWriter = sfw.makeSAMOrBAMWriter(header, true, fileout);
            this.addTmpDirectory(fileout);
        }

        iter = samReader.iterator();
        SAMSequenceDictionaryProgress progress =
                new SAMSequenceDictionaryProgress(samReader.getFileHeader().getSequenceDictionary());

        // sort records by a random key, spilling to the tmp directories beyond maxRecordsInRAM
        SortingCollection<RandSamRecord> sorter = SortingCollection.newInstance(
                RandSamRecord.class,
                new RandSamRecordCodec(header),
                new RandSamRecordComparator(),
                maxRecordsInRAM,
                getTmpDirectories());
        sorter.setDestructiveIteration(true);

        // attach a random integer to each record
        while (iter.hasNext()) {
            RandSamRecord r = new RandSamRecord();
            r.rand_index = random.nextInt();
            r.samRecord = progress.watch(iter.next());
            sorter.add(r);
        }
        iter.close();
        iter = null;

        sorter.doneAdding();
        iter2 = sorter.iterator();

        // write records back out in shuffled order, optionally limited to 'count' reads
        if (count == -1) {
            while (iter2.hasNext()) {
                samWriter.addAlignment(iter2.next().samRecord);
            }
        } else {
            while (iter2.hasNext() && count > 0) {
                samWriter.addAlignment(iter2.next().samRecord);
                count--;
            }
        }
        iter2.close();
        iter2 = null;
        sorter.cleanup();
        progress.finish();
    } catch (Exception e) {
        error(e);
        return -1;
    } finally {
        CloserUtil.close(iter);
        CloserUtil.close(iter2);
        CloserUtil.close(samReader);
        CloserUtil.close(samWriter);
    }
    return 0;
}
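/*
 * The shuffle above relies on three helper types that are not shown in this listing:
 * RandSamRecord, RandSamRecordCodec and RandSamRecordComparator. Below is a minimal sketch of
 * the record wrapper and the comparator, assuming the comparator simply orders records by the
 * random key assigned in doWork(); the tie-breaking rule and the codec (which must serialize
 * the int key plus the SAMRecord) are assumptions, since the original helpers are defined
 * elsewhere.
 */
private static class RandSamRecord {
    int rand_index;      // random key drawn once per record
    SAMRecord samRecord; // the wrapped alignment
}

private static class RandSamRecordComparator implements java.util.Comparator<RandSamRecord> {
    @Override
    public int compare(final RandSamRecord a, final RandSamRecord b) {
        // ordering by the random key is what produces the shuffled output order
        return Integer.compare(a.rand_index, b.rand_index);
    }
}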
/** Combines multiple SAM/BAM files into one. */
@Override
protected int doWork() {
    boolean matchedSortOrders = true;

    // read interval list if it is defined
    final List<Interval> intervalList =
            (INTERVALS == null ? null : IntervalList.fromFile(INTERVALS).uniqued().getIntervals());
    // map reader->iterator used if INTERVALS is defined
    final Map<SamReader, CloseableIterator<SAMRecord>> samReaderToIterator =
            new HashMap<SamReader, CloseableIterator<SAMRecord>>(INPUT.size());

    // Open the files for reading and writing
    final List<SamReader> readers = new ArrayList<SamReader>();
    final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
    {
        SAMSequenceDictionary dict = null; // Used to try and reduce redundant SDs in memory
        for (final File inFile : INPUT) {
            IOUtil.assertFileIsReadable(inFile);
            final SamReader in =
                    SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(inFile);
            if (INTERVALS != null) {
                if (!in.hasIndex())
                    throw new PicardException(
                            "Merging with interval but Bam file is not indexed " + inFile);
                final CloseableIterator<SAMRecord> samIterator =
                        new SamRecordIntervalIteratorFactory()
                                .makeSamRecordIntervalIterator(in, intervalList, true);
                samReaderToIterator.put(in, samIterator);
            }
            readers.add(in);
            headers.add(in.getFileHeader());

            // A slightly hackish attempt to keep memory consumption down when merging multiple
            // files with large sequence dictionaries (10,000s of sequences). If the dictionaries
            // are identical, then replace the duplicate copies with a single dictionary to reduce
            // the memory footprint.
            if (dict == null) {
                dict = in.getFileHeader().getSequenceDictionary();
            } else if (dict.equals(in.getFileHeader().getSequenceDictionary())) {
                in.getFileHeader().setSequenceDictionary(dict);
            }

            matchedSortOrders = matchedSortOrders && in.getFileHeader().getSortOrder() == SORT_ORDER;
        }
    }

    // If all the input sort orders match the output sort order then just merge them and
    // write on the fly, otherwise setup to merge and sort before writing out the final file
    IOUtil.assertFileIsWritable(OUTPUT);
    final boolean presorted;
    final SAMFileHeader.SortOrder headerMergerSortOrder;
    final boolean mergingSamRecordIteratorAssumeSorted;
    if (matchedSortOrders
            || SORT_ORDER == SAMFileHeader.SortOrder.unsorted
            || ASSUME_SORTED
            || INTERVALS != null) {
        log.info("Input files are in same order as output so sorting to temp directory is not needed.");
        headerMergerSortOrder = SORT_ORDER;
        mergingSamRecordIteratorAssumeSorted = ASSUME_SORTED;
        presorted = true;
    } else {
        log.info("Sorting input files using temp directory " + TMP_DIR);
        headerMergerSortOrder = SAMFileHeader.SortOrder.unsorted;
        mergingSamRecordIteratorAssumeSorted = false;
        presorted = false;
    }

    final SamFileHeaderMerger headerMerger =
            new SamFileHeaderMerger(headerMergerSortOrder, headers, MERGE_SEQUENCE_DICTIONARIES);
    final MergingSamRecordIterator iterator;
    // no interval defined, get an iterator for the whole bam
    if (intervalList == null) {
        iterator = new MergingSamRecordIterator(headerMerger, readers, mergingSamRecordIteratorAssumeSorted);
    } else {
        // show warning related to https://github.com/broadinstitute/picard/pull/314/files
        log.info("Warning: merged bams from different interval lists may contain the same read in both files");
        iterator = new MergingSamRecordIterator(headerMerger, samReaderToIterator, true);
    }

    final SAMFileHeader header = headerMerger.getMergedHeader();
    for (final String comment : COMMENT) {
        header.addComment(comment);
    }
    header.setSortOrder(SORT_ORDER);

    final SAMFileWriterFactory samFileWriterFactory = new SAMFileWriterFactory();
    if (USE_THREADING) {
        samFileWriterFactory.setUseAsyncIo(true);
    }
    final SAMFileWriter out = samFileWriterFactory.makeSAMOrBAMWriter(header, presorted, OUTPUT);

    // Lastly loop through and write out the records
    final ProgressLogger progress = new ProgressLogger(log, PROGRESS_INTERVAL);
    while (iterator.hasNext()) {
        final SAMRecord record = iterator.next();
        out.addAlignment(record);
        progress.record(record);
    }

    log.info("Finished reading inputs.");
    for (final CloseableIterator<SAMRecord> iter : samReaderToIterator.values()) CloserUtil.close(iter);
    CloserUtil.close(readers);
    out.close();
    return 0;
}
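/*
 * A minimal, self-contained sketch of the same htsjdk merge pattern used in doWork() above,
 * stripped of the Picard command-line plumbing. The file names ("a.bam", "b.bam", "merged.bam")
 * are placeholders, and the sketch assumes both inputs are already coordinate-sorted so the
 * merged output can be written on the fly without an intermediate sort.
 */
public static void mergeTwoBamsSketch() throws Exception {
    final List<SamReader> readers = new ArrayList<SamReader>();
    final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
    for (final File f : java.util.Arrays.asList(new File("a.bam"), new File("b.bam"))) {
        final SamReader reader = SamReaderFactory.makeDefault().open(f);
        readers.add(reader);
        headers.add(reader.getFileHeader());
    }

    // merge the headers, keeping the coordinate sort order of the inputs and
    // merging their sequence dictionaries
    final SamFileHeaderMerger headerMerger =
            new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, headers, true);
    final MergingSamRecordIterator iterator =
            new MergingSamRecordIterator(headerMerger, readers, true);

    // write the merged stream; 'true' marks the records as presorted
    try (final SAMFileWriter out = new SAMFileWriterFactory()
            .makeSAMOrBAMWriter(headerMerger.getMergedHeader(), true, new File("merged.bam"))) {
        while (iterator.hasNext()) {
            out.addAlignment(iterator.next());
        }
    }
    for (final SamReader reader : readers) {
        reader.close();
    }
}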