/**
 * Load a bigWig or bigBed file and add the resulting track(s) to {@code newTracks}.
 *
 * <p>bigWig files become a single {@link DataSourceTrack}. bigBed files become either a
 * methylation track (when the path or autoSql header indicates bisulfite/CpG methylation data)
 * or a generic {@link FeatureTrack}.
 *
 * @param locator resource describing the file to load
 * @param newTracks list that receives the created track(s)
 * @param genome reference genome used to interpret coordinates
 * @throws IOException if the file cannot be opened or read
 * @throws RuntimeException if the file is neither bigWig nor bigBed
 */
public void loadBWFile(ResourceLocator locator, List<Track> newTracks, Genome genome)
    throws IOException {

  String trackName = locator.getTrackName();
  String trackId = locator.getPath();
  String path = locator.getPath();

  BBFileReader reader = new BBFileReader(path);

  // Fail fast on unrecognized files before building a data source over the reader
  // (the original constructed the BigWigDataSource unconditionally, even when about to throw).
  if (!reader.isBigWigFile() && !reader.isBigBedFile()) {
    throw new RuntimeException("Unknown BIGWIG type: " + locator.getPath());
  }

  BigWigDataSource bigwigSource = new BigWigDataSource(reader, genome);

  if (reader.isBigWigFile()) {
    DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, bigwigSource);
    newTracks.add(track);
  } else {
    // bigBed: methylation data (detected by path convention or autoSql table name)
    // gets a specialized track; everything else a generic feature track.
    if (locator.getPath().contains("RRBS_cpgMethylation")
        || locator.getPath().contains("BiSeq_cpgMethylation")
        || (reader.getAutoSql() != null && reader.getAutoSql().startsWith("table BisulfiteSeq"))) {
      loadMethylTrack(locator, reader, newTracks, genome);
    } else {
      FeatureTrack track = new FeatureTrack(locator, trackId, trackName, bigwigSource);
      newTracks.add(track);
    }
  }
}
/**
 * Load a Cufflinks output file as one or more FPKM data tracks.
 *
 * <p>Supported inputs: {@code *fpkm_tracking} (one track per sample column) and
 * {@code gene_exp.diff}/{@code cds_exp.diff} (a single differential-expression track).
 *
 * @param locator resource describing the Cufflinks file
 * @param newTracks list that receives the created tracks
 * @param genome reference genome used to interpret coordinates
 * @throws IOException if the file cannot be read
 * @throws RuntimeException if the file name matches no supported Cufflinks format
 */
private void loadCufflinksFile(ResourceLocator locator, List<Track> newTracks, Genome genome)
    throws IOException {

  final String path = locator.getPath();
  final String s = path.toLowerCase();

  // Diamond operator instead of the redundant explicit type argument.
  List<DataTrack> cuffTracks = new ArrayList<>();
  if (s.endsWith("fpkm_tracking")) {
    FPKMTrackingCodec codec = new FPKMTrackingCodec(path);
    List<FPKMValue> values = CufflinksParser.parse(codec, path);
    // One track per sample column; "q%02d" suffixes disambiguate ids and names.
    for (int sampleIndex = 0; sampleIndex < codec.getNumSamples(); sampleIndex++) {
      CufflinksDataSource ds = new CufflinksDataSource(sampleIndex, values, genome);
      String supId = String.format("q%02d", sampleIndex);
      DataTrack track =
          new DataSourceTrack(
              locator,
              locator.getPath() + " " + supId,
              locator.getTrackName() + " " + supId,
              ds);
      cuffTracks.add(track);
    }
  } else if (s.endsWith("gene_exp.diff") || s.endsWith("cds_exp.diff")) {
    AsciiFeatureCodec<ExpDiffValue> codec = new ExpDiffCodec(path);
    List<ExpDiffValue> values = CufflinksParser.parse(codec, path);
    CufflinksDataSource ds = new CufflinksDataSource(values, genome);
    DataTrack track = new DataSourceTrack(locator, locator.getPath(), locator.getTrackName(), ds);
    cuffTracks.add(track);
  } else {
    throw new RuntimeException("Unsupported file type: " + path);
  }

  // Common post-processing: all Cufflinks tracks are FPKM-typed and share a scale.
  for (DataTrack track : cuffTracks) {
    track.setTrackType(TrackType.FPKM);
    CufflinksTrack.setCufflinksScale(track);
    newTracks.add(track);
  }
}
/** * Add the provided SegmentedDataSet to the list of tracks, set other relevant properties * * @param locator * @param newTracks * @param genome * @param ds */ private void loadSegTrack( ResourceLocator locator, List<Track> newTracks, Genome genome, SegmentedDataSet ds) { String path = locator.getPath(); TrackProperties props = null; if (ds instanceof SegmentedAsciiDataSet) { props = ((SegmentedAsciiDataSet) ds).getTrackProperties(); } // The "freq" track. TODO - make this optional if ((ds.getType() == TrackType.COPY_NUMBER || ds.getType() == TrackType.CNV) && ds.getSampleNames().size() > 1) { FreqData fd = new FreqData(ds, genome); String freqTrackId = path; String freqTrackName = "CNV Summary"; CNFreqTrack freqTrack = new CNFreqTrack(locator, freqTrackId, freqTrackName, fd); newTracks.add(freqTrack); } for (String trackName : ds.getSampleNames()) { String trackId = path + "_" + trackName; SegmentedDataSource dataSource = new SegmentedDataSource(trackName, ds); DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource); track.setRendererClass(HeatmapRenderer.class); track.setTrackType(ds.getType()); if (props != null) { track.setProperties(props); } newTracks.add(track); } }
/**
 * Load a GMT gene-list file. A file containing exactly one list is applied immediately as the
 * active gene list; otherwise the user is simply told how many lists were loaded.
 *
 * @param locator resource pointing at the GMT file
 * @throws IOException if the file cannot be read
 */
private void loadGMT(ResourceLocator locator) throws IOException {
  List<GeneList> geneLists = GeneListManager.getInstance().loadGMTFile(locator.getPath());
  if (geneLists.size() != 1) {
    MessageUtils.showMessage("Loaded " + geneLists.size() + " gene lists.");
  } else {
    IGV.getInstance().setGeneList(geneLists.get(0), true);
  }
}
private void loadSyntentyMapping(ResourceLocator locator, List<Track> newTracks) { List<BlastMapping> mappings = (new BlastParser()).parse(locator.getPath()); List<htsjdk.tribble.Feature> features = new ArrayList<htsjdk.tribble.Feature>(mappings.size()); features.addAll(mappings); Genome genome = GenomeManager.getInstance().getCurrentGenome(); FeatureTrack track = new FeatureTrack(locator, new FeatureCollectionSource(features, genome)); track.setName(locator.getTrackName()); // track.setRendererClass(AlignmentBlockRenderer.class); newTracks.add(track); }
private void loadIGVFile(ResourceLocator locator, List<Track> newTracks, Genome genome) { if (locator.isLocal()) { if (!checkSize(locator)) { return; } } String dsName = locator.getTrackName(); IGVDataset ds = new IGVDataset(locator, genome); ds.setName(dsName); TrackProperties trackProperties = ds.getTrackProperties(); String path = locator.getPath(); TrackType type = ds.getType(); for (String trackName : ds.getTrackNames()) { DatasetDataSource dataSource = new DatasetDataSource(trackName, ds, genome); String trackId = path + "_" + trackName; DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource); // track.setRendererClass(HeatmapRenderer.class); track.setTrackType(ds.getType()); track.setProperties(trackProperties); if (type == TrackType.ALLELE_FREQUENCY) { track.setRendererClass(PointsRenderer.class); track.setHeight(40); } newTracks.add(track); } }
private void loadVCFListFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException { TribbleListFeatureSource src = new TribbleListFeatureSource(locator.getPath(), genome); VCFHeader header = (VCFHeader) src.getHeader(); // Test if the input VCF file contains methylation rate data: // This is determined by testing for the presence of two sample format fields: MR and GB, used // in the // rendering of methylation rate. // MR is the methylation rate on a scale of 0 to 100% and GB is the number of bases that pass // filter for the position. GB is needed to avoid displaying positions for which limited // coverage // prevents reliable estimation of methylation rate. boolean enableMethylationRateSupport = (header.getFormatHeaderLine("MR") != null && header.getFormatHeaderLine("GB") != null); List<String> allSamples = new ArrayList(header.getGenotypeSamples()); VariantTrack t = new VariantTrack(locator, src, allSamples, enableMethylationRateSupport); // VCF tracks handle their own margin t.setMargin(0); newTracks.add(t); }
/**
 * Wrap an already-open bigBed reader holding methylation data in a {@link MethylTrack} and add
 * it to the track list.
 *
 * @param locator resource the reader was opened from
 * @param reader open bigBed reader positioned on the methylation data
 * @param newTracks list that receives the created track
 * @param genome reference genome used to interpret coordinates
 * @throws IOException if the track cannot be initialized from the reader
 */
private void loadMethylTrack(
    ResourceLocator locator, BBFileReader reader, List<Track> newTracks, Genome genome)
    throws IOException {
  newTracks.add(new MethylTrack(locator, reader, genome));
}
/** * Load a rnai gene score file and create a datasource and track. * * <p> * * @param locator * @param newTracks */ private void loadRNAiGeneScoreFile( ResourceLocator locator, List<Track> newTracks, RNAIGeneScoreParser.Type type, Genome genome) { RNAIGeneScoreParser parser = new RNAIGeneScoreParser(locator.getPath(), type, genome); Collection<RNAIDataSource> dataSources = parser.parse(); String path = locator.getPath(); for (RNAIDataSource ds : dataSources) { String name = ds.getName(); String trackId = path + "_" + name; DataSourceTrack track = new DataSourceTrack(locator, trackId, name, ds); // Set attributes. This "hack" is neccessary to register these attributes with the // attribute manager to get displayed. track.setAttributeValue("SCREEN", ds.getScreen()); if ((ds.getCondition() != null) && (ds.getCondition().length() > 0)) { track.setAttributeValue("CONDITION", ds.getCondition()); } track.setHeight(80); // track.setDataRange(new DataRange(-3, 0, 3)); newTracks.add(track); } }
private void loadFromDBProfile(ResourceLocator profileLocator, List<Track> newTracks) throws IOException { DBProfile dbProfile = DBProfile.parseProfile(profileLocator.getPath()); for (DBProfile.DBTable table : dbProfile.getTableList()) { SQLCodecSource source = SQLCodecSource.getFromTable(table); if (source != null) { CachingFeatureSource cachingReader = new CachingFeatureSource(source); FeatureTrack track = new FeatureTrack(profileLocator, cachingReader); track.setName(source.getTableName()); newTracks.add(track); } else if (table.getFormat().equals("seg")) { Genome genome = GenomeManager.getInstance().getCurrentGenome(); SegmentedAsciiDataSet ds = (new SegmentedReader(table.getDbLocator(), genome)).loadFromDB(table); loadSegTrack(table.getDbLocator(), newTracks, genome, ds); } else if (table.getFormat().equals("sample.info")) { // TODO sampleIdColumnLabel was previously hardcoded as "SAMPLE_ID_ARRAY" // TODO Basically I'm shoehorning this information into a field usually used for something // else. Only slightly better String sampleIdColumnLabel = table.getBinColName(); if (sampleIdColumnLabel == null) { throw new IllegalArgumentException( "Profile must have binColName specifying the sample id column label"); } (new SampleInfoSQLReader(table, sampleIdColumnLabel)).load(); } } }
/**
 * Load features from a genbank (.gbk) file as a single feature track. This method ignores the
 * fasta section; to define a genome from a genbank file use GenomeManager.
 *
 * @param locator resource pointing at the genbank file
 * @param newTracks list that receives the created track
 * @param genome reference genome used to interpret coordinates
 * @throws IOException if the file cannot be read
 */
private void loadGbkFile(ResourceLocator locator, List<Track> newTracks, Genome genome)
    throws IOException {
  GenbankParser parser = new GenbankParser(locator.getPath());
  // false: skip the sequence (fasta) section, only the feature table is wanted here.
  parser.readFeatures(false);
  FeatureCollectionSource featureSource =
      new FeatureCollectionSource(parser.getFeatures(), genome);
  newTracks.add(new FeatureTrack(locator, featureSource));
}
/**
 * Load a GWAS PLINK result file as a single GWAS track.
 *
 * @param locator resource pointing at the GWAS file
 * @param newTracks list that receives the created track
 * @param genome reference genome used to interpret coordinates
 * @throws IOException if the file cannot be read
 */
private void loadGWASFile(ResourceLocator locator, List<Track> newTracks, Genome genome)
    throws IOException {
  GWASParser parser = new GWASParser(locator, genome);
  GWASData data = parser.parse();
  newTracks.add(
      new GWASTrack(locator, locator.getPath(), locator.getFileName(), data, parser));
}
/**
 * Load a Bionano SMAP file as a single feature track rendered with the SMAP renderer in
 * expanded display mode.
 *
 * @param locator resource pointing at the SMAP file
 * @param newTracks list that receives the created track
 * @param genome reference genome used to interpret coordinates
 * @throws IOException if the file cannot be read
 */
private void loadSMAPFile(ResourceLocator locator, List<Track> newTracks, Genome genome)
    throws IOException {
  List<Feature> parsedFeatures = SMAPParser.parseFeatures(locator, genome);
  FeatureCollectionSource featureSource = new FeatureCollectionSource(parsedFeatures, genome);
  FeatureTrack smapTrack = new FeatureTrack(locator, locator.getName(), featureSource);
  smapTrack.setRendererClass(SMAPRenderer.class);
  smapTrack.setDisplayMode(Track.DisplayMode.EXPANDED);
  newTracks.add(smapTrack);
}
/**
 * Load a mutation file (".mut" or ".maf") as one or more mutation tracks.
 *
 * @param locator resource pointing at the mutation file
 * @param newTracks list that receives the created tracks
 * @param genome reference genome used to interpret coordinates
 * @throws IOException if the file cannot be read
 * @throws TribbleIndexNotFoundException if a required index is missing
 */
private void loadMutFile(ResourceLocator locator, List<Track> newTracks, Genome genome)
    throws IOException, TribbleIndexNotFoundException {
  List<FeatureTrack> mutationTracks =
      new MutationTrackLoader().loadMutationTracks(locator, genome);
  // Every mutation track is typed and rendered the same way.
  for (FeatureTrack mutationTrack : mutationTracks) {
    mutationTrack.setTrackType(TrackType.MUTATION);
    mutationTrack.setRendererClass(MutationRenderer.class);
    newTracks.add(mutationTrack);
  }
}
/**
 * Load a "feature directory" list file as a single feature track with an unlimited visibility
 * window.
 *
 * @param locator resource pointing at the list file
 * @param newTracks list that receives the created track
 * @param genome reference genome used to interpret coordinates
 * @throws RuntimeException wrapping any IOException raised while opening the source
 */
private void loadListFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {
  FeatureSource dirSource;
  try {
    dirSource = new FeatureDirSource(locator, genome);
  } catch (IOException ex) {
    // Callers of this method do not declare IOException; surface it unchecked with the cause.
    throw new RuntimeException(ex);
  }
  FeatureTrack listTrack = new FeatureTrack(locator, dirSource);
  listTrack.setName(locator.getTrackName());
  // 0 = always visible, regardless of zoom level.
  listTrack.setVisibilityWindow(0);
  newTracks.add(listTrack);
}
private void loadGctFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { if (locator.isLocal()) { if (!checkSize(locator)) { return; } } ExpressionFileParser parser = null; ExpressionDataset ds = null; parser = new ExpressionFileParser(locator, null, genome); ds = parser.createDataset(); if (ds.isEmpty()) { String message = "The probes in the file <br> " + locator.getPath() + "<br>" + "could not be mapped to genomic positions. This can be corrected by specify a probe mapping<br>" + "file from the Preferences window (Probes tab), or by specifing the genomic positions in the<br>" + "expression data file. Please see the user guide for more details."; MessageUtils.showMessage(message); } else { ds.setName(locator.getTrackName()); ds.setNormalized(true); ds.setLogValues(true); /* * File outputFile = new File(IGV.DEFAULT_USER_DIRECTORY, file.getName() + ".h5"); * OverlappingProcessor proc = new OverlappingProcessor(ds); * proc.setZoomMax(0); * proc.process(outputFile.getAbsolutePath()); * loadH5File(outputFile, messages, attributeList, group); */ // Counter for generating ID TrackProperties trackProperties = ds.getTrackProperties(); String path = locator.getPath(); for (String trackName : ds.getTrackNames()) { DatasetDataSource dataSource = new DatasetDataSource(trackName, ds, genome); String trackId = path + "_" + trackName; Track track = new DataSourceTrack(locator, trackId, trackName, dataSource); track.setRendererClass(HeatmapRenderer.class); track.setProperties(trackProperties); newTracks.add(track); } } }
/**
 * Load a Goby counts archive as a single coverage data track.
 *
 * @param locator resource pointing at the Goby counts archive
 * @param newTracks list that receives the created track
 * @param genome reference genome (currently unused here; kept for signature consistency)
 */
private void loadGobyCountsArchive(
    ResourceLocator locator, List<Track> newTracks, Genome genome) {

  if (log.isDebugEnabled()) {
    log.debug("Loading Goby counts archive: " + locator.toString());
  }

  String coverageTrackId = locator.getSampleId() + " coverage";
  String coverageTrackName = locator.getFileName();
  final DataSource countsSource = new GobyCountArchiveDataSource(locator);

  newTracks.add(new DataSourceTrack(locator, coverageTrackId, coverageTrackName, countsSource));
}
/**
 * Load a TDF (tiled data) file, creating one data track per track name stored in the file.
 *
 * <p>Display-name resolution is order-sensitive: an explicit locator name wins over the
 * track-line name; if the locator has no name, the track-line name (or the locator's default
 * track name) is used instead.
 *
 * @param locator resource pointing at the TDF file
 * @param newTracks list that receives the created tracks
 * @param genome reference genome used to interpret coordinates
 */
public void loadTDFFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {

  log.debug("Loading TDF file " + locator.getPath());
  TDFReader reader = TDFReader.getReader(locator);
  TrackType type = reader.getTrackType();

  // Parse the embedded track line (if any) into track properties.
  TrackProperties props = null;
  String trackLine = reader.getTrackLine();
  if (trackLine != null && trackLine.length() > 0) {
    props = new TrackProperties();
    ParsingUtils.parseTrackLine(trackLine, props);
  }

  // In case of conflict between the resource locator display name and the track properties name,
  // use the resource locator
  String name = locator.getName();
  if (name != null && props != null) {
    props.setName(name);
  }
  if (name == null) {
    // No explicit locator name: fall back to the track-line name, then the default track name.
    name = props == null ? locator.getTrackName() : props.getName();
  }

  int trackNumber = 0;
  String path = locator.getPath();
  // Multi-track files get per-heading ids/names; single-track files use the file-level name.
  boolean multiTrack = reader.getTrackNames().length > 1;

  for (String heading : reader.getTrackNames()) {

    String trackId = multiTrack ? path + "_" + heading : path;
    String trackName = multiTrack ? heading : name;

    // ".counts" paths are actually Goby count archives, not TDF data.
    final DataSource dataSource =
        locator.getPath().endsWith(".counts")
            ? new GobyCountArchiveDataSource(locator)
            : new TDFDataSource(reader, trackNumber, heading, genome);

    DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource);

    String displayName = (name == null || multiTrack) ? heading : name;
    track.setName(displayName);
    track.setTrackType(type);

    if (props != null) {
      track.setProperties(props);
    }

    newTracks.add(track);

    trackNumber++;
  }
}
/** * Load the input file as a feature, mutation, or maf (multiple alignment) file. * * @param locator * @param newTracks */ private void loadTribbleFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException { String typeString = locator.getTypeString(); // Mutation (mut, maf, vcf) files are handled special. Check here, rather than depend on order // in giant case statement. if (MutationTrackLoader.isMutationAnnotationFile(locator)) { loadMutFile(locator, newTracks, genome); // Must be tried before generic "loadIndexed" below } else if (VariantTrack.isVCF(typeString)) { loadVCF(locator, newTracks, genome); } else { TribbleFeatureSource tribbleFeatureSource = TribbleFeatureSource.getFeatureSource(locator, genome); FeatureSource src = GFFFeatureSource.isGFF(locator.getPath()) ? new GFFFeatureSource(tribbleFeatureSource) : tribbleFeatureSource; // Create feature source and track FeatureTrack t = new FeatureTrack(locator, src); t.setName(locator.getTrackName()); // t.setRendererClass(BasicTribbleRenderer.class); // Set track properties from header Object header = tribbleFeatureSource.getHeader(); if (header != null && header instanceof FeatureFileHeader) { FeatureFileHeader ffh = (FeatureFileHeader) header; if (ffh.getTrackType() != null) { t.setTrackType(ffh.getTrackType()); } if (ffh.getTrackProperties() != null) { t.setProperties(ffh.getTrackProperties()); } if (ffh.getTrackType() == TrackType.REPMASK) { t.setHeight(15); } } if (locator.getPath().contains(".narrowPeak") || locator.getPath().contains(".broadPeak") || locator.getPath().contains(".gappedPeak")) { t.setUseScore(true); } newTracks.add(t); } }
/**
 * Load an eWIG ".ibf" file as a single {@link EWigTrack}, applying any track properties found
 * in the file's embedded track line.
 *
 * @param locator resource pointing at the eWIG file
 * @param newTracks list that receives the created track
 * @param genome reference genome used to interpret coordinates
 */
private void loadEwigIBFFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {

  // The TDF reader is only used here to extract the embedded track line.
  TDFReader reader = TDFReader.getReader(locator.getPath());
  String trackLine = reader.getTrackLine();

  TrackProperties trackProperties = null;
  if (trackLine != null && trackLine.length() > 0) {
    trackProperties = new TrackProperties();
    ParsingUtils.parseTrackLine(trackLine, trackProperties);
  }

  EWigTrack ewigTrack = new EWigTrack(locator, genome);
  if (trackProperties != null) {
    ewigTrack.setProperties(trackProperties);
  }
  ewigTrack.setName(locator.getTrackName());
  newTracks.add(ewigTrack);
}
/**
 * Load a wiggle (.wig) file, creating one data track per track declared in the dataset.
 *
 * <p>Name resolution is order-sensitive: the track-line name is used unless the locator has an
 * explicit display name, which then overrides it.
 *
 * @param locator resource pointing at the wig file
 * @param newTracks list that receives the created tracks
 * @param genome reference genome used to interpret coordinates
 */
private void loadWigFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {

  // Give the user a chance to abort oversized local files.
  if (locator.isLocal()) {
    if (!checkSize(locator)) {
      return;
    }
  }

  WiggleDataset ds = (new WiggleParser(locator, genome)).parse();
  TrackProperties props = ds.getTrackProperties();

  // In case of conflict between the resource locator display name and the track properties name,
  // use the resource locator
  String name = props == null ? null : props.getName();
  String label = locator.getName();
  if (name == null) {
    name = locator.getFileName();
  } else if (label != null) {
    props.setName(label); // erase name from track properties
  }

  String path = locator.getPath();
  // Multi-track wig files get per-heading ids/names; single-track files use the file-level name.
  boolean multiTrack = ds.getTrackNames().length > 1;

  for (String heading : ds.getTrackNames()) {

    String trackId = multiTrack ? path + "_" + heading : path;
    String trackName = multiTrack ? heading : name;

    DatasetDataSource dataSource = new DatasetDataSource(trackId, ds, genome);

    DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource);

    String displayName = (label == null || multiTrack) ? heading : label;
    track.setName(displayName);
    track.setProperties(props);

    track.setTrackType(ds.getType());

    if (ds.getType() == TrackType.EXPR) {
      // NOTE(review): presumably expression values should not be summarized by a window
      // function — confirm against WindowFunction usage elsewhere.
      track.setWindowFunction(WindowFunction.none);
    }

    newTracks.add(track);
  }
}
private void loadRnaiGctFile(ResourceLocator locator, List<Track> newTracks, Genome genome) { RNAIGCTDatasetParser parser = new RNAIGCTDatasetParser(locator, genome); Collection<RNAIDataSource> dataSources = parser.parse(); if (dataSources != null) { String path = locator.getPath(); for (RNAIDataSource ds : dataSources) { String trackId = path + "_" + ds.getName(); DataSourceTrack track = new DataSourceTrack(locator, trackId, ds.getName(), ds); // Set attributes. track.setAttributeValue("SCREEN", ds.getScreen()); track.setHeight(80); newTracks.add(track); } } }
private void loadDASResource(ResourceLocator locator, List<Track> currentTracks) throws DataLoadException { // TODO Connect and get all the attributes of the DAS server, and run the appropriate load // statements // TODO Currently we are only going to be doing features // TODO -- move the source creation to a factory DASFeatureSource featureSource = null; try { featureSource = new DASFeatureSource(locator); } catch (MalformedURLException e) { log.error("Malformed URL", e); throw new DataLoadException("Error: Malformed URL "); } FeatureTrack track = new FeatureTrack(locator, featureSource); // Try to create a sensible name from the path String name = locator.getName(); if (name == null || name.length() == 0) { if (locator.getPath().contains("genome.ucsc.edu")) { name = featureSource.getType(); } else { name = featureSource.getPath().replace("/das/", "").replace("/features", ""); } } track.setName(name); // A hack until we can notate this some other way if (locator.getPath().contains("cosmic")) { track.setRendererClass(CosmicFeatureRenderer.class); track.setMinimumHeight(2); track.setHeight(20); track.setDisplayMode(Track.DisplayMode.EXPANDED); } else { track.setRendererClass(IGVFeatureRenderer.class); track.setMinimumHeight(35); track.setHeight(45); } currentTracks.add(track); }
/**
 * Load a SAM/BAM/CRAM alignment resource, wiring together three cooperating tracks: a coverage
 * track, a splice-junction track, and the alignment track itself (added in that order).
 *
 * <p>Also attempts to attach precalculated coverage data (a sibling ".tdf" file) to the
 * coverage track when available.
 *
 * @param locator resource pointing at the alignment file
 * @param newTracks list that receives the created tracks
 * @param genome reference genome used to interpret coordinates
 * @throws IOException if the file cannot be read
 */
private void loadAlignmentsTrack(ResourceLocator locator, List<Track> newTracks, Genome genome)
    throws IOException {

  try {
    String dsName = locator.getTrackName();

    // If the user tried to load the index, look for the file (this is a common mistake)
    if (locator.getTypeString().endsWith(".sai") || locator.getTypeString().endsWith(".bai")) {
      MessageUtils.showMessage(
          "<html><b>ERROR:</b> Loading SAM/BAM index files are not supported:  "
              + locator.getPath()
              + "<br>Load the SAM or BAM file directly. ");
      return;
    }

    AlignmentDataManager dataManager = new AlignmentDataManager(locator, genome);

    // Check that alignments we loaded actually match some data. Many BAM files will contain some
    // sequences not represented in the genome, but if there are no matches warn the user.
    List<String> seqNames = dataManager.getSequenceNames();
    if (seqNames != null && seqNames.size() > 0) {
      if (!checkSequenceNames(locator.getPath(), genome, seqNames)) {
        return;
      }
    }

    // BAM and CRAM require an index; bail out with a message if it is missing.
    if (locator.getTypeString().endsWith("bam") || locator.getTypeString().endsWith("cram")) {
      if (!dataManager.hasIndex()) {
        MessageUtils.showMessage(
            "<html>Could not load index file for: "
                + locator.getPath()
                + "<br>  An index file is required for SAM & BAM files.");
        return;
      }
    }

    AlignmentTrack alignmentTrack = new AlignmentTrack(locator, dataManager, genome); // parser.loadTrack(locator, dsName);
    alignmentTrack.setName(dsName);
    alignmentTrack.setVisible(
        PreferenceManager.getInstance().getAsBoolean(PreferenceManager.SAM_SHOW_ALIGNMENT_TRACK));

    // Create coverage track
    CoverageTrack covTrack =
        new CoverageTrack(locator, dsName + " Coverage", alignmentTrack, genome);
    covTrack.setVisible(
        PreferenceManager.getInstance().getAsBoolean(PreferenceManager.SAM_SHOW_COV_TRACK));
    newTracks.add(covTrack);
    // Cross-wire the coverage track with the shared data manager and the alignment track.
    covTrack.setDataManager(dataManager);
    dataManager.setCoverageTrack(covTrack);
    alignmentTrack.setCoverageTrack(covTrack);

    // Search for precalculated coverage data
    // Skip for GA4GH & SU2C resources
    if (!(Ga4ghAPIHelper.RESOURCE_TYPE.equals(locator.getType())
        || locator.getPath().contains("dataformat=.bam")
        || OAuthUtils.isGoogleCloud(locator.getPath()))) {

      String covPath = locator.getCoverage();
      if (covPath == null) {
        boolean bypassFileAutoDiscovery =
            PreferenceManager.getInstance()
                .getAsBoolean(PreferenceManager.BYPASS_FILE_AUTO_DISCOVERY);
        String path = locator.getPath();
        // Auto-discover a sibling ".tdf" file unless the preference disables it or
        // the path is a query URL.
        if (!bypassFileAutoDiscovery && !path.contains("/query.cgi?")) {
          covPath = path + ".tdf";
        }
      }
      if (covPath != null) {
        if (FileUtils.resourceExists(covPath)) {
          log.debug("Loading TDF for coverage: " + covPath);
          try {
            TDFReader reader = TDFReader.getReader(covPath);
            TDFDataSource ds = new TDFDataSource(reader, 0, dsName + " coverage", genome);
            covTrack.setDataSource(ds);
          } catch (Exception e) {
            // Precalculated coverage is optional; log and continue without it.
            log.error("Error loading coverage TDF file", e);
          }
        }
      }
    }

    boolean showSpliceJunctionTrack =
        PreferenceManager.getInstance().getAsBoolean(PreferenceManager.SAM_SHOW_JUNCTION_TRACK);

    SpliceJunctionTrack spliceJunctionTrack =
        new SpliceJunctionTrack(
            locator,
            dsName + " Junctions",
            dataManager,
            alignmentTrack,
            SpliceJunctionTrack.StrandOption.BOTH);
    spliceJunctionTrack.setHeight(60);
    spliceJunctionTrack.setVisible(showSpliceJunctionTrack);
    newTracks.add(spliceJunctionTrack);
    alignmentTrack.setSpliceJunctionTrack(spliceJunctionTrack);

    newTracks.add(alignmentTrack);

    log.debug("Alignment track loaded");

  } catch (IndexNotFoundException e) {
    MessageUtils.showMessage(
        "<html>Could not find the index file for  <br><br>&nbsp;&nbsp;"
            + e.getSamFile()
            + "<br><br>Note: The index file can be created using igvtools and must be in the same directory as the .sam file.");
  }
}
/**
 * Load a peak file as a single {@link PeakTrack}.
 *
 * @param locator resource pointing at the peak file
 * @param newTracks list that receives the created track
 * @param genome reference genome used to interpret coordinates
 * @throws IOException if the file cannot be read
 */
private void loadPeakTrack(ResourceLocator locator, List<Track> newTracks, Genome genome)
    throws IOException {
  newTracks.add(new PeakTrack(locator, genome));
}
/**
 * Load a multiple-alignment file as a single track named "Multiple Alignments".
 *
 * @param locator resource pointing at the multiple-alignment file
 * @param newTracks list that receives the created track
 * @param genome reference genome used to interpret coordinates
 * @throws IOException if the file cannot be read
 */
private void loadMultipleAlignmentTrack(
    ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException {
  MultipleAlignmentTrack multipleAlignmentTrack = new MultipleAlignmentTrack(locator, genome);
  multipleAlignmentTrack.setName("Multiple Alignments");
  newTracks.add(multipleAlignmentTrack);
}
/**
 * Load a GISTIC result file as a single track named after the locator's track name.
 *
 * @param locator resource pointing at the GISTIC file
 * @param newTracks list that receives the created track
 */
private void loadGisticFile(ResourceLocator locator, List<Track> newTracks) {
  GisticTrack gisticTrack = GisticFileParser.loadData(locator);
  gisticTrack.setName(locator.getTrackName());
  newTracks.add(gisticTrack);
}
private void loadBasePairFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException { BasePairFileParser parser = new BasePairFileParser(); newTracks.add(parser.loadTrack(locator, genome)); // should create one track from the given file }
/**
 * Load a dRanger structural-variant file; the parser may produce several tracks, all of which
 * are added.
 *
 * @param locator resource pointing at the dRanger file
 * @param newTracks list that receives the created tracks
 * @param genome reference genome used to interpret coordinates
 */
private void loadDRangerFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {
  newTracks.addAll(new DRangerParser().loadTracks(locator, genome));
}