private void loadFromDBProfile(ResourceLocator profileLocator, List<Track> newTracks) throws IOException {

    DBProfile dbProfile = DBProfile.parseProfile(profileLocator.getPath());

    for (DBProfile.DBTable table : dbProfile.getTableList()) {
        SQLCodecSource source = SQLCodecSource.getFromTable(table);
        if (source != null) {
            CachingFeatureSource cachingReader = new CachingFeatureSource(source);
            FeatureTrack track = new FeatureTrack(profileLocator, cachingReader);
            track.setName(source.getTableName());
            newTracks.add(track);
        } else if (table.getFormat().equals("seg")) {
            Genome genome = GenomeManager.getInstance().getCurrentGenome();
            SegmentedAsciiDataSet ds = (new SegmentedReader(table.getDbLocator(), genome)).loadFromDB(table);
            loadSegTrack(table.getDbLocator(), newTracks, genome, ds);
        } else if (table.getFormat().equals("sample.info")) {
            // TODO sampleIdColumnLabel was previously hardcoded as "SAMPLE_ID_ARRAY"
            // TODO Basically I'm shoehorning this information into a field usually used for something else. Only slightly better
            String sampleIdColumnLabel = table.getBinColName();
            if (sampleIdColumnLabel == null) {
                throw new IllegalArgumentException("Profile must have binColName specifying the sample id column label");
            }
            (new SampleInfoSQLReader(table, sampleIdColumnLabel)).load();
        }
    }
}

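/**
 * Load a list-file resource as a single feature track backed by a FeatureDirSource.
 * The track's visibility window is set to 0, and any IOException from the source is
 * rethrown as an unchecked RuntimeException.
 *
 * @param locator
 * @param newTracks
 * @param genome
 */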
private void loadListFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {
    try {
        FeatureSource source = new FeatureDirSource(locator, genome);
        FeatureTrack track = new FeatureTrack(locator, source);
        track.setName(locator.getTrackName());
        track.setVisibilityWindow(0);
        newTracks.add(track);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}

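/**
 * Load a synteny mapping file.  The BlastMapping records parsed from the file are wrapped
 * as Tribble features and presented as a single feature track.
 *
 * @param locator
 * @param newTracks
 */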
private void loadSyntentyMapping(ResourceLocator locator, List<Track> newTracks) {
    List<BlastMapping> mappings = (new BlastParser()).parse(locator.getPath());
    List<htsjdk.tribble.Feature> features = new ArrayList<htsjdk.tribble.Feature>(mappings.size());
    features.addAll(mappings);

    Genome genome = GenomeManager.getInstance().getCurrentGenome();
    FeatureTrack track = new FeatureTrack(locator, new FeatureCollectionSource(features, genome));
    track.setName(locator.getTrackName());
    // track.setRendererClass(AlignmentBlockRenderer.class);
    newTracks.add(track);
}

/**
 * Load the input file as a feature, mutation, or maf (multiple alignment) file.
 *
 * @param locator
 * @param newTracks
 * @param genome
 */
private void loadTribbleFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException {

    String typeString = locator.getTypeString();

    // Mutation (mut, maf, vcf) files are handled specially.  Check here, rather than depend on order in the giant case statement.
    if (MutationTrackLoader.isMutationAnnotationFile(locator)) {
        loadMutFile(locator, newTracks, genome); // Must be tried before generic "loadIndexed" below
    } else if (VariantTrack.isVCF(typeString)) {
        loadVCF(locator, newTracks, genome);
    } else {
        TribbleFeatureSource tribbleFeatureSource = TribbleFeatureSource.getFeatureSource(locator, genome);
        FeatureSource src = GFFFeatureSource.isGFF(locator.getPath()) ?
                new GFFFeatureSource(tribbleFeatureSource) : tribbleFeatureSource;

        // Create feature source and track
        FeatureTrack t = new FeatureTrack(locator, src);
        t.setName(locator.getTrackName());
        // t.setRendererClass(BasicTribbleRenderer.class);

        // Set track properties from header
        Object header = tribbleFeatureSource.getHeader();
        if (header instanceof FeatureFileHeader) {
            FeatureFileHeader ffh = (FeatureFileHeader) header;
            if (ffh.getTrackType() != null) {
                t.setTrackType(ffh.getTrackType());
            }
            if (ffh.getTrackProperties() != null) {
                t.setProperties(ffh.getTrackProperties());
            }
            if (ffh.getTrackType() == TrackType.REPMASK) {
                t.setHeight(15);
            }
        }
        if (locator.getPath().contains(".narrowPeak") || locator.getPath().contains(".broadPeak") || locator.getPath().contains(".gappedPeak")) {
            t.setUseScore(true);
        }
        newTracks.add(t);
    }
}

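/**
 * Load features from a DAS server.  Only feature requests are currently supported; the track
 * name is taken from the locator or, failing that, derived from the DAS path or type.
 *
 * @param locator
 * @param currentTracks
 */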
private void loadDASResource(ResourceLocator locator, List<Track> currentTracks) throws DataLoadException {

    // TODO Connect and get all the attributes of the DAS server, and run the appropriate load statements
    // TODO Currently we are only going to be doing features
    // TODO -- move the source creation to a factory

    DASFeatureSource featureSource = null;
    try {
        featureSource = new DASFeatureSource(locator);
    } catch (MalformedURLException e) {
        log.error("Malformed URL", e);
        throw new DataLoadException("Error: Malformed URL");
    }

    FeatureTrack track = new FeatureTrack(locator, featureSource);

    // Try to create a sensible name from the path
    String name = locator.getName();
    if (name == null || name.length() == 0) {
        if (locator.getPath().contains("genome.ucsc.edu")) {
            name = featureSource.getType();
        } else {
            name = featureSource.getPath().replace("/das/", "").replace("/features", "");
        }
    }
    track.setName(name);

    // A hack until we can notate this some other way
    if (locator.getPath().contains("cosmic")) {
        track.setRendererClass(CosmicFeatureRenderer.class);
        track.setMinimumHeight(2);
        track.setHeight(20);
        track.setDisplayMode(Track.DisplayMode.EXPANDED);
    } else {
        track.setRendererClass(IGVFeatureRenderer.class);
        track.setMinimumHeight(35);
        track.setHeight(45);
    }
    currentTracks.add(track);
}