// Pressing the mouse stops and starts the sound
public void mousePressed() {
  if (tone.isPlaying()) {
    tone.stop(); // The sound can be stopped with the function stop().
  } else {
    tone.repeat();
  }
}
private void addSample(
    Map<String, Map<String, Sample>> map,
    String key,
    String sample,
    long len,
    double depth,
    String gene) {
  if (map.containsKey(key)) {
    Map<String, Sample> sampleMap = map.get(key);
    if (sampleMap.containsKey(sample)) {
      Sample sampleObj = sampleMap.get(sample);
      sampleObj.addLen(len);
      sampleObj.addCov(depth);
    } else {
      Sample sampleObj = new Sample(key, sample, gene, len, depth);
      sampleMap.put(sample, sampleObj);
    }
  } else {
    Sample sampleObj = new Sample(key, sample, gene, len, depth);
    Map<String, Sample> sampleMap = new LinkedHashMap<>();
    sampleMap.put(sample, sampleObj);
    map.put(key, sampleMap);
  }
}
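// Editor's note (not from the original source): on Java 8+ the nested containsKey/put
// branching above can be collapsed with Map.computeIfAbsent. A minimal equivalent sketch,
// assuming the same Sample constructor and accessors used above; the method name
// addSampleCompact is hypothetical.
private void addSampleCompact(
    Map<String, Map<String, Sample>> map, String key, String sample, long len, double depth, String gene) {
  Map<String, Sample> sampleMap = map.computeIfAbsent(key, k -> new LinkedHashMap<>());
  Sample sampleObj = sampleMap.get(sample);
  if (sampleObj == null) {
    // First observation of this sample under this key.
    sampleMap.put(sample, new Sample(key, sample, gene, len, depth));
  } else {
    // Accumulate length and coverage onto the existing entry.
    sampleObj.addLen(len);
    sampleObj.addCov(depth);
  }
}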
public void updateModelWeight(List<Sample> sampleList) {
  for (FeatureValue featureValue : featureMap.values()) {
    featureValue.setTempWeight(0);
  }
  // Accumulate the model's expected feature weight over the sample list.
  for (Sample sample : sampleList) {
    Map<String, Double> probMap = predict(sample.getFeatures());
    for (String feature : sample.getFeatures()) {
      for (String label : labelSet) {
        this.setTempWeight(
            label,
            feature,
            this.getTempWeight(label, feature)
                + probMap.get(label) * (1.0 / sampleList.size()));
      }
    }
  }
  // GIS update: weight += (1 / C) * log(empirical expectation / model expectation),
  // where C is the maximum number of active features per sample.
  features.setLastWeight();
  for (FeatureValue featureValue : featureMap.values()) {
    featureValue.setModelWeight(
        featureValue.getModelWeight()
            + 1.0
                / maxFeaturePerSample
                * Math.log(featureValue.getEmpiricalWeight() / featureValue.getTempWeight()));
  }
}
public void draw() {
  if (tone.isPlaying()) {
    background(255);
  } else {
    background(100);
  }

  // Set the volume to a range between 0 and 1.0
  float ratio = (float) mouseX / width;
  tone.setVolume(ratio);

  // Set the rate to a range between 0 and 88,200
  // Changing the rate alters the pitch
  ratio = (float) mouseY / height; // The rate is set according to the mouseY position.
  tone.setRate(ratio * 88200);

  // Draw some rectangles to show what is going on
  stroke(0);
  fill(175);
  rect(0, 160, mouseX, 20);
  stroke(0);
  fill(175);
  rect(160, 0, 20, mouseY);
}
private void printResult(List<String> bad) {
  for (Map.Entry<String, Map<String, Sample>> entry : samples.entrySet()) {
    if (!bad.contains(entry.getKey())) {
      for (Map.Entry<String, Sample> entrySample : entry.getValue().entrySet()) {
        Sample sample = entrySample.getValue();
        String title =
            amplicon ? entry.getKey() : sample.getTitle(genes.get(sample.getGene()));
        String result = sample.getResultString(title);
        if (useControlSamples) {
          // Use the returned string so the appended control-sample column is printed.
          result = addControlSamples(result, sample);
        }
        System.out.println(result);
      }
    }
  }
}
private double getMedDepth(double[] depth, Set<String> samp) {
  int i = 0;
  for (Map.Entry<String, Map<String, Sample>> entry : samples.entrySet()) {
    Map<String, Sample> sampleMap = entry.getValue();
    for (Map.Entry<String, Sample> sampleEntry : sampleMap.entrySet()) {
      Sample sample = sampleEntry.getValue();
      double norm1 = Precision.round(sample.getCov() * factor.get(sample.getSample()), 2);
      sample.setNorm1(norm1);
      depth[i++] = norm1;
      samp.add(sample.getSample());
    }
  }
  return median.evaluate(depth);
}
private void setFactor2(double medDepth, List<String> bad, Map<String, Double> factor2) {
  for (Map.Entry<String, Map<String, Sample>> entry : samples.entrySet()) {
    if (!bad.contains(entry.getKey())) {
      List<Double> list = new LinkedList<>();
      for (Map.Entry<String, Sample> entrySample : entry.getValue().entrySet()) {
        Sample sample = entrySample.getValue();
        list.add(sample.getNorm1());
      }
      double median = new Median().evaluate(toDoubleArray(list));
      if (median != 0) {
        factor2.put(entry.getKey(), medDepth / median);
      } else {
        factor2.put(entry.getKey(), 0.0);
      }
    }
  }
}
private void validate() {
  validatePedigreeIDUniqueness();

  if (validationStrictness != PedigreeValidationType.SILENT) {
    // Check that samples in data sources are all annotated, if anything is annotated.
    if (!samplesFromPedigrees.isEmpty() && !samplesFromDataSources.isEmpty()) {
      final Set<String> sampleNamesFromPedigrees =
          samplesFromPedigrees.stream().map(Sample::getID).collect(Collectors.toSet());

      for (final Sample dsSample : samplesFromDataSources) {
        if (!sampleNamesFromPedigrees.contains(dsSample.getID())) {
          throw new UserException(
              "Sample "
                  + dsSample.getID()
                  + " found in data sources but not in pedigree files with STRICT pedigree validation");
        }
      }
    }
  }
}
private void setNorm(
    double medDepth,
    List<String> bad,
    Map<String, Double> factor2,
    Map<String, Double> sampleMedian) {
  for (Map.Entry<String, Map<String, Sample>> entry : samples.entrySet()) {
    if (!bad.contains(entry.getKey())) {
      for (Map.Entry<String, Sample> entrySample : entry.getValue().entrySet()) {
        Sample sample = entrySample.getValue();
        double norm1 = sample.getNorm1();
        double fact2 = factor2.get(entry.getKey());
        double smplMed = sampleMedian.get(sample.getSample());
        sample.setNorm1b(Precision.round(norm1 * fact2 + 0.1, 2));
        sample.setNorm2(
            Precision.round(
                medDepth != 0 ? log.value((norm1 * fact2 + 0.1) / medDepth) / log.value(2) : 0,
                2));
        sample.setNorm3(
            Precision.round(
                smplMed != 0 ? log.value((norm1 * fact2 + 0.1) / smplMed) / log.value(2) : 0,
                2));
      }
    }
  }
}
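// Editor's note (not from the original source): the log.value(x) / log.value(2) expression
// used in setNorm (and again in addControlSamples below) is simply a base-2 logarithm.
// A small hypothetical helper makes that intent explicit, assuming java.lang.Math is an
// acceptable stand-in for the log object:
private static double log2(double x) {
  return Math.log(x) / Math.log(2);
}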
public void setup() {
  size(200, 200);
  // Start Sonia engine.
  Sonia.start(this);
  // Create a new sample object.
  tone = new Sample("tone.wav");
  // Loop the sound forever
  // (well, at least until stop() is called)
  tone.repeat();
  smooth();
}
@Test
public void testSamples() {
  try {
    // Get the row count of the Sample table in the dataset.
    int expected = getDataSet().getTable("Sample").getRowCount();
    // Get the number of Sample records from the DAO.
    int actual = getSampleDAO().count();
    // The DAO should return as many Sample records as the dataset contains.
    TestCase.assertEquals("Wrong number of Sample", expected, actual);
    System.out.println("Expected number of Sample: " + expected + ", actual: " + actual);
    for (Sample d : random(getSampleDAO(), actual, 5)) {
      TestCase.assertNotNull(d);
      TestCase.assertNotNull(d.getId());
    }
  } catch (Exception e) {
    e.printStackTrace();
    TestCase.fail();
  }
}
// Returns the result string, extended with the control-sample column when control
// samples are configured (returning the value fixes the original code, which built the
// string but discarded it). The zero check now guards the division instead of forcing it.
private String addControlSamples(String result, Sample sample) {
  List<Double> list = new LinkedList<>();
  for (String s : controlSamples) {
    for (Map.Entry<String, Map<String, Sample>> entry : samples.entrySet()) {
      list.add(entry.getValue().get(s).getNorm1b());
    }
  }
  if (!list.isEmpty()) {
    double meanVal = mean.evaluate(toDoubleArray(list));
    StringBuilder builder = new StringBuilder(result);
    builder
        .append("\t")
        .append(
            String.format(
                "%.3f%n",
                meanVal != 0 ? sample.getNorm1b() / meanVal / log.value(2) : 0));
    result = builder.toString();
  }
  return result;
}
private Sample feedSampleData(Sample sample) throws Exception {
  sample.setProjectId(projectId);
  sample.setProjectName(projectName);

  if (sample.getParentSampleName() == null || sample.getParentSampleName().equals("0")) {
    sample.setParentSampleName(null);
  }

  // Set the sample level by adding 1 to the selected parent sample's level.
  String parentSampleName = sample.getParentSampleName();
  if (parentSampleName != null && !parentSampleName.isEmpty() && !parentSampleName.equals("0")) {
    Sample selectedParentSample = readPersister.getSample(projectId, parentSampleName);
    sample.setSampleLevel(selectedParentSample.getSampleLevel() + 1);
  } else {
    sample.setParentSampleId(null);
    sample.setSampleLevel(1);
  }
  return sample;
}
private MultiLoadParameter createMultiLoadParameter(
    String projectName, Project project, Sample sample, List<FileReadAttributeBean> frab)
    throws Exception {
  MultiLoadParameter loadParameter = new MultiLoadParameter();
  boolean isSampleRegistration = false;
  boolean isProjectRegistration = false;

  if (this.eventName.equals(Constants.EVENT_PROJECT_REGISTRATION)
      && project.getProjectName() != null
      && !project.getProjectName().isEmpty()) {
    isProjectRegistration = true;
    List<Project> projectList = new ArrayList<Project>();
    projectList.add(feedProjectData(project));
    loadParameter.addProjects(projectList);

    /*
     * loads all event meta attributes from the parent
     * by hkim 6/11/13
     */
    List<EventMetaAttribute> emas =
        this.readPersister.getEventMetaAttributes(projectName, null); // , Constants.EVENT_PROJECT_REGISTRATION);
    if (emas != null && emas.size() > 0) {
      List<EventMetaAttribute> newEmas = new ArrayList<EventMetaAttribute>(emas.size());
      for (EventMetaAttribute ema : emas) {
        EventMetaAttribute newEma = new EventMetaAttribute();
        newEma.setProjectName(project.getProjectName());
        newEma.setEventName(ema.getEventName());
        newEma.setEventTypeLookupId(ema.getEventTypeLookupId());
        newEma.setAttributeName(ema.getAttributeName());
        newEma.setNameLookupId(ema.getNameLookupId());
        newEma.setActive(ema.isActive());
        newEma.setRequired(ema.isRequired());
        newEma.setDesc(ema.getDesc());
        newEma.setDataType(ema.getDataType());
        newEma.setLabel(ema.getLabel());
        newEma.setOntology(ema.getOntology());
        newEma.setOptions(ema.getOptions());
        newEma.setSampleRequired(ema.isSampleRequired());
        newEmas.add(newEma);
      }
      loadParameter.addEventMetaAttributes(newEmas);
    } else {
      throw new Exception(
          String.format(
              "Event Metadata has not been set up for the parent project and the '%s' event type.",
              Constants.EVENT_PROJECT_REGISTRATION));
    }

    List<SampleMetaAttribute> smas = this.readPersister.getSampleMetaAttributes(projectId);
    if (smas != null && smas.size() > 0) {
      List<SampleMetaAttribute> newSmas = new ArrayList<SampleMetaAttribute>(smas.size());
      for (SampleMetaAttribute sma : smas) {
        SampleMetaAttribute newSma = new SampleMetaAttribute();
        newSma.setProjectName(project.getProjectName());
        newSma.setAttributeName(sma.getAttributeName());
        newSma.setNameLookupId(sma.getNameLookupId());
        newSma.setDataType(sma.getDataType());
        newSma.setDesc(sma.getDesc());
        newSma.setLabel(sma.getLabel());
        newSma.setOntology(sma.getOntology());
        newSma.setOptions(sma.getOptions());
        newSma.setRequired(sma.isRequired());
        newSma.setActive(sma.isActive());
        newSmas.add(newSma);
      }
      loadParameter.addSampleMetaAttributes(newSmas);
    }

    List<ProjectMetaAttribute> pmas = this.readPersister.getProjectMetaAttributes(projectName);
    if (pmas != null && pmas.size() > 0) {
      List<ProjectMetaAttribute> newPmas = new ArrayList<ProjectMetaAttribute>(pmas.size());
      for (ProjectMetaAttribute pma : pmas) {
        ProjectMetaAttribute newPma = new ProjectMetaAttribute();
        newPma.setProjectName(project.getProjectName());
        newPma.setAttributeName(pma.getAttributeName());
        newPma.setDataType(pma.getDataType());
        newPma.setDesc(pma.getDesc());
        newPma.setLabel(pma.getLabel());
        newPma.setNameLookupId(pma.getNameLookupId());
        newPma.setOntology(pma.getOntology());
        newPma.setOptions(pma.getOptions());
        newPma.setRequired(pma.isRequired());
        newPma.setActive(pma.isActive());
        newPmas.add(newPma);
      }
      loadParameter.addProjectMetaAttributes(newPmas);
    }
  } else if (this.eventName.equals(Constants.EVENT_SAMPLE_REGISTRATION)
      && sample.getSampleName() != null
      && !sample.getSampleName().isEmpty()) {
    isSampleRegistration = true;
    List<Sample> sampleList = new ArrayList<Sample>();
    sampleList.add(feedSampleData(sample));
    loadParameter.addSamples(sampleList);
  }

  List<FileReadAttributeBean> loadingList = null;
  if (frab != null && frab.size() > 0) {
    loadingList =
        processFileReadBeans(
            isProjectRegistration ? project.getProjectName() : projectName,
            isSampleRegistration ? sample.getSampleName() : this.sampleName,
            frab);
  }
  if (loadingList != null && loadingList.size() > 0) {
    if (isProjectRegistration) {
      loadParameter.addProjectRegistrations(Constants.EVENT_PROJECT_REGISTRATION, loadingList);
    } else if (isSampleRegistration) {
      loadParameter.addSampleRegistrations(Constants.EVENT_SAMPLE_REGISTRATION, loadingList);
    } else {
      loadParameter.addEvents(this.eventName, loadingList);
    }
  }
  return loadParameter;
}
public String execute() {
  String rtnVal = SUCCESS;
  UserTransaction tx = null;
  try {
    sampleName = sampleName != null && sampleName.equals("0") ? null : sampleName;

    if (jobType != null) {
      boolean isProjectRegistration = eventName.equals(Constants.EVENT_PROJECT_REGISTRATION);
      boolean isSampleRegistration = eventName.equals(Constants.EVENT_SAMPLE_REGISTRATION);
      if (projectName == null
          || projectName.equals("0")
          || eventName == null
          || eventName.equals("0")) {
        throw new Exception("Project or Event type is not selected.");
      }

      if (jobType.equals("insert")) { // loads single event
        tx = (UserTransaction) new InitialContext().lookup("java:comp/UserTransaction");
        tx.begin();
        psewt.loadAll(
            null,
            this.createMultiLoadParameter(projectName, loadingProject, loadingSample, beanList));
        this.reset();
      } else if (jobType.equals("grid")) { // loads multiple events from grid view
        tx = (UserTransaction) new InitialContext().lookup("java:comp/UserTransaction");
        tx.begin();
        for (GridBean gBean : gridList) {
          if (gBean != null) {
            if (isProjectRegistration
                && gBean.getProjectName() != null
                && gBean.getProjectPublic() != null) {
              loadingProject = new Project();
              loadingProject.setProjectName(gBean.getProjectName());
              loadingProject.setIsPublic(Integer.valueOf(gBean.getProjectPublic()));
            } else if (isSampleRegistration
                && gBean.getSampleName() != null
                && gBean.getSamplePublic() != null) {
              loadingSample = new Sample();
              loadingSample.setSampleName(gBean.getSampleName());
              loadingSample.setParentSampleName(gBean.getParentSampleName());
              loadingSample.setIsPublic(Integer.valueOf(gBean.getSamplePublic()));
            } else {
              if (gBean.getSampleName() != null) {
                this.sampleName = gBean.getSampleName();
              }
            }
            List<FileReadAttributeBean> fBeanList = gBean.getBeanList();
            if (fBeanList != null && fBeanList.size() > 0) {
              psewt.loadAll(
                  null,
                  this.createMultiLoadParameter(
                      projectName, loadingProject, loadingSample, fBeanList));
            }
          }
        }
        this.reset();
      } else if (jobType.equals("file")) { // loads data from a CSV file to grid view
        if (!this.uploadFile.canRead()) {
          throw new Exception("Error in reading the file.");
        } else {
          try {
            CSVReader reader = new CSVReader(new FileReader(this.uploadFile));
            int lineCount = 0;
            List<String> columns = new ArrayList<String>();
            String currProjectName = null;
            gridList = new ArrayList<GridBean>();
            boolean hasSampleName = false;
            String[] line;
            while ((line = reader.readNext()) != null) {
              if (lineCount != 1) {
                if (lineCount == 0) {
                  Collections.addAll(columns, line);
                  hasSampleName = columns.indexOf("SampleName") >= 0;
                } else {
                  int colIndex = 0;
                  currProjectName = line[colIndex++];
                  if (!isProjectRegistration && !currProjectName.equals(this.projectName)) {
                    throw new Exception(MULTIPLE_SUBJECT_IN_FILE_MESSAGE);
                  }
                  GridBean gBean = new GridBean();
                  gBean.setProjectName(currProjectName);
                  if (hasSampleName) {
                    gBean.setSampleName(line[colIndex++]);
                  }
                  if (isProjectRegistration) {
                    gBean.setProjectName(currProjectName);
                    gBean.setProjectPublic(line[colIndex++]);
                  } else if (isSampleRegistration) {
                    gBean.setParentSampleName(line[colIndex++]);
                    gBean.setSamplePublic(line[colIndex++]);
                  }
                  gBean.setBeanList(new ArrayList<FileReadAttributeBean>());
                  for (; colIndex < columns.size(); colIndex++) {
                    FileReadAttributeBean fBean = new FileReadAttributeBean();
                    fBean.setProjectName(
                        isProjectRegistration ? currProjectName : this.projectName);
                    fBean.setAttributeName(columns.get(colIndex));
                    fBean.setAttributeValue(line[colIndex]);
                    gBean.getBeanList().add(fBean);
                  }
                  this.gridList.add(gBean);
                }
              }
              lineCount++;
            }
            jobType = "grid";
          } catch (Exception ex) {
            throw ex;
          }
        }
      } else if (jobType.equals("template")) { // download template
        List<EventMetaAttribute> emaList =
            readPersister.getEventMetaAttributes(projectName, eventName);
        /*
         * removing the sanity check on sample requirement since multiple sample support is in action
         * by hkim 5/2/13
         * ModelValidator validator = new ModelValidator();
         * validator.validateEventTemplateSanity(emaList, projectName, sampleName, eventName);
         */
        TemplatePreProcessingUtils cvsUtils = new TemplatePreProcessingUtils();
        String templateType = jobType.substring(jobType.indexOf("_") + 1);
        downloadStream =
            cvsUtils.buildFileContent(templateType, emaList, projectName, sampleName, eventName);
        downloadContentType = templateType.equals("c") ? "csv" : "vnd.ms-excel";
        rtnVal = Constants.FILE_DOWNLOAD_MSG;
      }
    }
  } catch (Exception ex) {
    logger.error("Exception in EventLoader : " + ex.toString());
    ex.printStackTrace();
    if (ex.getClass() == ForbiddenResourceException.class) {
      addActionError(Constants.DENIED_USER_EDIT_MESSAGE);
      return Constants.FORBIDDEN_ACTION_RESPONSE;
    } else if (ex.getClass() == ForbiddenResourceException.class) {
      // Note: this condition duplicates the branch above, so the LOGIN branch is
      // unreachable as written; it likely targeted a different exception type originally.
      addActionError(Constants.DENIED_USER_EDIT_MESSAGE);
      return LOGIN;
    } else if (ex.getClass() == ParseException.class) {
      addActionError(Constants.INVALID_DATE_MESSAGE);
    } else {
      addActionError(ex.toString());
    }

    // deletes uploaded files in event of error
    if (loadedFiles != null && loadedFiles.size() > 0) {
      for (String filePath : loadedFiles) {
        File tempFile = new File(fileStoragePath + filePath);
        if (tempFile.exists()) {
          tempFile.delete();
        }
      }
    }
    try {
      if (tx != null) {
        tx.rollback();
      }
    } catch (SystemException se) {
      addActionError(se.toString());
    }
    rtnVal = ERROR;
  } finally {
    try {
      // get project list for the drop down box
      List<String> projectNameList = new ArrayList<String>();
      if (projectNames == null || projectNames.equals("")) {
        projectNameList.add("ALL");
      } else if (projectNames.contains(",")) {
        projectNameList.addAll(Arrays.asList(projectNames.split(",")));
      } else {
        projectNameList.add(projectNames);
      }
      projectList = readPersister.getProjects(projectNameList);

      if (tx != null && tx.getStatus() != Status.STATUS_NO_TRANSACTION) {
        tx.commit();
      }
      if (jobType != null && jobType.equals("grid") && this.uploadFile != null) {
        this.uploadFile.delete();
      }
    } catch (Exception ex) {
      ex.printStackTrace();
    }
  }
  return rtnVal;
}
public void run() {
  String line;
  String id = null;
  config.refgenes = new ArrayList<RefGene>();
  config.dbsnp = new ArrayList<dbSNP>();
  try {
    // System.err.println("quality read start");  // debug
    boolean need_ruler_rebuild = false;
    boolean need_title = false;
    SAMRegion sr = null;

    while (true) {
      line = br.readLine();
      if (line == null) break; // EOF
      String[] stuff = line.split("\t");
      if (stuff[0].equals("sample2id")) {
        Sample sample = Sample.get_sample(stuff[1]);
        for (int i = 2; i < stuff.length; i++) {
          config.read2sample.put(new String(stuff[i]), sample);
        }
      } else if (stuff[0].equals("consensus_label")) {
        config.CONSENSUS_TAG = new String(stuff[1]);
      } else if (stuff[0].equals("ruler_start")) {
        config.ruler_start = Integer.parseInt(stuff[1]);
        need_ruler_rebuild = true;
      } else if (stuff[0].equals("title")) {
        config.title = stuff[1];
        need_title = true;
      } else if (stuff[0].equals("refGene")) {
        config.refgenes.add(new RefGene(stuff));
      } else if (stuff[0].equals("dbsnp")) {
        config.dbsnp.add(new dbSNP(stuff));
      } else if (stuff[0].toUpperCase().indexOf("SAM_") == 0) {
        // FUGLY
        SAMResourceTags v = SAMResourceTags.valueOf(stuff[0].toUpperCase());
        if (v.equals(SAMResourceTags.SAM_URL)) config.sams.add(new SAMResource());
        config.sams.get(config.sams.size() - 1).import_data(v, stuff[1]);
      } else if (stuff[0].equals("reference_sequence")) {
        StringBuffer target_sequence = new StringBuffer();
        while (true) {
          line = br.readLine();
          if (line.equals(">")) {
            // config.target_sequence = target_sequence.toString().toCharArray();
            config.target_sequence = target_sequence.toString().getBytes();
            break;
          } else {
            target_sequence.append(line);
          }
        }
      } else if (stuff[0].equals("target_region")) {
        sr = new SAMRegion();
        sr.tname = new String(stuff[1]);
        sr.range = new Range(Integer.parseInt(stuff[2]), Integer.parseInt(stuff[3]));
        // System.err.println("range " + sr.tname + " " + sr.range.start + " " + sr.range.end);  // debug
      } else {
        System.err.println("error, don't recognize tag " + stuff[0]); // debug
      }
    }

    if (config.ruler_start > 0) {
      for (dbSNP snp : config.dbsnp) {
        snp.consensus_adjust(config.ruler_start);
      }
      for (RefGene rg : config.refgenes) {
        rg.consensus_adjust(config.ruler_start);
      }
    }

    if (config.dbsnp != null) {
      for (dbSNP snp : config.dbsnp) {
        // System.err.println("adding snp at " + snp.start + " = " + (snp.start + config.ruler_start));  // debug
      }
      config.snp_config.snp_query = new dbSNPSet(config.dbsnp);
    }

    for (SAMResource sre : config.sams) {
      sre.set_region(sr);
    }

    if (av != null) {
      while (av.get_acepanel().is_built() == false) {
        // spin until dependency built
        try {
          System.err.println("MarkupReader spin..."); // debug
          Thread.sleep(50);
        } catch (Exception e) {
        }
      }

      PadMap pm = av.get_acepanel().get_assembly().get_padmap();
      for (RefGene rg : config.refgenes) {
        rg.consensus_setup(pm);
      }

      Runnable later;
      if (need_title) {
        // System.err.println("title=" + config.title);  // debug
        later =
            new Runnable() {
              public void run() {
                av.setTitle(config.title);
                av.repaint();
              }
            };
        javax.swing.SwingUtilities.invokeLater(later);
      }

      if (need_ruler_rebuild) {
        //
        //  ruler labeling has changed.
        //
        later =
            new Runnable() {
              public void run() {
                // av.get_acepanel().get_canvas().build_ruler();
                if (config.start_unpadded_offset != 0) {
                  AcePanel ap = av.get_acepanel();
                  PadMap pm = ap.get_assembly().get_padmap();
                  int upo =
                      (config.start_unpadded_offset - config.ruler_start) + 1;
                  // +1: convert to 1-based offset
                  int po = pm.get_unpadded_to_padded(upo);
                  System.err.println("upo=" + upo + " po=" + po); // debug
                  SNPList sl = new SNPList();
                  sl.addElement(new SNP(po, 0.0));
                  ap.ace.set_snps(sl);
                  ap.get_canvas().center_on(po);
                  ap.get_canvas().repaint();
                }
              }
            };
        javax.swing.SwingUtilities.invokeLater(later);
      }

      if (config.enable_exon_navigation) {
        if (false) {
          System.err.println("DEBUG: exon nav disabled");
        } else {
          av.get_acepanel().init_exon_navigation();
        }
      }

      av.get_acepanel().get_assembly().build_summary_info();
      // group samples by tumor/normal, if applicable

      // av.repaint();
      later =
          new Runnable() {
            public void run() {
              av.repaint();
            }
          };
      javax.swing.SwingUtilities.invokeLater(later);
    }
    // System.err.println("quality read end");  // debug
  } catch (Exception e) {
    e.printStackTrace(); // debug
  }
}
public final List<Sample> parse(
    Reader reader, EnumSet<MissingPedField> missingFields, SampleDB sampleDB) {
  final List<String> lines = new XReadLines(reader).readLines();

  // What are the record offsets?
  final int familyPos = missingFields.contains(MissingPedField.NO_FAMILY_ID) ? -1 : 0;
  final int samplePos = familyPos + 1;
  final int paternalPos = missingFields.contains(MissingPedField.NO_PARENTS) ? -1 : samplePos + 1;
  final int maternalPos = missingFields.contains(MissingPedField.NO_PARENTS) ? -1 : paternalPos + 1;
  final int sexPos =
      missingFields.contains(MissingPedField.NO_SEX) ? -1 : Math.max(maternalPos, samplePos) + 1;
  final int phenotypePos =
      missingFields.contains(MissingPedField.NO_PHENOTYPE)
          ? -1
          : Math.max(sexPos, Math.max(maternalPos, samplePos)) + 1;
  final int nExpectedFields =
      MathUtils.arrayMaxInt(
              Arrays.asList(samplePos, paternalPos, maternalPos, sexPos, phenotypePos))
          + 1;

  // go through once and determine properties
  int lineNo = 1;
  boolean isQT = false;
  final List<String[]> splits = new ArrayList<String[]>(lines.size());
  for (final String line : lines) {
    if (line.startsWith(commentMarker)) continue;
    if (line.trim().equals("")) continue;

    final String[] parts = line.split("\\s+");

    if (parts.length != nExpectedFields)
      throw new UserException.MalformedFile(
          reader.toString(), "Bad PED line " + lineNo + ": wrong number of fields");

    if (phenotypePos != -1) {
      isQT = isQT || !CATAGORICAL_TRAIT_VALUES.contains(parts[phenotypePos]);
    }

    splits.add(parts);
    lineNo++;
  }
  logger.info("Phenotype is other? " + isQT);

  // now go through and parse each record
  lineNo = 1;
  final List<Sample> samples = new ArrayList<Sample>(splits.size());
  for (final String[] parts : splits) {
    String familyID = null, individualID, paternalID = null, maternalID = null;
    Gender sex = Gender.UNKNOWN;
    String quantitativePhenotype = Sample.UNSET_QT;
    Affection affection = Affection.UNKNOWN;

    if (familyPos != -1) familyID = maybeMissing(parts[familyPos]);
    individualID = parts[samplePos];
    if (paternalPos != -1) paternalID = maybeMissing(parts[paternalPos]);
    if (maternalPos != -1) maternalID = maybeMissing(parts[maternalPos]);

    if (sexPos != -1) {
      if (parts[sexPos].equals(SEX_MALE)) sex = Gender.MALE;
      else if (parts[sexPos].equals(SEX_FEMALE)) sex = Gender.FEMALE;
      else sex = Gender.UNKNOWN;
    }

    if (phenotypePos != -1) {
      if (isQT) {
        if (parts[phenotypePos].equals(MISSING_VALUE1)) affection = Affection.UNKNOWN;
        else {
          affection = Affection.OTHER;
          quantitativePhenotype = parts[phenotypePos];
        }
      } else {
        if (parts[phenotypePos].equals(MISSING_VALUE1)) affection = Affection.UNKNOWN;
        else if (parts[phenotypePos].equals(MISSING_VALUE2)) affection = Affection.UNKNOWN;
        else if (parts[phenotypePos].equals(PHENOTYPE_UNAFFECTED))
          affection = Affection.UNAFFECTED;
        else if (parts[phenotypePos].equals(PHENOTYPE_AFFECTED)) affection = Affection.AFFECTED;
        else
          throw new ReviewedGATKException(
              "Unexpected phenotype type " + parts[phenotypePos] + " at line " + lineNo);
      }
    }

    final Sample s =
        new Sample(
            individualID,
            sampleDB,
            familyID,
            paternalID,
            maternalID,
            sex,
            affection,
            quantitativePhenotype);
    samples.add(s);
    sampleDB.addSample(s);
    lineNo++;
  }

  for (final Sample sample : new ArrayList<Sample>(samples)) {
    Sample dad =
        maybeAddImplicitSample(
            sampleDB, sample.getPaternalID(), sample.getFamilyID(), Gender.MALE);
    if (dad != null) samples.add(dad);

    Sample mom =
        maybeAddImplicitSample(
            sampleDB, sample.getMaternalID(), sample.getFamilyID(), Gender.FEMALE);
    if (mom != null) samples.add(mom);
  }

  return samples;
}
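// Editor's note (not from the original source): with no MissingPedField flags set, each
// non-comment record is expected to carry the six standard whitespace-separated PED columns,
// for example:
//
//   FAM01  NA12878  NA12891  NA12892  2  2
//
// i.e. family ID, individual ID, paternal ID, maternal ID, sex, and phenotype; the *Pos
// offsets computed above shift left by one for each column declared missing.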
public static void main(String[] args) {
  System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

  // Mat mat = Mat.eye(3, 3, CvType.CV_8UC1);
  // System.out.println("mat = " + mat.dump());

  Sample n = new Sample();
  // n.templateMatching();

  // put text in image
  // Mat data = Highgui.imread("images/erosion.jpg");
  // Core.putText(data, "Sample", new Point(50, 80), Core.FONT_HERSHEY_SIMPLEX, 1,
  //     new Scalar(0, 0, 0), 2);
  // Highgui.imwrite("images/erosion2.jpg", data);

  // getting dct of an image
  String path = "images/croppedfeature/go (20).jpg";
  path = "images/wordseg/img1.png";
  Mat image = Highgui.imread(path, Highgui.IMREAD_GRAYSCALE);
  ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();

  Imgproc.threshold(image, image, 0, 255, Imgproc.THRESH_OTSU);
  Imgproc.threshold(image, image, 220, 128, Imgproc.THRESH_BINARY_INV);

  Mat newImg = new Mat(45, 100, image.type());
  newImg.setTo(new Scalar(0));
  n.copyMat(image, newImg);

  int vgap = 25;
  int hgap = 45 / 3;

  Moments m = Imgproc.moments(image, false);
  Mat hu = new Mat();
  Imgproc.HuMoments(m, hu);
  System.out.println(hu.dump());

  // divide the mat into 12 parts then get the features of each part
  // int count = 1;
  // for (int j = 0; j < 45; j += hgap) {
  //   for (int i = 0; i < 100; i += vgap) {
  //     Mat result = newImg.submat(j, j + hgap, i, i + vgap);
  //
  //     Moments m = Imgproc.moments(result, false);
  //     double m01 = m.get_m01();
  //     double m00 = m.get_m00();
  //     double m10 = m.get_m10();
  //     int x = m00 != 0 ? (int) (m10 / m00) : 0;
  //     int y = m00 != 0 ? (int) (m01 / m00) : 0;
  //     Mat hu = new Mat();
  //     Imgproc.HuMoments(m, hu);
  //     System.out.println(hu.dump());
  //     System.out.println(count + " :" + x + " and " + y);
  //     Imgproc.threshold(result, result, 0, 254, Imgproc.THRESH_BINARY_INV);
  //     Highgui.imwrite("images/submat/" + count + ".jpg", result);
  //     count++;
  //   }
  // }
  //
  // for (int i = vgap; i < 100; i += vgap) {
  //   Point pt1 = new Point(i, 0);
  //   Point pt2 = new Point(i, 99);
  //   Core.line(newImg, pt1, pt2, new Scalar(0, 0, 0));
  // }
  // for (int i = hgap; i < 45; i += hgap) {
  //   Point pt1 = new Point(0, i);
  //   Point pt2 = new Point(99, i);
  //   Core.line(newImg, pt1, pt2, new Scalar(0, 0, 0));
  // }
  // Highgui.imwrite("images/submat/copyto.jpg", newImg);
}