/**
 * Renders the CSV file as labelled points. This example shows how to obtain a
 * color from the layer's style blackboard and use it while drawing.
 *
 * @param g graphics to draw into
 * @param monitor progress monitor; may be null
 * @throws RenderException if the data could not be read or reprojected
 */
public void render(Graphics2D g, IProgressMonitor monitor) throws RenderException {
    if (monitor == null) {
        monitor = new NullProgressMonitor();
    }
    CsvReader reader = null;
    try {
        ILayer layer = getContext().getLayer();
        IGeoResource resource = layer.findGeoResource(CSV.class);
        if (resource == null) {
            return;
        }
        ReferencedEnvelope bounds = getRenderBounds();
        monitor.subTask("connecting");
        CSV csv = resource.resolve(CSV.class, null);

        // LOOK UP STYLE
        IStyleBlackboard style = layer.getStyleBlackboard();
        Color color = (Color) style.get(ColorStyle.ID);

        // DATA TO WORLD
        CoordinateReferenceSystem dataCRS = layer.getCRS();
        CoordinateReferenceSystem worldCRS = getContext().getCRS();
        MathTransform dataToWorld = CRS.findMathTransform(dataCRS, worldCRS, false);

        // DRAW FILE
        monitor.beginTask("csv render", csv.getSize());
        reader = csv.reader();
        reader.readHeaders();
        int nameIndex = reader.getIndex("name");
        Coordinate worldLocation = new Coordinate();
        while (reader.readRecord()) {
            Point point = CSV.getPoint(reader);
            Coordinate dataLocation = point.getCoordinate();
            try {
                JTS.transform(dataLocation, worldLocation, dataToWorld);
            } catch (TransformException e) {
                continue; // skip any location we cannot reproject
            }
            if (bounds != null && !bounds.contains(worldLocation)) {
                continue; // optimization: skip points outside the area being drawn
            }
            java.awt.Point p = getContext().worldToPixel(worldLocation);
            g.setColor(color);
            g.fillRect(p.x - 2, p.y - 2, 6, 6);
            g.setColor(Color.BLACK);
            String name = reader.get(nameIndex);
            g.drawString(name, p.x + 15, p.y + 15);
            monitor.worked(1);
            if (monitor.isCanceled()) {
                break;
            }
        }
    } catch (IOException e) {
        throw new RenderException(e); // rethrow any exceptions encountered
    } catch (FactoryException e) {
        throw new RenderException(e); // rethrow any exceptions encountered
    } finally {
        if (reader != null) {
            reader.close();
        }
        monitor.done();
    }
}
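For context, here is a minimal sketch of how the color read back above might be placed on the style blackboard in the first place, e.g. by a style configurator or an operation. It assumes uDig's IStyleBlackboard.put(String, Object) API and reuses the ColorStyle.ID key that render() reads; the method name applyColor is illustrative only.

void applyColor(ILayer layer, Color color) {
    // store the color under the same key render() casts back to Color
    IStyleBlackboard blackboard = layer.getStyleBlackboard();
    blackboard.put(ColorStyle.ID, color);
}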
public StringBuffer uploadAndReportCustomDataFile(InputStream inputStream, long size, String fileFormat,
        char delimChar, List<String> listOfUIDsToUpdate, CustomFieldGroup customFieldGroup,
        PhenoCollection phenoCollection, boolean overwriteExisting)
        throws FileFormatException, ArkSystemException {
    List<PhenoCollection> phenoCollectionsWithTheirDataToInsert = new ArrayList<PhenoCollection>();
    delimiterCharacter = delimChar;
    uploadReport = new StringBuffer();

    // Convert XLS input to CSV; any other format is read as-is
    InputStream convertedInputStream;
    if (fileFormat.equalsIgnoreCase(Constants.FileFormat.XLS.toString())) {
        XLStoCSV xlsToCSV = new XLStoCSV(delimiterCharacter);
        convertedInputStream = xlsToCSV.convertXlsInputStreamToCsv(inputStream);
    } else {
        convertedInputStream = inputStream;
    }

    InputStreamReader inputStreamReader = null;
    CsvReader csvReader = null;
    DecimalFormat decimalFormat = new DecimalFormat("0.00");
    int subjectCount = 0;
    long updateFieldsCount = 0L; // pheno upload is insert-only, so this is never incremented
    long insertFieldsCount = 0L;
    long emptyDataCount = 0L;
    try {
        inputStreamReader = new InputStreamReader(convertedInputStream);
        csvReader = new CsvReader(inputStreamReader, delimiterCharacter);
        String[] stringLineArray;

        List<LinkSubjectStudy> allSubjectWhichWillBeUpdated = null;
        if (listOfUIDsToUpdate.size() > 0) {
            allSubjectWhichWillBeUpdated = iArkCommonService.getUniqueSubjectsWithTheseUIDs(study, listOfUIDsToUpdate);
        } else {
            allSubjectWhichWillBeUpdated = new ArrayList<LinkSubjectStudy>();
        }

        if (size <= 0) {
            uploadReport.append("ERROR: The input size was not greater than 0. Actual length reported: ");
            uploadReport.append(size);
            uploadReport.append("\n");
            throw new FileFormatException("The input size was not greater than 0. Actual length reported: " + size);
        }

        csvReader.readHeaders();
        List<String> fieldNameCollection = Arrays.asList(csvReader.getHeaders());
        ArkFunction phenoCustomFieldArkFunction =
                iArkCommonService.getArkFunctionByName(Constants.FUNCTION_KEY_VALUE_PHENO_COLLECTION);
        List<CustomFieldDisplay> cfdsThatWeNeed = iArkCommonService.getCustomFieldDisplaysIn(
                fieldNameCollection, study, phenoCustomFieldArkFunction, customFieldGroup);
        // Paul has requested that pheno data be insert-only, so existing data is not fetched:
        // List<PhenoData> dataThatWeHave =
        //         iArkCommonService.getCustomFieldDataFor(cfdsThatWeNeed, allSubjectWhichWillBeUpdated);
        QuestionnaireStatus uploadingStatus =
                iPhenotypicService.getPhenoCollectionStatusByName(Constants.PHENO_COLLECTION_STATUS_UPLOADED);

        // Read one line at a time; each record contains potentially many custom fields
        while (csvReader.readRecord()) {
            List<PhenoData> phenoDataToInsertForThisPhenoCollection = new ArrayList<PhenoData>();
            log.info("reading record " + subjectCount);
            stringLineArray = csvReader.getValues();
            String subjectUID = stringLineArray[0];
            String recordDate = stringLineArray[1];
            Date recordDate_asDate = (recordDate.isEmpty() ? new Date() : simpleDateFormat.parse(recordDate));
            LinkSubjectStudy subject = getSubjectByUIDFromExistList(allSubjectWhichWillBeUpdated, subjectUID);
            CustomField customField = null;
            List<PhenoCollection> subjectExistingMatchingPhenoCollections =
                    iPhenotypicService.getSubjectMatchingPhenoCollections(subject, customFieldGroup, recordDate_asDate);
            PhenoCollection phenoCollectionIntoDB = new PhenoCollection();
            if (subjectExistingMatchingPhenoCollections.size() == 0 || !overwriteExisting) {
                // No existing match (or overwriting is disabled): build a new collection
                phenoCollectionIntoDB.setDescription(phenoCollection.getDescription());
                phenoCollectionIntoDB.setLinkSubjectStudy(subject);
                // phenoCollectionIntoDB.setName(phenoCollection.getName());
                phenoCollectionIntoDB.setQuestionnaire(customFieldGroup);
                if (recordDate.isEmpty()) {
                    phenoCollectionIntoDB.setRecordDate(new Date());
                } else {
                    phenoCollectionIntoDB.setRecordDate(recordDate_asDate);
                }
                phenoCollectionIntoDB.setStatus(uploadingStatus); // TODO for this to be UPLOADED TYPE STATUS
            } else {
                if (subjectExistingMatchingPhenoCollections.size() == 1) {
                    recordDate_asDate = (recordDate.isEmpty() ? new Date() : simpleDateFormat.parse(recordDate));
                    phenoCollectionIntoDB = subjectExistingMatchingPhenoCollections.get(0);
                } else {
                    // More than one matching collection is ambiguous: skip this record
                    subjectCount++;
                    continue;
                }
            }

            for (CustomFieldDisplay cfd : cfdsThatWeNeed) {
                String theDataAsString = null;
                customField = cfd.getCustomField();
                if (csvReader.getIndex(cfd.getCustomField().getName()) < 0) {
                    // No exact header match: fall back to a case-insensitive search
                    for (String nameAsSeenInFile : fieldNameCollection) {
                        if (nameAsSeenInFile.equalsIgnoreCase(cfd.getCustomField().getName())) {
                            theDataAsString = csvReader.get(nameAsSeenInFile);
                        }
                    }
                } else {
                    theDataAsString = csvReader.get(cfd.getCustomField().getName());
                }

                if (theDataAsString != null && !theDataAsString.isEmpty()) {
                    PhenoData dataToInsert = new PhenoData();
                    dataToInsert.setCustomFieldDisplay(cfd);
                    // Pheno data is tied to the pheno collection rather than the subject
                    // dataToInsert.setLinkSubjectStudy(subject);
                    setValue(customField, cfd, dataToInsert, theDataAsString);
                    boolean flag = true;
                    for (PhenoData phenoData : phenoCollectionIntoDB.getPhenoData()) {
                        if (phenoData.getCustomFieldDisplay().getId().equals(cfd.getId())) {
                            // Field already present on the collection: update it in place
                            phenoData.setDateDataValue(dataToInsert.getDateDataValue());
                            phenoData.setErrorDataValue(dataToInsert.getErrorDataValue());
                            phenoData.setNumberDataValue(dataToInsert.getNumberDataValue());
                            phenoData.setTextDataValue(dataToInsert.getTextDataValue());
                            flag = false;
                            break;
                        }
                    }
                    if (flag) {
                        phenoDataToInsertForThisPhenoCollection.add(dataToInsert);
                    }
                    insertFieldsCount++;
                } else {
                    emptyDataCount++;
                }
            }
            phenoCollectionIntoDB.getPhenoData().addAll(phenoDataToInsertForThisPhenoCollection);
            log.info(phenoCollectionIntoDB.toString());
            phenoCollectionsWithTheirDataToInsert.add(phenoCollectionIntoDB);
            subjectCount++;
        }
        log.info("finished: subjects = " + subjectCount
                + ", data inserts = " + insertFieldsCount
                + ", pheno collections = " + phenoCollectionsWithTheirDataToInsert.size()
                + ", empty cells = " + emptyDataCount);
    } catch (IOException ioe) {
        uploadReport.append("SYSTEM ERROR: Unexpected I/O exception whilst reading the subject data file\n");
        log.error("processMatrixSubjectFile IOException stacktrace:", ioe);
        throw new ArkSystemException("Unexpected I/O exception whilst reading the subject data file");
    } catch (Exception ex) {
        uploadReport.append("SYSTEM ERROR: Unexpected exception whilst reading the subject data file\n");
        log.error("processMatrixSubjectFile Exception stacktrace:", ex);
        throw new ArkSystemException("Unexpected exception occurred when trying to process subject data file");
    } finally {
        uploadReport.append("Total file size: ");
        uploadReport.append(decimalFormat.format(size / 1024.0 / 1024.0));
        uploadReport.append(" MB");
        uploadReport.append("\n");
        if (csvReader != null) {
            try {
                csvReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: csvReader.close()", ex);
            }
        }
        if (inputStreamReader != null) {
            try {
                inputStreamReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: inputStreamReader.close()", ex);
            }
        }
    }
    uploadReport.append("Processed ");
    uploadReport.append(subjectCount);
    uploadReport.append(" rows.");
    uploadReport.append("\n");
    uploadReport.append("Inserted ");
    uploadReport.append(insertFieldsCount);
    uploadReport.append(" rows of data.");
    uploadReport.append("\n");
    uploadReport.append("Updated ");
    uploadReport.append(updateFieldsCount);
    uploadReport.append(" rows of data.");
    uploadReport.append("\n");

    // TODO better exception handling
    iPhenotypicService.processPhenoCollectionsWithTheirDataToInsertBatch(phenoCollectionsWithTheirDataToInsert, study);
    return uploadReport;
}
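The setValue(customField, cfd, dataToInsert, theDataAsString) helper called in the loop above is not shown. Below is a minimal sketch of what such a helper might do, assuming the custom field exposes its type by name and that PhenoData carries the separate text/number/date/error slots implied by the setters used above; the type names and the getFieldType().getName() accessor are assumptions, not the actual ARK implementation.

private void setValue(CustomField customField, CustomFieldDisplay cfd, PhenoData dataToInsert, String value) {
    // Illustrative dispatch on the custom field's declared type (assumed accessor)
    String fieldType = customField.getFieldType().getName();
    try {
        if ("NUMBER".equalsIgnoreCase(fieldType)) {
            dataToInsert.setNumberDataValue(Double.parseDouble(value));
        } else if ("DATE".equalsIgnoreCase(fieldType)) {
            dataToInsert.setDateDataValue(simpleDateFormat.parse(value));
        } else {
            dataToInsert.setTextDataValue(value);
        }
    } catch (ParseException | NumberFormatException e) {
        // Malformed value: keep the raw text and note the problem for the report
        dataToInsert.setTextDataValue(value);
        dataToInsert.setErrorDataValue(e.getMessage());
    }
}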