Example #1
  private PhenoData setValue(
      CustomField customField,
      CustomFieldDisplay customFieldDisplay,
      PhenoData data,
      String theDataAsString) {

    if (customField.getFieldType().getName().equalsIgnoreCase(Constants.FIELD_TYPE_NUMBER)) {
      try {
        data.setNumberDataValue(Double.valueOf(theDataAsString));
      } catch (NumberFormatException e) {
        // Mirror the date branch below: keep the raw text as an error value rather than throwing.
        data.setErrorDataValue(theDataAsString);
      }
    } else if (customField.getFieldType().getName().equalsIgnoreCase(Constants.FIELD_TYPE_DATE)) {
      DateFormat dateFormat = new SimpleDateFormat(au.org.theark.core.Constants.DD_MM_YYYY);
      Date dateFieldValue;
      try {
        dateFieldValue = dateFormat.parse(theDataAsString);
        data.setDateDataValue(dateFieldValue);
      } catch (ParseException e) {
        data.setErrorDataValue(theDataAsString);
      }
    } else if (customField
        .getFieldType()
        .getName()
        .equalsIgnoreCase(Constants.FIELD_TYPE_CHARACTER)) {
      if (customField.getEncodedValues() != null
          && !customField.getEncodedValues().isEmpty()
          && customFieldDisplay.getAllowMultiselect()
          && theDataAsString != null) {
        // Multiselect values arrive space-separated in the file; store them semicolon-separated.
        theDataAsString = theDataAsString.replace(" ", ";");
      }
      data.setTextDataValue(theDataAsString);
    }
    return data;
  }
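
Both typed branches above follow the same parse-or-record-error pattern: attempt the conversion, and on failure preserve the raw text as an error value so the row can still be loaded and reported. A minimal, self-contained sketch of that pattern (the class and variable names are illustrative, not part of the Ark API):

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;

public class ParseOrErrorSketch {
  public static void main(String[] args) {
    String raw = "not-a-date";
    DateFormat dateFormat = new SimpleDateFormat("dd/MM/yyyy");
    try {
      System.out.println("parsed: " + dateFormat.parse(raw));
    } catch (ParseException e) {
      // As in setValue(): keep the raw text as an "error" value instead of aborting the row.
      System.out.println("unparseable, kept as error value: " + raw);
    }
  }
}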
Example #2
  public StringBuffer uploadAndReportCustomDataFile(
      InputStream inputStream,
      long size,
      String fileFormat,
      char delimChar,
      List<String> listOfUIDsToUpdate,
      CustomFieldGroup customFieldGroup,
      PhenoCollection phenoCollection,
      boolean overwriteExisting)
      throws FileFormatException, ArkSystemException {
    List<PhenoCollection> phenoCollectionsWithTheirDataToInsert = new ArrayList<PhenoCollection>();

    delimiterCharacter = delimChar;
    uploadReport = new StringBuffer();

    InputStream convertedInputStream;
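    // XLS workbooks are converted to CSV up front so both formats share one parsing path.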
    if (fileFormat.equalsIgnoreCase(Constants.FileFormat.XLS.toString())) {
      XLStoCSV xlsToCSV = new XLStoCSV(delimiterCharacter);
      convertedInputStream = xlsToCSV.convertXlsInputStreamToCsv(inputStream);
    } else {
      convertedInputStream = inputStream;
    }

    InputStreamReader inputStreamReader = null;
    CsvReader csvReader = null;
    DecimalFormat decimalFormat = new DecimalFormat("0.00");

    int subjectCount = 0;
    long updateFieldsCount = 0L;
    long insertFieldsCount = 0L;
    long emptyDataCount = 0L;
    try {
      inputStreamReader = new InputStreamReader(convertedInputStream);
      csvReader = new CsvReader(inputStreamReader, delimiterCharacter);
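      // CsvReader streams the (possibly converted) CSV input one record at a time.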
      String[] stringLineArray;

      List<LinkSubjectStudy> allSubjectWhichWillBeUpdated;
      if (!listOfUIDsToUpdate.isEmpty()) {
        allSubjectWhichWillBeUpdated =
            iArkCommonService.getUniqueSubjectsWithTheseUIDs(study, listOfUIDsToUpdate);
      } else {
        allSubjectWhichWillBeUpdated = new ArrayList<LinkSubjectStudy>();
      }
      if (size <= 0) {
        uploadReport.append(
            "ERROR:  The input size was not greater than 0. Actual length reported: ");
        uploadReport.append(size);
        uploadReport.append("\n");
        throw new FileFormatException(
            "The input size was not greater than 0. Actual length reported: " + size);
      }

      csvReader.readHeaders();

      List<String> fieldNameCollection = Arrays.asList(csvReader.getHeaders());
      ArkFunction phenoCustomFieldArkFunction =
          iArkCommonService.getArkFunctionByName(Constants.FUNCTION_KEY_VALUE_PHENO_COLLECTION);

      List<CustomFieldDisplay> cfdsThatWeNeed =
          iArkCommonService.getCustomFieldDisplaysIn(
              fieldNameCollection, study, phenoCustomFieldArkFunction, customFieldGroup);

      // Per Paul's request, pheno uploads only insert; we deliberately skip pre-fetching existing
      // data via iArkCommonService.getCustomFieldDataFor(cfdsThatWeNeed, allSubjectWhichWillBeUpdated).
      // Each record read below potentially contains many custom fields.
      QuestionnaireStatus uploadingStatus =
          iPhenotypicService.getPhenoCollectionStatusByName(
              Constants.PHENO_COLLECTION_STATUS_UPLOADED);

      while (csvReader.readRecord()) {
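        // Each record carries one subject's data: column 0 is the subject UID,
        // column 1 the record date, and the remaining columns hold custom field values.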
        List<PhenoData> phenoDataToInsertForThisPhenoCollection = new ArrayList<PhenoData>();
        log.info("reading record " + subjectCount);
        stringLineArray = csvReader.getValues();
        String subjectUID = stringLineArray[0];
        String recordDate = stringLineArray[1];
        Date recordDate_asDate =
            (recordDate.isEmpty() ? new Date() : simpleDateFormat.parse(recordDate));
        LinkSubjectStudy subject =
            getSubjectByUIDFromExistList(allSubjectWhichWillBeUpdated, subjectUID);
        CustomField customField = null;
        List<PhenoCollection> subjectExistingMatchingPhenoCollections =
            iPhenotypicService.getSubjectMatchingPhenoCollections(
                subject, customFieldGroup, recordDate_asDate);
        PhenoCollection phenoCollectionIntoDB = new PhenoCollection();
        if (subjectExistingMatchingPhenoCollections.isEmpty() || !overwriteExisting) {
          // No existing matching collection (or overwriting is disabled): build a new one.
          phenoCollectionIntoDB.setDescription(phenoCollection.getDescription());
          phenoCollectionIntoDB.setLinkSubjectStudy(subject);
          phenoCollectionIntoDB.setQuestionnaire(customFieldGroup);
          phenoCollectionIntoDB.setRecordDate(recordDate_asDate);
          phenoCollectionIntoDB.setStatus(uploadingStatus); // TODO confirm this is the UPLOADED status
        } else if (subjectExistingMatchingPhenoCollections.size() == 1) {
          // Exactly one existing match: update that collection in place.
          phenoCollectionIntoDB = subjectExistingMatchingPhenoCollections.get(0);
        } else {
          // More than one existing match is ambiguous; skip this record.
          subjectCount++;
          continue;
        }

        // Resolve each expected custom field against the file's headers and read its raw value.
        for (CustomFieldDisplay cfd : cfdsThatWeNeed) {

          String theDataAsString = null;
          customField = cfd.getCustomField();

          if (csvReader.getIndex(cfd.getCustomField().getName()) < 0) {
            // No exact header match; fall back to a case-insensitive lookup.
            for (String nameAsSeenInFile : fieldNameCollection) {
              if (nameAsSeenInFile.equalsIgnoreCase(cfd.getCustomField().getName())) {
                theDataAsString = csvReader.get(nameAsSeenInFile);
                break;
              }
            }
          } else {
            theDataAsString = csvReader.get(cfd.getCustomField().getName());
          }

          if (theDataAsString != null && !theDataAsString.isEmpty()) {
            PhenoData dataToInsert = new PhenoData();
            dataToInsert.setCustomFieldDisplay(cfd);
            // PhenoData isn't tied to the subject; the enclosing PhenoCollection is.
            // dataToInsert.setLinkSubjectStudy(subject);
            setValue(customField, cfd, dataToInsert, theDataAsString);
            boolean isNewValue = true;
            // If the collection already holds a value for this field, update it in place.
            for (PhenoData phenoData : phenoCollectionIntoDB.getPhenoData()) {
              if (phenoData.getCustomFieldDisplay().getId().equals(cfd.getId())) {
                phenoData.setDateDataValue(dataToInsert.getDateDataValue());
                phenoData.setErrorDataValue(dataToInsert.getErrorDataValue());
                phenoData.setNumberDataValue(dataToInsert.getNumberDataValue());
                phenoData.setTextDataValue(dataToInsert.getTextDataValue());
                isNewValue = false;
                break;
              }
            }
            if (isNewValue) {
              phenoDataToInsertForThisPhenoCollection.add(dataToInsert);
              insertFieldsCount++;
            } else {
              updateFieldsCount++;
            }
          } else {
            emptyDataCount++;
          }
        }
        phenoCollectionIntoDB.getPhenoData().addAll(phenoDataToInsertForThisPhenoCollection);
        log.info(phenoCollectionIntoDB.toString());
        phenoCollectionsWithTheirDataToInsert.add(phenoCollectionIntoDB);
        subjectCount++;
      }
      log.info(
          "Finished processing "
              + subjectCount
              + " subjects; field inserts = "
              + insertFieldsCount
              + ", pheno collections = "
              + phenoCollectionsWithTheirDataToInsert.size()
              + ", empty cells = "
              + emptyDataCount);
    } catch (IOException ioe) {
      uploadReport.append(
          "SYSTEM ERROR:   Unexpected I/O exception whilst reading the subject data file\n");
      log.error("processMatrixSubjectFile IOException stacktrace:", ioe);
      throw new ArkSystemException("Unexpected I/O exception whilst reading the subject data file");
    } catch (Exception ex) {
      uploadReport.append(
          "SYSTEM ERROR:   Unexpected exception whilst reading the subject data file\n");
      log.error("processMatrixSubjectFile Exception stacktrace:", ex);
      throw new ArkSystemException(
          "Unexpected exception occurred when trying to process subject data file");
    } finally {
      uploadReport.append("Total file size: ");
      uploadReport.append(decimalFormat.format(size / 1024.0 / 1024.0));
      uploadReport.append(" MB");
      uploadReport.append("\n");

      if (csvReader != null) {
        try {
          csvReader.close();
        } catch (Exception ex) {
          log.error("Cleanup operation failed: csvRdr.close()", ex);
        }
      }
      if (inputStreamReader != null) {
        try {
          inputStreamReader.close();
        } catch (Exception ex) {
          log.error("Cleanup operation failed: isr.close()", ex);
        }
      }
    }
    uploadReport.append("Processed ");
    uploadReport.append(subjectCount);
    uploadReport.append(" rows.");
    uploadReport.append("\n");
    uploadReport.append("Inserted ");
    uploadReport.append(insertFieldsCount);
    uploadReport.append(" rows of data.");
    uploadReport.append("\n");
    uploadReport.append("Updated ");
    uploadReport.append(updateFieldsCount);
    uploadReport.append(" rows of data.");
    uploadReport.append("\n");

    // TODO: better exception handling
    iPhenotypicService.processPhenoCollectionsWithTheirDataToInsertBatch(
        phenoCollectionsWithTheirDataToInsert, study);
    return uploadReport;
  }
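
A hypothetical call site for this uploader (the file name, the in-scope variables, and the flag value are assumptions for illustration, not code from the Ark project):

File csvFile = new File("pheno_upload.csv");
try (InputStream in = new FileInputStream(csvFile)) {
  // Any format string other than XLS is treated as CSV by the method above.
  StringBuffer report =
      uploadAndReportCustomDataFile(
          in,
          csvFile.length(),
          "CSV",
          ',',
          listOfUIDsToUpdate,   // UIDs expected in the file's first column
          customFieldGroup,
          phenoCollection,
          true);                // overwriteExisting
  log.info(report.toString());
} catch (FileFormatException | ArkSystemException | IOException e) {
  log.error("Pheno data upload failed", e);
}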