Example #1
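  /**
   * Handles the AJAX search: resolves the study from the session context, applies it and the
   * data dictionary function to the search criteria, then repaints the feedback panel and the
   * search result list.
   */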
  @Override
  protected void onSearch(AjaxRequestTarget target) {
    target.add(feedbackPanel);

    // Set study in context
    Long studyId =
        (Long)
            SecurityUtils.getSubject()
                .getSession()
                .getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
    // Get a list of all Fields for the Study in context
    Study study = iArkCommonService.getStudy(studyId);

    Upload searchUpload = getModelObject().getUpload();
    searchUpload.setStudy(study);
    searchUpload.setArkFunction(
        iArkCommonService.getArkFunctionByName(Constants.FUNCTION_KEY_VALUE_DATA_DICTIONARY));

    Collection<Upload> uploadCollection = iArkCommonService.searchUploads(searchUpload);

    if (uploadCollection != null && uploadCollection.isEmpty()) {
      this.info("No uploads matching the specified criteria exist in the system.");
      target.add(feedbackPanel);
    }

    getModelObject().setUploadCollection(uploadCollection);

    listView.removeAll();
    arkCrudContainerVO.getSearchResultPanelContainer().setVisible(true);
    target.add(arkCrudContainerVO.getSearchResultPanelContainer());
  }
Example #2
  /*
   * (non-Javadoc)
   *
   * @see au.org.theark.core.web.form.AbstractDetailForm#onSave(org.apache.wicket.markup.html.form.Form, org.apache.wicket.ajax.AjaxRequestTarget)
   */
  @Override
  protected void onSave(Form<CustomFieldGroupVO> containerForm, AjaxRequestTarget target) {
    if (getModelObject().getCustomFieldGroup().getId() == null) {
      // Create
      ArkFunction arkFunction =
          iArkCommonService.getArkFunctionByName(
              au.org.theark.core.Constants.FUNCTION_KEY_VALUE_PHENO_COLLECTION);
      Long studyId =
          (Long)
              SecurityUtils.getSubject()
                  .getSession()
                  .getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
      Study study = iArkCommonService.getStudy(studyId);
      getModelObject().getCustomFieldGroup().setArkFunction(arkFunction);
      getModelObject().getCustomFieldGroup().setStudy(study);

      try {
        iPhenotypicService.createCustomFieldGroup(getModelObject());
        initCustomFieldDataListPanel();
        this.info("Data Set has been created successfully.");
      } catch (EntityExistsException e) {
        this.error("A Data Set with the same name already exisits. Please choose a unique one.");
      } catch (ArkSystemException e) {
        this.error("A System error occured. Please contact Administrator.");
      }
    } else {
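      // Update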

      try {
        iPhenotypicService.updateCustomFieldGroup(getModelObject());
        initCustomFieldDataListPanel();
        this.info("Data Set has been updated successfully.");

      } catch (EntityExistsException e) {
        this.error("A Data Set with the same name already exisits. Please choose a unique one.");
        e.printStackTrace();
      } catch (ArkSystemException e) {
        this.error("A System error occured. Please contact Administrator.");
        e.printStackTrace();
      }
    }
    target.add(
        arkCrudContainerVO
            .getWmcForCustomFieldDisplayListPanel()); // Repaint this List of Custom Field Displays
    onSavePostProcess(target); // Post process
  }
Example #3
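  /**
   * Matches the column names in the uploaded file against the study's pheno custom fields,
   * marks the matches as selected, and rebuilds the custom field palette.
   */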
  @SuppressWarnings("unchecked")
  private void setSelectedCustomFieldsFromFile() {
    if (fileUploadField.getFileUpload() != null) {
      ArkFunction arkFunction =
          iArkCommonService.getArkFunctionByName(
              au.org.theark.core.Constants.FUNCTION_KEY_VALUE_PHENO_COLLECTION);
      Long studyId =
          (Long)
              SecurityUtils.getSubject()
                  .getSession()
                  .getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
      Study study = iArkCommonService.getStudy(studyId);
      ArrayList<CustomField> selectedCustomFields =
          (ArrayList<CustomField>)
              iArkCommonService.matchCustomFieldsFromInputFile(
                  fileUploadField.getFileUpload(), study, arkFunction);
      cpModel.getObject().setSelectedCustomFields(selectedCustomFields);
    }

    initCustomFieldPalette();
    arkCrudContainerVO.getDetailPanelFormContainer().addOrReplace(customFieldPalette);
  }
Example #4
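  /**
   * Uploads a custom (pheno) data file and returns a report of the outcome. XLS input is first
   * converted to CSV; each row is then parsed into a PhenoCollection with its PhenoData values,
   * and the whole batch is persisted at the end.
   */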
  public StringBuffer uploadAndReportCustomDataFile(
      InputStream inputStream,
      long size,
      String fileFormat,
      char delimChar,
      List<String> listOfUIDsToUpdate,
      CustomFieldGroup customFieldGroup,
      PhenoCollection phenoCollection,
      boolean overwriteExisting)
      throws FileFormatException, ArkSystemException {
    List<PhenoCollection> phenoCollectionsWithTheirDataToInsert = new ArrayList<PhenoCollection>();

    delimiterCharacter = delimChar;
    uploadReport = new StringBuffer();

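    // Normalise the input: convert XLS spreadsheets to CSV so a single parser handles both formats.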
    InputStream convertedInputStream;
    if (fileFormat.equalsIgnoreCase(Constants.FileFormat.XLS.toString())) {
      XLStoCSV xlsToCSV = new XLStoCSV(delimiterCharacter);
      convertedInputStream = xlsToCSV.convertXlsInputStreamToCsv(inputStream);
    } else {
      convertedInputStream = inputStream;
    }

    InputStreamReader inputStreamReader = null;
    CsvReader csvReader = null;
    DecimalFormat decimalFormat = new DecimalFormat("0.00");

    int subjectCount = 0;
    long updateFieldsCount = 0L;
    long insertFieldsCount = 0L;
    long emptyDataCount = 0L;
    try {
      inputStreamReader = new InputStreamReader(convertedInputStream);
      csvReader = new CsvReader(inputStreamReader, delimiterCharacter);
      String[] stringLineArray;

      List<LinkSubjectStudy> allSubjectWhichWillBeUpdated = null;
      if (listOfUIDsToUpdate.size() > 0) {
        allSubjectWhichWillBeUpdated =
            iArkCommonService.getUniqueSubjectsWithTheseUIDs(study, listOfUIDsToUpdate);
      } else {
        allSubjectWhichWillBeUpdated = new ArrayList<LinkSubjectStudy>();
      }
      if (size <= 0) {
        uploadReport.append(
            "ERROR:  The input size was not greater than 0. Actual length reported: ");
        uploadReport.append(size);
        uploadReport.append("\n");
        throw new FileFormatException(
            "The input size was not greater than 0. Actual length reported: " + size);
      }

      csvReader.readHeaders();

      List<String> fieldNameCollection = Arrays.asList(csvReader.getHeaders());
      ArkFunction phenoCustomFieldArkFunction =
          iArkCommonService.getArkFunctionByName(
              Constants.FUNCTION_KEY_VALUE_PHENO_COLLECTION);

      List<CustomFieldDisplay> cfdsThatWeNeed =
          iArkCommonService.getCustomFieldDisplaysIn(
              fieldNameCollection, study, phenoCustomFieldArkFunction, customFieldGroup);

      // Per Paul's request, pheno data is insert-only; the previous pre-fetch of existing data
      // (iArkCommonService.getCustomFieldDataFor(cfdsThatWeNeed, allSubjectWhichWillBeUpdated))
      // is no longer performed. Each record read below may contain many custom field values.
      QuestionnaireStatus uploadingStatus =
          iPhenotypicService.getPhenoCollectionStatusByName(
              Constants.PHENO_COLLECTION_STATUS_UPLOADED);

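      // Each row: column 0 = subject UID, column 1 = record date, then one column per custom field.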
      while (csvReader.readRecord()) {
        List<PhenoData> phenoDataToInsertForThisPhenoCollection = new ArrayList<PhenoData>();
        log.info("reading record " + subjectCount);
        stringLineArray = csvReader.getValues();
        String subjectUID = stringLineArray[0];
        String recordDate = stringLineArray[1];
        Date recordDate_asDate =
            (recordDate.isEmpty() ? new Date() : simpleDateFormat.parse(recordDate));
        LinkSubjectStudy subject =
            getSubjectByUIDFromExistList(allSubjectWhichWillBeUpdated, subjectUID);
        // log.info("get subject from list");
        CustomField customField = null;
        List<PhenoCollection> subjectExistingMatchingPhenoCollections =
            iPhenotypicService.getSubjectMatchingPhenoCollections(
                subject, customFieldGroup, recordDate_asDate);
        PhenoCollection phenoCollectionIntoDB = new PhenoCollection();
        if (subjectExistingMatchingPhenoCollections.size() == 0 || !overwriteExisting) {
          phenoCollectionIntoDB.setDescription(phenoCollection.getDescription());
          phenoCollectionIntoDB.setLinkSubjectStudy(subject);
          // phenoCollectionIntoDB.setName(phenoCollection.getName());
          phenoCollectionIntoDB.setQuestionnaire(customFieldGroup);
          if (recordDate.isEmpty()) {
            phenoCollectionIntoDB.setRecordDate(new Date());
          } else {
            phenoCollectionIntoDB.setRecordDate(recordDate_asDate);
          }
          phenoCollectionIntoDB.setStatus(
              uploadingStatus); // TODO confirm this should always be the UPLOADED status
        } else {
          if (subjectExistingMatchingPhenoCollections.size() == 1) {
            // recordDate_asDate was already parsed above, so simply reuse the existing collection.
            phenoCollectionIntoDB = subjectExistingMatchingPhenoCollections.get(0);
          } else {
            subjectCount++;
            continue;
          }
        }

        for (CustomFieldDisplay cfd : cfdsThatWeNeed) {

          String theDataAsString = null;
          customField = cfd.getCustomField();

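          // Fall back to a case-insensitive header match when the exact column name is not found.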
          if (csvReader.getIndex(cfd.getCustomField().getName()) < 0) {
            for (String nameAsSeenInFile : fieldNameCollection) {
              if (nameAsSeenInFile.equalsIgnoreCase(cfd.getCustomField().getName())) {
                theDataAsString = csvReader.get(nameAsSeenInFile);
              }
            }
          } else {
            theDataAsString = csvReader.get(cfd.getCustomField().getName());
          }

          if (theDataAsString != null && !theDataAsString.isEmpty()) {
            PhenoData dataToInsert = new PhenoData();
            dataToInsert.setCustomFieldDisplay(cfd);
            // as much as i disagree...pheno data isn't tied to subject....pheno collection is
            // dataToInsert.setLinkSubjectStudy(subject);
            setValue(customField, cfd, dataToInsert, theDataAsString);
            boolean isNewData = true; // becomes false if this field already exists on the collection
            for (PhenoData phenoData : phenoCollectionIntoDB.getPhenoData()) {
              // Compare IDs by value: '==' on Long objects tests identity, not equality.
              if (phenoData.getCustomFieldDisplay().getId().equals(cfd.getId())) {
                phenoData.setDateDataValue(dataToInsert.getDateDataValue());
                phenoData.setErrorDataValue(dataToInsert.getErrorDataValue());
                phenoData.setNumberDataValue(dataToInsert.getNumberDataValue());
                phenoData.setTextDataValue(dataToInsert.getTextDataValue());
                flag = false;
                break;
              }
            }
            if (isNewData) {
              phenoDataToInsertForThisPhenoCollection.add(dataToInsert);
              insertFieldsCount++;
            } else {
              updateFieldsCount++;
            }
          } else {
            emptyDataCount++;
          }
        }
        phenoCollectionIntoDB.getPhenoData().addAll(phenoDataToInsertForThisPhenoCollection);
        log.info(phenoCollectionIntoDB.toString());
        phenoCollectionsWithTheirDataToInsert.add(phenoCollectionIntoDB);
        subjectCount++;
      }
      log.info(
          "Finished processing "
              + subjectCount
              + " rows.\n      DATA inserts = "
              + insertFieldsCount
              + "  phenocollections = "
              + phenoCollectionsWithTheirDataToInsert.size()
              + "  empty cells = "
              + emptyDataCount);
    } catch (IOException ioe) {
      uploadReport.append(
          "SYSTEM ERROR:   Unexpected I/O exception whilst reading the subject data file\n");
      log.error("processMatrixSubjectFile IOException stacktrace:", ioe);
      throw new ArkSystemException("Unexpected I/O exception whilst reading the subject data file");
    } catch (Exception ex) {
      uploadReport.append(
          "SYSTEM ERROR:   Unexpected exception whilst reading the subject data file\n");
      log.error("processMatrixSubjectFile Exception stacktrace:", ex);
      throw new ArkSystemException(
          "Unexpected exception occurred when trying to process subject data file");
    } finally {
      uploadReport.append("Total file size: ");
      uploadReport.append(decimalFormat.format(size / 1024.0 / 1024.0));
      uploadReport.append(" MB");
      uploadReport.append("\n");

      if (csvReader != null) {
        try {
          csvReader.close();
        } catch (Exception ex) {
          log.error("Cleanup operation failed: csvRdr.close()", ex);
        }
      }
      if (inputStreamReader != null) {
        try {
          inputStreamReader.close();
        } catch (Exception ex) {
          log.error("Cleanup operation failed: isr.close()", ex);
        }
      }
    }
    uploadReport.append("Processed ");
    uploadReport.append(subjectCount);
    uploadReport.append(" rows.");
    uploadReport.append("\n");
    uploadReport.append("Inserted ");
    uploadReport.append(insertFieldsCount);
    uploadReport.append(" rows of data.");
    uploadReport.append("\n");
    uploadReport.append("Updated ");
    uploadReport.append(updateFieldsCount);
    uploadReport.append(" rows of data.");
    uploadReport.append("\n");

    // TODO: better exception handling
    iPhenotypicService.processPhenoCollectionsWithTheirDataToInsertBatch(
        phenoCollectionsWithTheirDataToInsert, study);
    return uploadReport;
  }
Example #5
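  /**
   * Builds the VelocityContext used to render a biospecimen collection label, populated with
   * subject details and the FAMILYID and ASRBNO custom field values.
   */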
  public VelocityContext getBioCollectionLabelContext(BioCollection bioCollection) {
    VelocityContext velocityContext = new VelocityContext();
    LinkSubjectStudy linkSubjectStudy = null;
    try {
      linkSubjectStudy =
          iArkCommonService.getSubjectByUID(
              bioCollection.getLinkSubjectStudy().getSubjectUID(), bioCollection.getStudy());
    } catch (EntityNotFoundException e) {
      log.error(e.getMessage());
      // The subject could not be resolved; return the empty context rather than risk an NPE below.
      return velocityContext;
    }

    try {
      bioCollection = iBioCollectionDao.getBioCollection(bioCollection.getId());
    } catch (EntityNotFoundException e) {
      log.error(e.getMessage());
    }

    String subjectUid = linkSubjectStudy.getSubjectUID();

    // ArkFunction arkFunction =
    // iArkCommonService.getArkFunctionByName(au.org.theark.core.Constants.FUNCTION_KEY_VALUE_LIMS_COLLECTION);
    ArkFunction arkFunction =
        iArkCommonService.getArkFunctionByName(
            au.org.theark.core.Constants.FUNCTION_KEY_VALUE_LIMS_CUSTOM_FIELD);

    // Custom field name "FAMILYID"
    BioCollectionCustomFieldData bioCollectionCustomFieldData =
        iBioCollectionDao.getBioCollectionCustomFieldData(bioCollection, arkFunction, "FAMILYID");
    String familyId = bioCollectionCustomFieldData.getTextDataValue();

    // Custom field name "ASRBNO"
    bioCollectionCustomFieldData =
        iBioCollectionDao.getBioCollectionCustomFieldData(bioCollection, arkFunction, "ASRBNO");
    String asrbno = bioCollectionCustomFieldData.getTextDataValue();

    String collectionDate = "";
    if (bioCollection.getCollectionDate() != null) {
      collectionDate = simpleDateFormat.format(bioCollection.getCollectionDate());
    }
    String refDoctor = "";
    if (bioCollection.getRefDoctor() != null) {
      refDoctor = bioCollection.getRefDoctor();
    }
    String dateOfBirth = "";
    if (bioCollection.getLinkSubjectStudy().getPerson().getDateOfBirth() != null) {
      dateOfBirth =
          simpleDateFormat.format(bioCollection.getLinkSubjectStudy().getPerson().getDateOfBirth());
    }
    String sex = "";
    if (bioCollection.getLinkSubjectStudy().getPerson().getGenderType() != null) {
      sex = bioCollection.getLinkSubjectStudy().getPerson().getGenderType().getName();
    }
    String collectionID = bioCollection.getBiocollectionUid();
    String subjectFirstName = linkSubjectStudy.getPerson().getFirstName();
    String subjectLastName = linkSubjectStudy.getPerson().getLastName();
    String initials = subjectFirstName.charAt(0) + "" + subjectLastName.charAt(0);

    velocityContext.put("initials", initials);
    velocityContext.put("firstName", subjectFirstName);
    velocityContext.put("lastName", subjectLastName);
    velocityContext.put("collectionID", collectionID);
    velocityContext.put("subjectUid", subjectUid);
    velocityContext.put("familyId", familyId);
    velocityContext.put("asrbno", asrbno);
    velocityContext.put("collectionDate", collectionDate);
    velocityContext.put("refDoctor", refDoctor);
    velocityContext.put("dateOfBirth", dateOfBirth);
    velocityContext.put("sex", sex);

    return velocityContext;
  }