Example #1
  /*
   * (non-Javadoc)
   *
   * @see au.org.theark.core.web.form.AbstractDetailForm#onSave(org.apache.wicket.markup.html.form.Form, org.apache.wicket.ajax.AjaxRequestTarget)
   */
  @Override
  protected void onSave(Form<CustomFieldGroupVO> containerForm, AjaxRequestTarget target) {
    if (getModelObject().getCustomFieldGroup().getId() == null) {
      // Create
      ArkFunction arkFunction =
          iArkCommonService.getArkFunctionByName(
              au.org.theark.core.Constants.FUNCTION_KEY_VALUE_PHENO_COLLECTION);
      Long studyId =
          (Long)
              SecurityUtils.getSubject()
                  .getSession()
                  .getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
      Study study = iArkCommonService.getStudy(studyId);
      getModelObject().getCustomFieldGroup().setArkFunction(arkFunction);
      getModelObject().getCustomFieldGroup().setStudy(study);

      try {
        iPhenotypicService.createCustomFieldGroup(getModelObject());
        initCustomFieldDataListPanel();
        this.info("Data Set has been created successfully.");
      } catch (EntityExistsException e) {
        this.error("A Data Set with the same name already exisits. Please choose a unique one.");
      } catch (ArkSystemException e) {
        this.error("A System error occured. Please contact Administrator.");
      }
    } else {

      try {
        iPhenotypicService.updateCustomFieldGroup(getModelObject());
        initCustomFieldDataListPanel();
        this.info("Data Set has been updated successfully.");

      } catch (EntityExistsException e) {
        this.error("A Data Set with the same name already exisits. Please choose a unique one.");
        e.printStackTrace();
      } catch (ArkSystemException e) {
        this.error("A System error occured. Please contact Administrator.");
        e.printStackTrace();
      }
    }
    target.add(
        arkCrudContainerVO
            .getWmcForCustomFieldDisplayListPanel()); // Repaint this List of Custom Field Displays
    onSavePostProcess(target); // Post process
  }
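
A note on the pattern above: onSave dispatches between create and update by testing whether the entity's ID is still null, i.e. whether it has ever been persisted (a common ORM convention). A minimal standalone sketch of that dispatch, with a hypothetical Dao interface that is not part of the Ark API:

  // Hypothetical sketch of the null-ID create/update dispatch used in onSave.
  interface Dao<T> {
    void create(T entity);
    void update(T entity);
  }

  static <T> void saveOrUpdate(T entity, Long id, Dao<T> dao) {
    if (id == null) {
      dao.create(entity); // no database identity yet: INSERT
    } else {
      dao.update(entity); // already persisted: UPDATE
    }
  }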
Example #2
  /*
   * (non-Javadoc)
   *
   * @see au.org.theark.core.web.form.AbstractDetailForm#onDeleteConfirmed(org.apache.wicket.ajax.AjaxRequestTarget, java.lang.String)
   */
  @Override
  protected void onDeleteConfirmed(AjaxRequestTarget target, String selection) {
    // Get a list of CustomFields for the given group
    List<CustomField> selectedList =
        iPhenotypicService.getCustomFieldsLinkedToCustomFieldGroup(
            getModelObject().getCustomFieldGroup());

    boolean allowDelete = true;
    for (CustomField customField : selectedList) {
      if (customField.getCustomFieldHasData()) {
        allowDelete = false;
        break;
      }
    }
    if (allowDelete) {
      iPhenotypicService.deleteCustomFieldGroup(getModelObject());
      this.info("Data Set has been deleted successfully.");
      editCancelProcess(target);

    } else {
      this.error("This Data Set cannot be deleted.");
    }
  }
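
The delete guard above blocks deletion as soon as any linked field holds data. On Java 8+ the same check collapses to a single stream expression; a sketch assuming getCustomFieldHasData() never returns a null Boolean (a null would throw a NullPointerException when unboxed):

  // Equivalent guard with the Stream API; same semantics as the loop above.
  boolean allowDelete =
      selectedList.stream().noneMatch(CustomField::getCustomFieldHasData);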
Example #3
  @Override
  public void onBeforeRender() {
    super.onBeforeRender();
    if (!isNew()) {
      List<CustomField> selectedCustomFieldList =
          iPhenotypicService.getCustomFieldsLinkedToCustomFieldGroup(
              getModelObject().getCustomFieldGroup());

      // Disable Delete button if selected fields exist
      if (!selectedCustomFieldList.isEmpty()) {
        AjaxButton deleteButton =
            (AjaxButton) arkCrudContainerVO.getEditButtonContainer().get("delete");
        deleteButton.setEnabled(false);
      }

      if (getModelObject().getCustomFieldGroup().getPublished()) {
        // Disable when published
        arkCrudContainerVO.getDetailPanelFormContainer().setEnabled(false);
      }
    }
  }
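
onBeforeRender suits this kind of toggling because Wicket invokes it on every component before any markup is written, so enable/disable state always reflects the current model. A minimal self-contained sketch of the publish-locks-the-form idiom (the panel class and wicket:id names are hypothetical):

import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.panel.Panel;

// Hypothetical panel whose form becomes read-only once the entity is published.
public class LockWhenPublishedPanel extends Panel {

  private static final long serialVersionUID = 1L;

  private final Form<Void> form = new Form<Void>("form");
  private final boolean published;

  public LockWhenPublishedPanel(String id, boolean published) {
    super(id);
    this.published = published;
    add(form);
  }

  @Override
  protected void onBeforeRender() {
    super.onBeforeRender(); // Wicket requires the super call first
    form.setEnabled(!published);
  }
}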
Example #4
  private void initCustomFieldDataListPanel() {
    cfdProvider.setCriteriaModel(
        new PropertyModel<CustomFieldDisplay>(cpModel, "customFieldGroup"));
    List<CustomField> selectedList =
        iPhenotypicService.getCustomFieldsLinkedToCustomFieldGroup(
            getModelObject().getCustomFieldGroup());
    boolean disableEditButton = false;
    if (getModelObject().getCustomFieldGroup().getPublished()) {
      for (CustomField customField : selectedList) {
        if (customField.getCustomFieldHasData()) {
          disableEditButton = true;
          break;
        }
      }
    }

    CustomFieldDisplayListPanel cfdListPanel =
        new CustomFieldDisplayListPanel(
            "cfdListPanel", feedBackPanel, arkCrudContainerVO, disableEditButton);
    cfdListPanel.setOutputMarkupId(true);
    cfdListPanel.initialisePanel();
    dataView = cfdListPanel.buildDataView(cfdProvider);
    dataView.setItemsPerPage(iArkCommonService.getRowsPerPage());

    AjaxPagingNavigator pageNavigator =
        new AjaxPagingNavigator("cfDisplayNavigator", dataView) {

          private static final long serialVersionUID = 1L;

          @Override
          protected void onAjaxEvent(AjaxRequestTarget target) {
            target.add(arkCrudContainerVO.getWmcForCustomFieldDisplayListPanel());
          }
        };
    cfdListPanel.addOrReplace(pageNavigator);
    cfdListPanel.addOrReplace(dataView);
    arkCrudContainerVO.getWmcForCustomFieldDisplayListPanel().addOrReplace(cfdListPanel);
  }
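
For reference, the disableEditButton computation at the top of this method is a two-part guard: the group is published and at least one linked field already holds data. On Java 8+ it reads as one expression; a sketch using the same accessors (and assuming neither Boolean getter returns null):

    // Compact equivalent of the disableEditButton loop above (Java 8+).
    boolean disableEditButton =
        getModelObject().getCustomFieldGroup().getPublished()
            && selectedList.stream().anyMatch(CustomField::getCustomFieldHasData);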
Example #5
  public StringBuffer uploadAndReportCustomDataFile(
      InputStream inputStream,
      long size,
      String fileFormat,
      char delimChar,
      List<String> listOfUIDsToUpdate,
      CustomFieldGroup customFieldGroup,
      PhenoCollection phenoCollection,
      boolean overwriteExisting)
      throws FileFormatException, ArkSystemException {
    List<PhenoCollection> phenoCollectionsWithTheirDataToInsert = new ArrayList<PhenoCollection>();

    delimiterCharacter = delimChar;
    uploadReport = new StringBuffer();

    InputStream convertedInputStream;
    if (fileFormat.equalsIgnoreCase(Constants.FileFormat.XLS.toString())) {
      XLStoCSV xlsToCSV = new XLStoCSV(delimiterCharacter);
      convertedInputStream = xlsToCSV.convertXlsInputStreamToCsv(inputStream);
    } else {
      convertedInputStream = inputStream;
    }

    InputStreamReader inputStreamReader = null;
    CsvReader csvReader = null;
    DecimalFormat decimalFormat = new DecimalFormat("0.00");

    int subjectCount = 0;
    long updateFieldsCount = 0L;
    long insertFieldsCount = 0L;
    long emptyDataCount = 0L;
    try {
      inputStreamReader = new InputStreamReader(convertedInputStream);
      csvReader = new CsvReader(inputStreamReader, delimiterCharacter);
      String[] stringLineArray;

      List<LinkSubjectStudy> allSubjectWhichWillBeUpdated;
      if (!listOfUIDsToUpdate.isEmpty()) {
        allSubjectWhichWillBeUpdated =
            iArkCommonService.getUniqueSubjectsWithTheseUIDs(study, listOfUIDsToUpdate);
      } else {
        allSubjectWhichWillBeUpdated = new ArrayList<LinkSubjectStudy>();
      }
      if (size <= 0) {
        uploadReport.append(
            "ERROR:  The input size was not greater than 0. Actual length reported: ");
        uploadReport.append(size);
        uploadReport.append("\n");
        throw new FileFormatException(
            "The input size was not greater than 0. Actual length reported: " + size);
      }

      csvReader.readHeaders();

      List<String> fieldNameCollection = Arrays.asList(csvReader.getHeaders());
      ArkFunction phenoCustomFieldArkFunction =
          iArkCommonService.getArkFunctionByName(Constants.FUNCTION_KEY_VALUE_PHENO_COLLECTION);

      List<CustomFieldDisplay> cfdsThatWeNeed =
          iArkCommonService.getCustomFieldDisplaysIn(
              fieldNameCollection, study, phenoCustomFieldArkFunction, customFieldGroup);

      // Paul has requested that in pheno we only insert; previously:
      // List<PhenoData> dataThatWeHave =
      //     iArkCommonService.getCustomFieldDataFor(cfdsThatWeNeed, allSubjectWhichWillBeUpdated);
      // Read one line, which potentially contains many custom fields.
      QuestionnaireStatus uploadingStatus =
          iPhenotypicService.getPhenoCollectionStatusByName(
              Constants.PHENO_COLLECTION_STATUS_UPLOADED);

      while (csvReader.readRecord()) {
        List<PhenoData> phenoDataToInsertForThisPhenoCollection = new ArrayList<PhenoData>();
        log.info("reading record " + subjectCount);
        stringLineArray = csvReader.getValues();
        String subjectUID = stringLineArray[0];
        String recordDate = stringLineArray[1];
        Date recordDate_asDate =
            (recordDate.isEmpty() ? new Date() : simpleDateFormat.parse(recordDate));
        LinkSubjectStudy subject =
            getSubjectByUIDFromExistList(allSubjectWhichWillBeUpdated, subjectUID);
        // log.info("get subject from list");
        CustomField customField = null;
        List<PhenoCollection> subjectExistingMatchingPhenoCollections =
            iPhenotypicService.getSubjectMatchingPhenoCollections(
                subject, customFieldGroup, recordDate_asDate);
        PhenoCollection phenoCollectionIntoDB = new PhenoCollection();
        if (subjectExistingMatchingPhenoCollections.size() == 0 || !overwriteExisting) {
          phenoCollectionIntoDB.setDescription(phenoCollection.getDescription());
          phenoCollectionIntoDB.setLinkSubjectStudy(subject);
          //				phenoCollectionIntoDB.setName(phenoCollection.getName());
          phenoCollectionIntoDB.setQuestionnaire(customFieldGroup);
          if (recordDate.isEmpty()) {
            phenoCollectionIntoDB.setRecordDate(new Date());
          } else {
            phenoCollectionIntoDB.setRecordDate(recordDate_asDate);
          }
          phenoCollectionIntoDB.setStatus(
              uploadingStatus); // TODO: confirm this should be the UPLOADED status type
        } else {
          if (subjectExistingMatchingPhenoCollections.size() == 1) {
            recordDate_asDate =
                (recordDate.isEmpty() ? new Date() : simpleDateFormat.parse(recordDate));
            phenoCollectionIntoDB = subjectExistingMatchingPhenoCollections.get(0);
          } else {
            subjectCount++;
            continue;
          }
        }

        for (CustomFieldDisplay cfd : cfdsThatWeNeed) {

          String theDataAsString = null;
          customField = cfd.getCustomField();

          // Try an exact header match first; otherwise fall back to a
          // case-insensitive scan of the headers as they appeared in the file.
          if (csvReader.getIndex(cfd.getCustomField().getName()) < 0) {
            for (String nameAsSeenInFile : fieldNameCollection) {
              if (nameAsSeenInFile.equalsIgnoreCase(cfd.getCustomField().getName())) {
                theDataAsString = csvReader.get(nameAsSeenInFile);
              }
            }
          } else {
            theDataAsString = csvReader.get(cfd.getCustomField().getName());
          }

          if (theDataAsString != null && !theDataAsString.isEmpty()) {
            PhenoData dataToInsert = new PhenoData();
            dataToInsert.setCustomFieldDisplay(cfd);
            // As much as I disagree: PhenoData isn't tied to the subject; the PhenoCollection is.
            // dataToInsert.setLinkSubjectStudy(subject);
            setValue(customField, cfd, dataToInsert, theDataAsString);
            boolean isNewData = true;
            for (PhenoData phenoData : phenoCollectionIntoDB.getPhenoData()) {
              // IDs are Long objects: compare with equals(), not ==, which would
              // only compare references and silently miss matches.
              if (phenoData.getCustomFieldDisplay().getId().equals(cfd.getId())) {
                phenoData.setDateDataValue(dataToInsert.getDateDataValue());
                phenoData.setErrorDataValue(dataToInsert.getErrorDataValue());
                phenoData.setNumberDataValue(dataToInsert.getNumberDataValue());
                phenoData.setTextDataValue(dataToInsert.getTextDataValue());
                isNewData = false;
                break;
              }
            }
            if (isNewData) {
              phenoDataToInsertForThisPhenoCollection.add(dataToInsert);
              insertFieldsCount++;
            } else {
              updateFieldsCount++; // existing value overwritten: count as update
            }
          } else {
            emptyDataCount++;
          }
        }
        phenoCollectionIntoDB.getPhenoData().addAll(phenoDataToInsertForThisPhenoCollection);
        log.info(phenoCollectionIntoDB.toString());
        phenoCollectionsWithTheirDataToInsert.add(phenoCollectionIntoDB);
        subjectCount++;
      }
      log.info(
          "finished processing "
              + subjectCount
              + " records\n      data inserts = "
              + insertFieldsCount
              + "  pheno collections = "
              + phenoCollectionsWithTheirDataToInsert.size()
              + "  empty cells = "
              + emptyDataCount);
    } catch (IOException ioe) {
      uploadReport.append(
          "SYSTEM ERROR:   Unexpected I/O exception whilst reading the subject data file\n");
      log.error("processMatrixSubjectFile IOException stacktrace:", ioe);
      throw new ArkSystemException("Unexpected I/O exception whilst reading the subject data file");
    } catch (Exception ex) {
      uploadReport.append(
          "SYSTEM ERROR:   Unexpected exception whilst reading the subject data file\n");
      log.error("processMatrixSubjectFile Exception stacktrace:", ex);
      throw new ArkSystemException(
          "Unexpected exception occurred when trying to process subject data file");
    } finally {
      uploadReport.append("Total file size: ");
      uploadReport.append(decimalFormat.format(size / 1024.0 / 1024.0));
      uploadReport.append(" MB");
      uploadReport.append("\n");

      if (csvReader != null) {
        try {
          csvReader.close();
        } catch (Exception ex) {
          log.error("Cleanup operation failed: csvRdr.close()", ex);
        }
      }
      if (inputStreamReader != null) {
        try {
          inputStreamReader.close();
        } catch (Exception ex) {
          log.error("Cleanup operation failed: isr.close()", ex);
        }
      }
    }
    uploadReport.append("Processed ");
    uploadReport.append(subjectCount);
    uploadReport.append(" rows.");
    uploadReport.append("\n");
    uploadReport.append("Inserted ");
    uploadReport.append(insertFieldsCount);
    uploadReport.append(" rows of data.");
    uploadReport.append("\n");
    uploadReport.append("Updated ");
    uploadReport.append(updateFieldsCount);
    uploadReport.append(" rows of data.");
    uploadReport.append("\n");

    // TODO: better exception handling
    iPhenotypicService.processPhenoCollectionsWithTheirDataToInsertBatch(
        phenoCollectionsWithTheirDataToInsert, study);
    return uploadReport;
  }
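
Two resource-handling notes on this method. The manual finally-block cleanup predates try-with-resources; a sketch of the same handling on Java 7+, assuming javacsv's CsvReader (which exposes close() but, depending on the version, may not implement java.io.Closeable, hence the explicit inner finally):

    // Sketch: the reader cleanup with try-with-resources (Java 7+).
    try (InputStreamReader isr = new InputStreamReader(convertedInputStream)) {
      CsvReader reader = new CsvReader(isr, delimiterCharacter);
      try {
        reader.readHeaders();
        while (reader.readRecord()) {
          // ... per-record processing as in the method above ...
        }
      } finally {
        reader.close(); // close explicitly; CsvReader may not be Closeable
      }
    }

Separately, the shared simpleDateFormat used to parse recordDate is worth flagging: SimpleDateFormat is not thread-safe, so if this service method can run concurrently, a per-call instance (or java.time.format.DateTimeFormatter) is the safer choice.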