Example 1
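  /** Builds the HTML table row(s) for the given filter entity. */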
  public String showRow(EntityBean e) {
    FilterBean fb = (FilterBean) e;
    Status s = fb.getStatus();

    // do the first row, just the "flat" properties
    String row = "<tr>\n";

    // filter name
    String colorOn = s.equals(Status.AVAILABLE) ? "" : "<font color='gray'>";
    String colorOff = s.equals(Status.AVAILABLE) ? "" : "</font>";
    row += "<td>" + colorOn + fb.getName() + colorOff + "</td>\n";

    row += "<td>" + fb.getDescription() + "</td>\n";
    row += "<td>" + fb.getOwner().getName() + "</td>\n";
    // created date
    row += "<td>" + fb.getCreatedDate().toString() + "</td>\n";
    // status
    row += "<td>" + s.getName() + "</td>\n";

    // actions
    row += "<td>";
    if (!s.equals(Status.DELETED)) {
      String confirmQuestion = "Are you sure you want to delete " + fb.getName() + "?";
      String onClick = "onClick=\"return confirm('" + confirmQuestion + "');\"";
      row += "<a href='" + ApplyFilterServlet.getLink(fb.getId()) + "'>view</a>";
      row += " <a href='" + EditFilterServlet.getLink(fb.getId()) + "'>edit</a>";
      row += " <a href='" + RemoveFilterServlet.getLink(fb.getId()) + "'" + onClick + ">delete</a>";
    } else {
      // write the servlet to restore filters later, tbh 01-23-2005

      /*String confirmQuestion = "Are you sure you want to restore " + u.getName() + "?";
      String onClick = "onClick=\"return confirm('" + confirmQuestion + "');\"";
      row += " <a href='" + DeleteUserServlet.getLink(u, EntityAction.RESTORE) + "'" + onClick + ">restore</a>";
      */
    }
    row += "</td>\n";

    row += "</tr>\n";

    row += "<tr>\n";
    row += "</tr>\n";

    return row;
  }
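
  /**
   * Handles the CRF data import workflow: uploads the ODM XML file, unmarshals and
   * validates it, and forwards to the verification page once all checks pass.
   */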
  @Override
  public void processRequest() throws Exception {
    resetPanel();
    panel.setStudyInfoShown(false);
    panel.setOrderedData(true);

    FormProcessor fp = new FormProcessor(request);
    // check which module the request came from
    String module = fp.getString(MODULE);
    // keep the module in the session
    session.setAttribute(MODULE, module);

    String action = request.getParameter("action");
    CRFVersionBean version = (CRFVersionBean) session.getAttribute("version");

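    // ODM XSDs: 1.3 (the direct schema validation below is currently commented out) and 1.2.1, used for the backwards-compatibility check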
    File xsdFile = new File(SpringServletAccess.getPropertiesDir(context) + "ODM1-3-0.xsd");
    File xsdFile2 = new File(SpringServletAccess.getPropertiesDir(context) + "ODM1-2-1.xsd");

    if (StringUtil.isBlank(action)) {
      logger.info("action is blank");
      request.setAttribute("version", version);
      forwardPage(Page.IMPORT_CRF_DATA);
    }
    if ("confirm".equalsIgnoreCase(action)) {
      String dir = SQLInitServlet.getField("filePath");
      if (!(new File(dir)).exists()) {
        logger.info("The filePath in datainfo.properties is invalid " + dir);
        addPageMessage(respage.getString("filepath_you_defined_not_seem_valid"));
        forwardPage(Page.IMPORT_CRF_DATA);
        return;
      }
      // All the uploaded files will be saved in filePath/crf/original/
      String theDir = dir + "crf" + File.separator + "original" + File.separator;
      if (!(new File(theDir)).isDirectory()) {
        (new File(theDir)).mkdirs();
        logger.info("Made the directory " + theDir);
      }
      // MultipartRequest multi = new MultipartRequest(request, theDir, 50 * 1024 * 1024);
      File f = null;
      try {
        f = uploadFile(theDir, version);

      } catch (Exception e) {
        logger.warn("*** Found exception during file upload***");
        e.printStackTrace();
      }
      if (f == null) {
        forwardPage(Page.IMPORT_CRF_DATA);
        return; // nothing was uploaded; continuing would NPE when the file is read below
      }

      // TODO
      // validation steps
      // 1. valid xml - validated by file uploader below

      // LocalConfiguration config = LocalConfiguration.getInstance();
      // config.getProperties().setProperty(
      // "org.exolab.castor.parser.namespaces",
      // "true");
      // config
      // .getProperties()
      // .setProperty("org.exolab.castor.sax.features",
      // "http://xml.org/sax/features/validation,
      // http://apache.org/xml/features/validation/schema,
      // http://apache.org/xml/features/validation/schema-full-checking");
      // // above sets to validate against namespace

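      // Castor mapping used to unmarshal the uploaded ODM XML into an ODMContainer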
      Mapping myMap = new Mapping();
      String propertiesPath = CoreResources.PROPERTIES_DIR;
      myMap.loadMapping(propertiesPath + File.separator + "cd_odm_mapping.xml");

      Unmarshaller um1 = new Unmarshaller(myMap);
      // um1.addNamespaceToPackageMapping("http://www.cdisc.org/ns/odm/v1.3"
      // ,
      // "ODMContainer");
      boolean fail = false;
      ODMContainer odmContainer = new ODMContainer();
      try {

        // schemaValidator.validateAgainstSchema(f, xsdFile);
        // utf-8 compliance, tbh 06/2009
        InputStreamReader isr = new InputStreamReader(new FileInputStream(f), "UTF-8");
        odmContainer = (ODMContainer) um1.unmarshal(isr);

        System.out.println(
            "Found crf data container for study oid: "
                + odmContainer.getCrfDataPostImportContainer().getStudyOID());
        System.out.println(
            "found length of subject list: "
                + odmContainer.getCrfDataPostImportContainer().getSubjectData().size());
        // 2. validates against ODM 1.3
        // check it all below, throw an exception and route to a different page if not working

        // TODO this block of code needs the xerces serializer in order to work

        // StringWriter myWriter = new StringWriter();
        // Marshaller m1 = new Marshaller(myWriter);
        //
        // m1.setProperty("org.exolab.castor.parser.namespaces",
        // "true");
        // m1
        // .setProperty("org.exolab.castor.sax.features",
        // "http://xml.org/sax/features/validation,
        // http://apache.org/xml/features/validation/schema,
        // http://apache.org/xml/features/validation/schema-full-checking
        // ");
        //
        // m1.setMapping(myMap);
        // m1.setNamespaceMapping("",
        // "http://www.cdisc.org/ns/odm/v1.3");
        // m1.setSchemaLocation("http://www.cdisc.org/ns/odm/v1.3
        // ODM1-3.xsd");
        // m1.marshal(odmContainer);
        // if you haven't thrown it, you won't throw it here
        addPageMessage(respage.getString("passed_xml_validation"));
      } catch (Exception me1) {
        me1.printStackTrace();
        // expanding it to all exceptions, but hoping to catch MarshalException or SAXException
        logger.info("found exception with xml transform");
        //
        logger.info("trying 1.2.1");
        try {
          // for backwards compatibility, we also try to validate against ODM 1.2.1, 06/2008
          schemaValidator.validateAgainstSchema(f, xsdFile2);
          InputStreamReader isr = new InputStreamReader(new FileInputStream(f), "UTF-8");
          odmContainer = (ODMContainer) um1.unmarshal(isr);
        } catch (Exception me2) {
          // not sure if we want to report me2
          MessageFormat mf = new MessageFormat("");
          mf.applyPattern(respage.getString("your_xml_is_not_well_formed"));
          Object[] arguments = {me1.getMessage()};
          addPageMessage(mf.format(arguments));
          //
          // addPageMessage("Your XML is not well-formed, and does not
          // comply with the ODM 1.3 Schema. Please check it, and try
          // again. It returned the message: "
          // + me1.getMessage());
          // me1.printStackTrace();
          forwardPage(Page.IMPORT_CRF_DATA);
          // stop here: continuing with an unparsed container would throw NPEs in the next few parts of the code
          return;
        }
      }
      // TODO need to output further here
      // 2.a. is the study the same one that the user is in right now?
      // 3. validates against study metadata
      // 3.a. is that study subject in that study?
      // 3.b. is that study event def in that study?
      // 3.c. is that site in that study?
      // 3.d. is that crf version in that study event def?
      // 3.e. are those item groups in that crf version?
      // 3.f. are those items in that item group?

      List<String> errors =
          getImportCRFDataService().validateStudyMetadata(odmContainer, ub.getActiveStudyId());
      if (errors != null) {
        // add to session
        // forward to another page
        logger.info(errors.toString());
        for (String error : errors) {
          addPageMessage(error);
        }
        if (errors.size() > 0) {
          // fail = true;
          forwardPage(Page.IMPORT_CRF_DATA);
          return;
        } else {
          addPageMessage(respage.getString("passed_study_check"));
          addPageMessage(respage.getString("passed_oid_metadata_check"));
        }
      }
      System.out.println("passed error check");
      // TODO ADD many validation steps before we get to the
      // session-setting below
      // 4. is the event in the correct status to accept data import?
      // -- scheduled, data entry started, completed
      // (and the event should already be created)
      // (and the event should be independent, ie not affected by other
      // events)

      List<EventCRFBean> eventCRFBeans =
          getImportCRFDataService().fetchEventCRFBeans(odmContainer, ub);

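      // event CRFs whose status/stage allow data to be imported into them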
      ArrayList<Integer> permittedEventCRFIds = new ArrayList<Integer>();
      logger.info("found a list of eventCRFBeans: " + eventCRFBeans.toString());

      List<DisplayItemBeanWrapper> displayItemBeanWrappers =
          new ArrayList<DisplayItemBeanWrapper>();
      HashMap<String, String> totalValidationErrors = new HashMap<String, String>();
      HashMap<String, String> hardValidationErrors = new HashMap<String, String>();
      System.out.println("found event crfs " + eventCRFBeans.size());
      // -- does the event already exist? if not, fail
      if (!eventCRFBeans.isEmpty()) {
        for (EventCRFBean eventCRFBean : eventCRFBeans) {
          DataEntryStage dataEntryStage = eventCRFBean.getStage();
          Status eventCRFStatus = eventCRFBean.getStatus();

          logger.info(
              "Event CRF Bean: id "
                  + eventCRFBean.getId()
                  + ", data entry stage "
                  + dataEntryStage.getName()
                  + ", status "
                  + eventCRFStatus.getName());
          if (eventCRFStatus.equals(Status.AVAILABLE)
              || dataEntryStage.equals(DataEntryStage.INITIAL_DATA_ENTRY)
              || dataEntryStage.equals(DataEntryStage.INITIAL_DATA_ENTRY_COMPLETE)
              || dataEntryStage.equals(DataEntryStage.DOUBLE_DATA_ENTRY_COMPLETE)
              || dataEntryStage.equals(DataEntryStage.DOUBLE_DATA_ENTRY)) {
            // actually want the negative
            // was status == available and the stage questions, but
            // when you are at 'data entry complete' your status is
            // set to 'unavailable'.
            // >> tbh 09/2008
            // HOWEVER, when one event crf is removed and the rest
            // are good, what happens???
            // need to create a list and inform that one is blocked
            // and the rest are not...
            //
            permittedEventCRFIds.add(eventCRFBean.getId());
          } else {
            // fail = true;
            // addPageMessage(respage.getString(
            // "the_event_crf_not_correct_status"));
            // forwardPage(Page.IMPORT_CRF_DATA);
          }
        }

        // so that we don't repeat this following message
        // did we exclude all the event CRFs? if not, pass, else fail
        if (eventCRFBeans.size() >= permittedEventCRFIds.size()) {
          addPageMessage(respage.getString("passed_event_crf_status_check"));
        } else {
          fail = true;
          addPageMessage(respage.getString("the_event_crf_not_correct_status"));
        }
        // do they all have to have the right status to move
        // forward? answer from bug tracker = no
        // 5. do the items contain the correct data types?

        // 6. are all the related OIDs present?
        // that is to say, do we chain all the way down?
        // this is covered by the OID Metadata Check

        // 7. do the edit checks pass?
        // only then can we pass on to VERIFY_IMPORT_SERVLET

        // do we overwrite?

        // XmlParser xp = new XmlParser();
        // List<HashMap<String, String>> importedData =
        // xp.getData(f);

        // now we generate hard edit checks and have to set them on the screen;
        // get that from the service and generate a summary bean to set on either
        // page in the workflow: verifyImport.jsp or import.jsp

        try {
          List<DisplayItemBeanWrapper> tempDisplayItemBeanWrappers =
              new ArrayList<DisplayItemBeanWrapper>();
          tempDisplayItemBeanWrappers =
              getImportCRFDataService()
                  .lookupValidationErrors(
                      request,
                      odmContainer,
                      ub,
                      totalValidationErrors,
                      hardValidationErrors,
                      permittedEventCRFIds);
          System.out.println(
              "generated display item bean wrappers " + tempDisplayItemBeanWrappers.size());
          System.out.println("size of total validation errors: " + totalValidationErrors.size());
          displayItemBeanWrappers.addAll(tempDisplayItemBeanWrappers);
        } catch (NullPointerException npe1) {
          // what if you have 2 event crfs but the third is a fake?
          fail = true;
          logger.debug("threw a NPE after calling lookup validation errors");
          addPageMessage(respage.getString("an_error_was_thrown_while_validation_errors"));
          System.out.println("threw the null pointer at line 323, import");
          // npe1.printStackTrace();
        } catch (OpenClinicaException oce1) {
          fail = true;
          logger.debug(
              "threw an OCE after calling lookup validation errors "
                  + oce1.getOpenClinicaMessage());
          addPageMessage(oce1.getOpenClinicaMessage());
          System.out.println("threw the openclinica message at line 327, import");
        }
      } else {
        fail = true;
        addPageMessage(respage.getString("no_event_crfs_matching_the_xml_metadata"));
      }
      // for (HashMap<String, String> crfData : importedData) {
      // DisplayItemBeanWrapper displayItemBeanWrapper =
      // testing(request,
      // crfData);
      // displayItemBeanWrappers.add(displayItemBeanWrapper);
      // errors = displayItemBeanWrapper.getValidationErrors();
      //
      // }
      if (fail) {
        System.out.println("failed here - forwarding...");
        forwardPage(Page.IMPORT_CRF_DATA);
      } else {
        addPageMessage(respage.getString("passing_crf_edit_checks"));
        session.setAttribute("importedData", displayItemBeanWrappers);
        session.setAttribute("validationErrors", totalValidationErrors);
        session.setAttribute("hardValidationErrors", hardValidationErrors);
        // above are updated 'statically' by the method that originally
        // generated the wrappers; soon the only thing we will use
        // wrappers for is the 'overwrite' flag

        System.out.println("found total validation errors: " + totalValidationErrors.size());
        logger.debug("+++ content of total validation errors: " + totalValidationErrors.toString());
        SummaryStatsBean ssBean =
            getImportCRFDataService()
                .generateSummaryStatsBean(odmContainer, displayItemBeanWrappers);
        session.setAttribute("summaryStats", ssBean);
        // will have to set hard edit checks here as well
        session.setAttribute(
            "subjectData", odmContainer.getCrfDataPostImportContainer().getSubjectData());
        System.out.println("did not fail - forwarding...");
        forwardPage(Page.VERIFY_IMPORT_SERVLET);
      }
    }
  }