Example #1
0
  /**
   * Unmarshals the given XML node into an object using a Castor mapping loaded
   * from the supplied location.
   *
   * @param node the XML node to unmarshal
   * @param mappingFile location of the Castor mapping file, resolved via {@code JRLoader}
   * @return the unmarshalled object, or null if unmarshalling produced none
   * @throws JRRuntimeException if the mapping stream cannot be opened
   */
  public static Object read(Node node, String mappingFile) {
    Object object = null;

    InputStream mis = null;

    try {
      mis = JRLoader.getLocationInputStream(mappingFile);

      Mapping mapping = new Mapping();
      mapping.loadMapping(new InputSource(mis));

      object = read(node, mapping);
    } catch (JRException e) {
      throw new JRRuntimeException(e);
    } finally {
      if (mis != null) {
        try {
          mis.close();
        } catch (IOException e) {
          // Best-effort close of the mapping stream; deliberately ignored so a
          // close failure cannot mask an exception from the main path.
        }
      }
    }

    return object;
  }
  /**
   * Marshals the given rules container to XML via a Castor mapping file and writes
   * the result to the supplied writer.
   *
   * @param writer destination for the marshalled XML
   * @param rpic the rules container to marshal
   * @return the same writer, after the container has been marshalled to it
   * @throws OpenClinicaSystemException if the mapping cannot be loaded or marshalling fails
   */
  private FileWriter handleLoadCastor(FileWriter writer, RulesPostImportContainer rpic) {
    try {
      // Load the Castor mapping describing how rpic is rendered as XML.
      Mapping mapping = new Mapping();
      mapping.loadMapping(SpringServletAccess.getPropertiesDir(context) + "mappingMarshaller.xml");

      // Register the mapping and marshal the container.
      XMLContext xmlContext = new XMLContext();
      xmlContext.addMapping(mapping);

      Marshaller marshaller = xmlContext.createMarshaller();
      marshaller.setWriter(writer);
      marshaller.marshal(rpic);
      return writer;

    } catch (Exception e) {
      // All failure modes (I/O, mapping, marshal, validation) were previously
      // handled identically; one catch suffices. Pass the caught exception
      // itself as the cause — the original passed e.getCause(), which discarded
      // the immediate exception and its stack trace.
      throw new OpenClinicaSystemException(e.getMessage(), e);
    }
  }
Example #3
0
 /**
  * Loads all customers from Customers.xml using the Castor mapping in mappings.xml,
  * both resolved from the context classloader.
  *
  * @return all customers found in the XML file
  * @throws RuntimeException if the mapping or the XML cannot be read or unmarshalled
  */
 public static Customer[] getAllCustomers() {
   Mapping mapping = new Mapping();
   try {
     mapping.loadMapping(
         new InputSource(
             Thread.currentThread()
                 .getContextClassLoader()
                 .getResource("mappings.xml")
                 .getFile()));
     Unmarshaller unmarshaller = new Unmarshaller(mapping);
     // Tolerate XML elements that have no corresponding mapping entry.
     unmarshaller.setIgnoreExtraElements(true);
     CustomersContainer customersContainer =
         (CustomersContainer)
             unmarshaller.unmarshal(
                 new InputSource(
                     Thread.currentThread()
                         .getContextClassLoader()
                         .getResource("Customers.xml")
                         .getFile()));
     // toArray replaces the original hand-written element-by-element copy loop.
     return customersContainer.getCustomers().toArray(new Customer[0]);
   } catch (Exception e) {
     throw new RuntimeException(e);
   }
 }
Example #4
0
 /**
  * Loads a Castor mapping from an in-memory XML string.
  *
  * @param mappingLocation original location of the mapping, used only in log messages
  * @param mappingContents the mapping XML content itself
  * @param resolver entity resolver installed on the mapping (e.g. for the mapping DTD)
  * @return the loaded mapping; NOTE: if loading fails, the error is only logged and
  *     an empty or partially-loaded Mapping is still returned
  */
 protected static Mapping loadMappingFromString(
     String mappingLocation, String mappingContents, EntityResolver resolver) {
   InputSource mappIS = new org.xml.sax.InputSource(new StringReader(mappingContents));
   Mapping mapping = new Mapping();
   mapping.setEntityResolver(resolver);
   try {
     mapping.loadMapping(mappIS);
   } catch (Exception ex) {
     // Deliberately swallowed after logging: callers receive the Mapping object
     // even when loading failed. NOTE(review): consider propagating instead —
     // verify callers can tolerate a partially-loaded mapping.
     LOG.error("Error loading castor mapping (" + mappingLocation + "): " + ex.getMessage(), ex);
   }
   return mapping;
 }
 /**
  * Creates castor object using mapping configuration and mock xml file
  *
  * @param path Path to mock xml file
  * @return Generated object from xml file
  */
 private static Object createCastorObject(String path) {
   Object resultObject = null;
   try {
     Mapping mapping = new Mapping();
     mapping.loadMapping(CASTOR_MAPPING_FILE_PATH);
     Unmarshaller unmarshaller = new Unmarshaller(mapping);
     resultObject = unmarshaller.unmarshal(new InputSource(new FileReader(path)));
   } catch (MarshalException exception) {
     LOGGER.error(exception);
   } catch (ValidationException exception) {
     LOGGER.error(exception);
   } catch (MappingException exception) {
     LOGGER.error(exception);
   } catch (IOException exception) {
     LOGGER.error(exception);
   }
   return resultObject;
 }
  /**
   * Creates a client for the given service URL, initializing the Castor
   * marshaller/unmarshaller from the bundled /META-INF/castor-mapping.xml resource.
   *
   * <p>NOTE(review): failures are only logged; on error the constructor still
   * completes, leaving context/marshaller/unmarshaller (and serviceUrl) unset —
   * verify callers can tolerate a partially-initialized client.
   *
   * @param url the service endpoint URL stored in {@code serviceUrl}
   */
  public WasabiNetClient(String url) {
    Mapping mapping = new Mapping();
    try {
      mapping.loadMapping(getClass().getResource("/META-INF/castor-mapping.xml").toURI().toURL());
      context = new XMLContext();
      context.addMapping(mapping);

      this.marshaller = context.createMarshaller();
      this.unmarshaller = context.createUnmarshaller();

      this.serviceUrl = url;
    } catch (URISyntaxException ex) {
      Logger.getLogger(WasabiNetClient.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
      Logger.getLogger(WasabiNetClient.class.getName()).log(Level.SEVERE, null, ex);
    } catch (MappingException ex) {
      Logger.getLogger(WasabiNetClient.class.getName()).log(Level.SEVERE, null, ex);
    }
  }
  /**
   * Builds a Castor unmarshaller for the given target class, using the mapping
   * configuration resource on the classpath.
   *
   * @param configFile classpath resource name of the Castor mapping file
   * @param classToUse the class instances will be unmarshalled into
   * @return a configured unmarshaller with whitespace preservation enabled
   * @throws RuntimeException if the mapping cannot be loaded or applied
   */
  private static Unmarshaller getUnmarshaller(String configFile, Class<?> classToUse) {
    InputSource inputSource =
        new InputSource(RedmineXMLParser.class.getResourceAsStream(configFile));
    ClassLoader cl = RedmineXMLParser.class.getClassLoader();
    // Note: Castor XML is packed in a separate OSGI bundle, so
    // must set the classloader so that Castor will see our classes
    Mapping mapping = new Mapping(cl);

    Unmarshaller unmarshaller;
    try {
      // loadMapping declares checked IOException/MappingException; the original
      // called it outside the try block, leaving those exceptions unhandled.
      mapping.loadMapping(inputSource);
      unmarshaller = new Unmarshaller(mapping);
    } catch (MappingException e) {
      throw new RuntimeException(e);
    } catch (java.io.IOException e) {
      throw new RuntimeException(e);
    }
    unmarshaller.setClass(classToUse);
    unmarshaller.setWhitespacePreserve(true);
    return unmarshaller;
  }
 /**
  * Returns the shared Castor mapping, lazily loading it from the
  * /CastorMappings.xml classpath resource on first use. The method is
  * synchronized so the lazy initialization is thread-safe.
  *
  * @return the singleton Mapping instance
  * @throws RuntimeException if the mapping resource cannot be loaded
  */
 public static synchronized Mapping getMapping() {
   // Fast path: already initialized.
   if (mapping != null) {
     return mapping;
   }
   try {
     mapping = new Mapping();
     mapping.loadMapping(CastorWriter.class.getResource("/CastorMappings.xml"));
   } catch (Exception e) {
     throw new RuntimeException(e);
   }
   return mapping;
 }
Example #9
0
  /**
   * Marshals the given object to the writer, using a Castor mapping loaded from
   * the supplied location.
   *
   * @param object the object to marshal
   * @param mappingFile location of the Castor mapping file, resolved via {@code JRLoader}
   * @param writer destination for the marshalled XML
   * @throws JRRuntimeException if the mapping stream cannot be opened
   */
  public static void write(Object object, String mappingFile, Writer writer) {
    InputStream mappingStream = null;

    try {
      mappingStream = JRLoader.getLocationInputStream(mappingFile);

      Mapping mapping = new Mapping();
      mapping.loadMapping(new InputSource(mappingStream));

      write(object, mapping, writer);
    } catch (JRException e) {
      throw new JRRuntimeException(e);
    } finally {
      if (mappingStream != null) {
        try {
          mappingStream.close();
        } catch (IOException ignored) {
          // best-effort close; nothing useful to do on failure
        }
      }
    }
  }
 /**
  * Returns the Castor mapping used for XML serialization/deserialization.
  *
  * <p>If no mapping has been explicitly set, loads the default mapping resource
  * named by the {@code PROPERTIES_MAPPING_KEY} property from the context
  * classloader, resolving the Castor mapping DTD from the classpath as well.
  *
  * @return the configured mapping, or a freshly loaded default mapping
  * @throws XMLUtilityException if the default xml mapping file cannot be read
  */
 public Mapping getMapping() throws XMLUtilityException {
   /* if no mapping file explicity specified then load the default */
   log.debug("mapping is null? " + (mapping == null));
   if (mapping == null) {
     log.info("mapping is null; will try to load it");
     try {
       EntityResolver resolver =
           new EntityResolver() {
             public InputSource resolveEntity(String publicId, String systemId) {
               // Constant-first comparison: SAX may legally pass a null publicId,
               // which would NPE with publicId.equals(...).
               if ("-//EXOLAB/Castor Object Mapping DTD Version 1.0//EN".equals(publicId)) {
                 InputStream in =
                     Thread.currentThread()
                         .getContextClassLoader()
                         .getResourceAsStream("mapping.dtd");
                 return new InputSource(in);
               }
               return null;
             }
           };
       org.xml.sax.InputSource mappIS =
           new org.xml.sax.InputSource(
               Thread.currentThread()
                   .getContextClassLoader()
                   .getResourceAsStream(loadProperty(this.PROPERTIES_MAPPING_KEY)));
       Mapping localMapping = new Mapping();
       localMapping.setEntityResolver(resolver);
       localMapping.loadMapping(mappIS);
       return localMapping;
     } catch (IOException e) {
       log.error("Error reading default xml mapping file ", e);
       throw new XMLUtilityException("Error reading default xml mapping file ", e);
     }
   }
   return mapping;
 }
  /**
   * Handles the CRF data import workflow: validates the upload directory, accepts the
   * uploaded ODM XML file, unmarshals it via a Castor mapping (trying ODM 1.3 first,
   * then falling back to ODM 1.2.1 schema validation), runs study-metadata and
   * event-CRF status checks, collects validation errors, and forwards to the
   * appropriate page with summary data stored in the session.
   */
  @Override
  public void processRequest() throws Exception {
    resetPanel();
    panel.setStudyInfoShown(false);
    panel.setOrderedData(true);

    FormProcessor fp = new FormProcessor(request);
    // checks which module the requests are from
    String module = fp.getString(MODULE);
    // keep the module in the session
    session.setAttribute(MODULE, module);

    String action = request.getParameter("action");
    CRFVersionBean version = (CRFVersionBean) session.getAttribute("version");

    File xsdFile = new File(SpringServletAccess.getPropertiesDir(context) + "ODM1-3-0.xsd");
    File xsdFile2 = new File(SpringServletAccess.getPropertiesDir(context) + "ODM1-2-1.xsd");

    if (StringUtil.isBlank(action)) {
      logger.info("action is blank");
      request.setAttribute("version", version);
      forwardPage(Page.IMPORT_CRF_DATA);
    }
    if ("confirm".equalsIgnoreCase(action)) {
      String dir = SQLInitServlet.getField("filePath");
      if (!(new File(dir)).exists()) {
        logger.info("The filePath in datainfo.properties is invalid " + dir);
        addPageMessage(respage.getString("filepath_you_defined_not_seem_valid"));
        forwardPage(Page.IMPORT_CRF_DATA);
      }
      // All the uploaded files will be saved in filePath/crf/original/
      String theDir = dir + "crf" + File.separator + "original" + File.separator;
      if (!(new File(theDir)).isDirectory()) {
        (new File(theDir)).mkdirs();
        logger.info("Made the directory " + theDir);
      }
      // MultipartRequest multi = new MultipartRequest(request, theDir, 50 * 1024 * 1024);
      File f = null;
      try {
        f = uploadFile(theDir, version);

      } catch (Exception e) {
        // NOTE(review): upload failures are swallowed after logging; processing
        // continues with f == null and relies on the check below.
        logger.warn("*** Found exception during file upload***");
        e.printStackTrace();
      }
      if (f == null) {
        // NOTE(review): forwardPage does not appear to abort execution here —
        // confirm that processing is safe to continue when the upload failed.
        forwardPage(Page.IMPORT_CRF_DATA);
      }

      // TODO
      // validation steps
      // 1. valid xml - validated by file uploader below

      // LocalConfiguration config = LocalConfiguration.getInstance();
      // config.getProperties().setProperty(
      // "org.exolab.castor.parser.namespaces",
      // "true");
      // config
      // .getProperties()
      // .setProperty("org.exolab.castor.sax.features",
      // "http://xml.org/sax/features/validation,
      // http://apache.org/xml/features/validation/schema,
      // http://apache.org/xml/features/validation/schema-full-checking");
      // // above sets to validate against namespace

      // Load the Castor mapping used to unmarshal the uploaded ODM XML.
      Mapping myMap = new Mapping();
      String propertiesPath = CoreResources.PROPERTIES_DIR;
      myMap.loadMapping(propertiesPath + File.separator + "cd_odm_mapping.xml");

      Unmarshaller um1 = new Unmarshaller(myMap);
      // um1.addNamespaceToPackageMapping("http://www.cdisc.org/ns/odm/v1.3"
      // ,
      // "ODMContainer");
      boolean fail = false;
      ODMContainer odmContainer = new ODMContainer();
      try {

        // schemaValidator.validateAgainstSchema(f, xsdFile);
        // utf-8 compliance, tbh 06/2009
        InputStreamReader isr = new InputStreamReader(new FileInputStream(f), "UTF-8");
        odmContainer = (ODMContainer) um1.unmarshal(isr);

        System.out.println(
            "Found crf data container for study oid: "
                + odmContainer.getCrfDataPostImportContainer().getStudyOID());
        System.out.println(
            "found length of subject list: "
                + odmContainer.getCrfDataPostImportContainer().getSubjectData().size());
        // 2. validates against ODM 1.3
        // check it all below, throw an exception and route to a
        // different
        // page if not working

        // TODO this block of code needs the xerces serializer in order
        // to
        // work

        // StringWriter myWriter = new StringWriter();
        // Marshaller m1 = new Marshaller(myWriter);
        //
        // m1.setProperty("org.exolab.castor.parser.namespaces",
        // "true");
        // m1
        // .setProperty("org.exolab.castor.sax.features",
        // "http://xml.org/sax/features/validation,
        // http://apache.org/xml/features/validation/schema,
        // http://apache.org/xml/features/validation/schema-full-checking
        // ");
        //
        // m1.setMapping(myMap);
        // m1.setNamespaceMapping("",
        // "http://www.cdisc.org/ns/odm/v1.3");
        // m1.setSchemaLocation("http://www.cdisc.org/ns/odm/v1.3
        // ODM1-3.xsd");
        // m1.marshal(odmContainer);
        // if you havent thrown it, you wont throw it here
        addPageMessage(respage.getString("passed_xml_validation"));
      } catch (Exception me1) {
        me1.printStackTrace();
        // expanding it to all exceptions, but hoping to catch Marshal
        // Exception or SAX Exceptions
        logger.info("found exception with xml transform");
        //
        logger.info("trying 1.2.1");
        try {
          schemaValidator.validateAgainstSchema(f, xsdFile2);
          // for backwards compatibility, we also try to validate vs
          // 1.2.1 ODM 06/2008
          InputStreamReader isr = new InputStreamReader(new FileInputStream(f), "UTF-8");
          odmContainer = (ODMContainer) um1.unmarshal(isr);
        } catch (Exception me2) {
          // not sure if we want to report me2
          MessageFormat mf = new MessageFormat("");
          mf.applyPattern(respage.getString("your_xml_is_not_well_formed"));
          Object[] arguments = {me1.getMessage()};
          addPageMessage(mf.format(arguments));
          //
          // addPageMessage("Your XML is not well-formed, and does not
          // comply with the ODM 1.3 Schema. Please check it, and try
          // again. It returned the message: "
          // + me1.getMessage());
          // me1.printStackTrace();
          forwardPage(Page.IMPORT_CRF_DATA);
          // you can't really wait to forward because then you throw
          // NPEs
          // in the next few parts of the code
        }
      }
      // TODO need to output further here
      // 2.a. is the study the same one that the user is in right now?
      // 3. validates against study metadata
      // 3.a. is that study subject in that study?
      // 3.b. is that study event def in that study?
      // 3.c. is that site in that study?
      // 3.d. is that crf version in that study event def?
      // 3.e. are those item groups in that crf version?
      // 3.f. are those items in that item group?

      List<String> errors =
          getImportCRFDataService().validateStudyMetadata(odmContainer, ub.getActiveStudyId());
      if (errors != null) {
        // add to session
        // forward to another page
        logger.info(errors.toString());
        for (String error : errors) {
          addPageMessage(error);
        }
        if (errors.size() > 0) {
          // fail = true;
          forwardPage(Page.IMPORT_CRF_DATA);
        } else {
          addPageMessage(respage.getString("passed_study_check"));
          addPageMessage(respage.getString("passed_oid_metadata_check"));
        }
      }
      System.out.println("passed error check");
      // TODO ADD many validation steps before we get to the
      // session-setting below
      // 4. is the event in the correct status to accept data import?
      // -- scheduled, data entry started, completed
      // (and the event should already be created)
      // (and the event should be independent, ie not affected by other
      // events)

      List<EventCRFBean> eventCRFBeans =
          getImportCRFDataService().fetchEventCRFBeans(odmContainer, ub);

      ArrayList<Integer> permittedEventCRFIds = new ArrayList<Integer>();
      logger.info("found a list of eventCRFBeans: " + eventCRFBeans.toString());

      List<DisplayItemBeanWrapper> displayItemBeanWrappers =
          new ArrayList<DisplayItemBeanWrapper>();
      HashMap<String, String> totalValidationErrors = new HashMap<String, String>();
      HashMap<String, String> hardValidationErrors = new HashMap<String, String>();
      System.out.println("found event crfs " + eventCRFBeans.size());
      // -- does the event already exist? if not, fail
      if (!eventCRFBeans.isEmpty()) {
        for (EventCRFBean eventCRFBean : eventCRFBeans) {
          DataEntryStage dataEntryStage = eventCRFBean.getStage();
          Status eventCRFStatus = eventCRFBean.getStatus();

          logger.info(
              "Event CRF Bean: id "
                  + eventCRFBean.getId()
                  + ", data entry stage "
                  + dataEntryStage.getName()
                  + ", status "
                  + eventCRFStatus.getName());
          if (eventCRFStatus.equals(Status.AVAILABLE)
              || dataEntryStage.equals(DataEntryStage.INITIAL_DATA_ENTRY)
              || dataEntryStage.equals(DataEntryStage.INITIAL_DATA_ENTRY_COMPLETE)
              || dataEntryStage.equals(DataEntryStage.DOUBLE_DATA_ENTRY_COMPLETE)
              || dataEntryStage.equals(DataEntryStage.DOUBLE_DATA_ENTRY)) {
            // actually want the negative
            // was status == available and the stage questions, but
            // when you are at 'data entry complete' your status is
            // set to 'unavailable'.
            // >> tbh 09/2008
            // HOWEVER, when one event crf is removed and the rest
            // are good, what happens???
            // need to create a list and inform that one is blocked
            // and the rest are not...
            //
            permittedEventCRFIds.add(new Integer(eventCRFBean.getId()));
          } else {
            // fail = true;
            // addPageMessage(respage.getString(
            // "the_event_crf_not_correct_status"));
            // forwardPage(Page.IMPORT_CRF_DATA);
          }
        }

        // so that we don't repeat this following message
        // did we exclude all the event CRFs? if not, pass, else fail
        // NOTE(review): permittedEventCRFIds is built from a subset of
        // eventCRFBeans, so size() >= size() is always true and the 'fail'
        // branch below is unreachable — the comparison was likely meant to
        // be '==' (all event CRFs permitted). Confirm before changing.
        if (eventCRFBeans.size() >= permittedEventCRFIds.size()) {
          addPageMessage(respage.getString("passed_event_crf_status_check"));
        } else {
          fail = true;
          addPageMessage(respage.getString("the_event_crf_not_correct_status"));
        }
        // do they all have to have the right status to move
        // forward? answer from bug tracker = no
        // 5. do the items contain the correct data types?

        // 6. are all the related OIDs present?
        // that is to say, do we chain all the way down?
        // this is covered by the OID Metadata Check

        // 7. do the edit checks pass?
        // only then can we pass on to VERIFY_IMPORT_SERVLET

        // do we overwrite?

        // XmlParser xp = new XmlParser();
        // List<HashMap<String, String>> importedData =
        // xp.getData(f);

        // now we generate hard edit checks, and have to set that to the
        // screen. get that from the service, generate a summary bean to
        // set to either
        // page in the workflow, either verifyImport.jsp or import.jsp

        try {
          List<DisplayItemBeanWrapper> tempDisplayItemBeanWrappers =
              new ArrayList<DisplayItemBeanWrapper>();
          tempDisplayItemBeanWrappers =
              getImportCRFDataService()
                  .lookupValidationErrors(
                      request,
                      odmContainer,
                      ub,
                      totalValidationErrors,
                      hardValidationErrors,
                      permittedEventCRFIds);
          System.out.println(
              "generated display item bean wrappers " + tempDisplayItemBeanWrappers.size());
          System.out.println("size of total validation errors: " + totalValidationErrors.size());
          displayItemBeanWrappers.addAll(tempDisplayItemBeanWrappers);
        } catch (NullPointerException npe1) {
          // what if you have 2 event crfs but the third is a fake?
          fail = true;
          logger.debug("threw a NPE after calling lookup validation errors");
          addPageMessage(respage.getString("an_error_was_thrown_while_validation_errors"));
          System.out.println("threw the null pointer at line 323, import");
          // npe1.printStackTrace();
        } catch (OpenClinicaException oce1) {
          fail = true;
          logger.debug(
              "threw an OCE after calling lookup validation errors "
                  + oce1.getOpenClinicaMessage());
          addPageMessage(oce1.getOpenClinicaMessage());
          System.out.println("threw the openclinica message at line 327, import");
        }
      } else {
        fail = true;
        addPageMessage(respage.getString("no_event_crfs_matching_the_xml_metadata"));
      }
      // for (HashMap<String, String> crfData : importedData) {
      // DisplayItemBeanWrapper displayItemBeanWrapper =
      // testing(request,
      // crfData);
      // displayItemBeanWrappers.add(displayItemBeanWrapper);
      // errors = displayItemBeanWrapper.getValidationErrors();
      //
      // }
      if (fail) {
        System.out.println("failed here - forwarding...");
        forwardPage(Page.IMPORT_CRF_DATA);
      } else {
        addPageMessage(respage.getString("passing_crf_edit_checks"));
        session.setAttribute("importedData", displayItemBeanWrappers);
        session.setAttribute("validationErrors", totalValidationErrors);
        session.setAttribute("hardValidationErrors", hardValidationErrors);
        // above are updated 'statically' by the method that originally
        // generated the wrappers; soon the only thing we will use
        // wrappers for is the 'overwrite' flag

        System.out.println("found total validation errors: " + totalValidationErrors.size());
        logger.debug("+++ content of total validation errors: " + totalValidationErrors.toString());
        SummaryStatsBean ssBean =
            getImportCRFDataService()
                .generateSummaryStatsBean(odmContainer, displayItemBeanWrappers);
        session.setAttribute("summaryStats", ssBean);
        // will have to set hard edit checks here as well
        session.setAttribute(
            "subjectData", odmContainer.getCrfDataPostImportContainer().getSubjectData());
        System.out.println("did not fail - forwarding...");
        forwardPage(Page.VERIFY_IMPORT_SERVLET);
      }
    }
  }
  /**
   * @api {get} /rest2/openrosa/:studyOID/manifest Get Form Manifest
   * @apiName getManifest
   * @apiPermission admin
   * @apiVersion 1.0.0
   * @apiParam {String} studyOID Study Oid.
   * @apiGroup Form
   * @apiDescription Gets additional information on a particular Form, including links to associated
   *     media.
   */
  @GET
  @Path("/{studyOID}/manifest")
  @Produces(MediaType.TEXT_XML)
  public String getManifest(
      @Context HttpServletRequest request,
      @Context HttpServletResponse response,
      @PathParam("studyOID") String studyOID,
      @QueryParam("formId") String crfOID,
      @RequestHeader("Authorization") String authorization,
      @Context ServletContext context)
      throws Exception {
    if (!mayProceedPreview(studyOID)) return null;

    CRFVersionDAO cVersionDao = new CRFVersionDAO(getDataSource());
    CrfVersionMediaDao mediaDao =
        (CrfVersionMediaDao)
            SpringServletAccess.getApplicationContext(context).getBean("crfVersionMediaDao");

    CRFVersionBean crfVersion = cVersionDao.findByOid(crfOID);
    Manifest manifest = new Manifest();

    List<CrfVersionMedia> mediaList = mediaDao.findByCrfVersionId(crfVersion.getId());
    if (mediaList != null && mediaList.size() > 0) {
      for (CrfVersionMedia media : mediaList) {
        Calendar cal = Calendar.getInstance();
        cal.setTime(new Date());
        String urlBase =
            getCoreResources().getDataInfo().getProperty("sysURL").split("/MainMenu")[0];

        MediaFile mediaFile = new MediaFile();
        mediaFile.setFilename(media.getName());
        // Time-based hash forces clients to re-download media on every request.
        mediaFile.setHash(DigestUtils.md5Hex(String.valueOf(cal.getTimeInMillis())));
        mediaFile.setDownloadUrl(
            urlBase
                + "/rest2/openrosa/"
                + studyOID
                + "/downloadMedia?crfVersionMediaId="
                + media.getCrfVersionMediaId());
        manifest.add(mediaFile);
      }
    }
    try {
      // Create the XML manifest using a Castor mapping file.
      XMLContext xmlContext = new XMLContext();
      Mapping mapping = xmlContext.createMapping();
      mapping.loadMapping(getCoreResources().getURL("openRosaManifestMapping.xml"));
      xmlContext.addMapping(mapping);

      Marshaller marshaller = xmlContext.createMarshaller();
      StringWriter writer = new StringWriter();
      marshaller.setWriter(writer);
      marshaller.marshal(manifest);

      // Set response headers. The HTTP Date header must use English day/month
      // names, so pin the locale (the default-locale SimpleDateFormat produced
      // an invalid header on non-English JVMs).
      Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
      Date currentDate = new Date();
      cal.setTime(currentDate);
      SimpleDateFormat format =
          new SimpleDateFormat("E, dd MMM yyyy HH:mm:ss zz", java.util.Locale.US);
      format.setCalendar(cal);
      response.setHeader("Content-Type", "text/xml; charset=UTF-8");
      response.setHeader("Date", format.format(currentDate));
      response.setHeader("X-OpenRosa-Version", "1.0");
      return writer.toString();
    } catch (Exception e) {
      LOGGER.error(e.getMessage());
      LOGGER.error(ExceptionUtils.getStackTrace(e));
      return "<Error>" + e.getMessage() + "</Error>";
    }
  }
  /**
   * @api {get} /rest2/openrosa/:studyOID/formList Get Form List
   * @apiName getFormList
   * @apiPermission admin
   * @apiVersion 1.0.0
   * @apiParam {String} studyOID Study Oid.
   * @apiGroup Form
   * @apiDescription Retrieves a listing of the available OpenClinica forms.
   * @apiParamExample {json} Request-Example: { "studyOid": "S_SAMPLTE", }
   * @apiSuccessExample {xml} Success-Response: HTTP/1.1 200 OK { <xforms
   *     xmlns="http://openrosa.org/xforms/xformsList"> <xform> <formID>F_FIRSTFORM_1</formID>
   *     <name>First Form</name> <majorMinorVersion>1</majorMinorVersion> <version>1</version>
   *     <hash>8678370cd92814d4e3216d58d821403f</hash>
   *     <downloadUrl>http://oc1.openclinica.com/OpenClinica-web/rest2/openrosa/S_SAMPLTE/formXml?
   *     formId=F_FIRSTFORM_1</downloadUrl> </xform> <xform> <formID>F_SECONDFORM_1</formID>
   *     <name>Second Form</name> <majorMinorVersion>1</majorMinorVersion> <version>1</version>
   *     <hash>7ee60d1c6516b730bbe9bdbd7cad942f</hash>
   *     <downloadUrl>http://oc1.openclinica.com/OpenClinica-web/rest2/openrosa/S_SAMPLTE/formXml?
   *     formId=F_SECONDFORM_1</downloadUrl> </xform> </xforms>
   */
  @GET
  @Path("/{studyOID}/formList")
  @Produces(MediaType.TEXT_XML)
  public String getFormList(
      @Context HttpServletRequest request,
      @Context HttpServletResponse response,
      @PathParam("studyOID") String studyOID,
      @QueryParam("formID") String crfOID,
      @RequestHeader("Authorization") String authorization,
      @Context ServletContext context)
      throws Exception {
    if (!mayProceedPreview(studyOID)) return null;

    StudyDAO sdao = new StudyDAO(getDataSource());
    StudyBean study = sdao.findByOid(studyOID);

    CRFDAO cdao = new CRFDAO(getDataSource());
    Collection<CRFBean> crfs = cdao.findAll();

    CRFVersionDAO cVersionDao = new CRFVersionDAO(getDataSource());
    Collection<CRFVersionBean> crfVersions = cVersionDao.findAll();

    CrfVersionMediaDao mediaDao =
        (CrfVersionMediaDao)
            SpringServletAccess.getApplicationContext(context).getBean("crfVersionMediaDao");

    try {
      XFormList formList = new XFormList();
      for (CRFBean crf : crfs) {
        for (CRFVersionBean version : crfVersions) {
          if (version.getCrfId() == crf.getId()) {
            XForm form = new XForm(crf, version);
            // TODO: Need to generate hash based on contents of
            // XForm. Will be done in a later story.
            // TODO: For now all XForms get a date based hash to
            // trick Enketo into always downloading
            // TODO: them.
            Calendar cal = Calendar.getInstance();
            cal.setTime(new Date());
            form.setHash(DigestUtils.md5Hex(String.valueOf(cal.getTimeInMillis())));

            String urlBase =
                getCoreResources().getDataInfo().getProperty("sysURL").split("/MainMenu")[0];
            form.setDownloadURL(
                urlBase + "/rest2/openrosa/" + studyOID + "/formXml?formId=" + version.getOid());

            // Only advertise a manifest when the version actually has media.
            List<CrfVersionMedia> mediaList = mediaDao.findByCrfVersionId(version.getId());
            if (mediaList != null && mediaList.size() > 0) {
              form.setManifestURL(
                  urlBase + "/rest2/openrosa/" + studyOID + "/manifest?formId=" + version.getOid());
            }
            formList.add(form);
          }
        }
      }

      // Create the XML formList using a Castor mapping file.
      XMLContext xmlContext = new XMLContext();
      Mapping mapping = xmlContext.createMapping();
      mapping.loadMapping(getCoreResources().getURL("openRosaFormListMapping.xml"));
      xmlContext.addMapping(mapping);

      Marshaller marshaller = xmlContext.createMarshaller();
      StringWriter writer = new StringWriter();
      marshaller.setWriter(writer);
      marshaller.marshal(formList);

      // Set response headers. The HTTP Date header must use English day/month
      // names, so pin the locale (the default-locale SimpleDateFormat produced
      // an invalid header on non-English JVMs).
      Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
      Date currentDate = new Date();
      cal.setTime(currentDate);
      SimpleDateFormat format =
          new SimpleDateFormat("E, dd MMM yyyy HH:mm:ss zz", java.util.Locale.US);
      format.setCalendar(cal);
      response.setHeader("Content-Type", "text/xml; charset=UTF-8");
      response.setHeader("Date", format.format(currentDate));
      response.setHeader("X-OpenRosa-Version", "1.0");
      return writer.toString();
    } catch (Exception e) {
      LOGGER.error(e.getMessage());
      LOGGER.error(ExceptionUtils.getStackTrace(e));
      return "<Error>" + e.getMessage() + "</Error>";
    }
  }