public void copyInto(WireFeed feed, SyndFeed syndFeed) {
    Channel channel = (Channel) feed;
    super.copyInto(channel, syndFeed);
    // copy channel-level metadata onto the synthetic feed
    syndFeed.setLanguage(channel.getLanguage());
    syndFeed.setCopyright(channel.getCopyright());
    // prefer the channel's pubDate; fall back to lastBuildDate
    Date pubDate = channel.getPubDate();
    if (pubDate != null) {
      syndFeed.setPublishedDate(pubDate);
    } else if (channel.getLastBuildDate() != null) {
      syndFeed.setPublishedDate(channel.getLastBuildDate());
    }
    }

    String author = channel.getManagingEditor();
    if (author != null) {
      List creators = ((DCModule) syndFeed.getModule(DCModule.URI)).getCreators();
      if (!creators.contains(author)) {
        Set s = new HashSet(); // using a set to remove duplicates
        s.addAll(creators); // DC creators
        s.add(author); // feed native author
        creators.clear();
        creators.addAll(s);
      }
    }
  }
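For context, a minimal sketch of how a converter like the one above is typically driven in ROME: building a SyndFeed through SyndFeedInput parses the wire feed and invokes the registered converter's copyInto, so the channel's managing editor ends up merged into the Dublin Core creators. This assumes the older com.sun.syndication packages used elsewhere in these examples and that the converter above is the one registered for the feed type; the feed URL is a placeholder.

import java.net.URL;
import java.util.List;
import com.sun.syndication.feed.module.DCModule;
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.io.SyndFeedInput;
import com.sun.syndication.io.XmlReader;

public class ConverterDemo {
  public static void main(String[] args) throws Exception {
    // SyndFeedInput parses the wire feed and runs the converter's copyInto()
    // while building the SyndFeed view of the RSS channel.
    SyndFeedInput input = new SyndFeedInput();
    SyndFeed syndFeed = input.build(new XmlReader(new URL("http://example.com/rss.xml"))); // placeholder URL
    System.out.println("Language:  " + syndFeed.getLanguage());
    System.out.println("Published: " + syndFeed.getPublishedDate());
    // The managing editor merged in copyInto() shows up among the DC creators.
    List creators = ((DCModule) syndFeed.getModule(DCModule.URI)).getCreators();
    System.out.println("Creators:  " + creators);
  }
}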
Example #2
  /**
   * Imports patient records from remote pc.
   *
   * @param conn - database connection
   * @param urlString - url of feed
   * @param view - if true, simply parses the rss feed and provides output without logging the update.
   * @param comments - messages generated during import process
   * @param start - indicates if this is the first time this feed has been imported.
   * @param siteId - id of site being imported.
   * @param reload - if not null, forces re-import of records regardless of build date.
   * @param refreshPatientZeprsId - zeprs id of a single patient record to refresh, or null.
   * @return status message
   * @throws Exception
   */
  public static String importFeed(
      Connection conn,
      String urlString,
      Boolean view,
      StringBuffer comments,
      Long start,
      Long siteId,
      Long reload,
      String refreshPatientZeprsId)
      throws Exception, ConnectException, NoRouteToHostException {
    // download the rss file from the remote server
    URL feedUrl = new URL(urlString);
    HttpURLConnection httpcon = (HttpURLConnection) feedUrl.openConnection();
    boolean isProxied = httpcon.usingProxy();
    if (isProxied) {
      log.debug(" Proxied: " + isProxied);
    }
    // sun.misc.BASE64Encoder is a proprietary Sun class; use the Apache Commons Codec Base64 instead.
    String credentials =
        new String(org.apache.commons.codec.binary.Base64.encodeBase64("zepadmin:zepadmin11".getBytes()));
    httpcon.setRequestProperty("Authorization", "Basic " + credentials);

    final WireFeedInput input = new WireFeedInput();
    WireFeed rssFeed = null;
    XmlReader reader = null;
    String message = null;
    String siteAbbrev = null;
    Channel channel = null;
    java.util.Date lastBuildDate = null;

    Site site = (Site) DynaSiteObjects.getClinicMap().get(siteId);
    siteAbbrev = site.getAbbreviation();

    try {
      String importFilePath =
          org.cidrz.webapp.dynasite.Constants.ARCHIVE_PATH
              + siteAbbrev
              + Constants.pathSep
              + "import"
              + Constants.pathSep;
      // Save the feed to the local filesystem. This will be used to help the master create its own
      // feed for this site.
      BufferedInputStream in = new BufferedInputStream(httpcon.getInputStream());
      String importedRssPath = importFilePath + "rss.xml";
      Writer writer = new FileWriter(importedRssPath);
      for (int c = in.read(); c != -1; c = in.read()) {
        writer.write(c);
      }
      writer.close();
      in.close();

      // make a hashmap of zeprs id's:last_modified from database
      HashMap<String, Timestamp> localPatientMap = new HashMap<String, Timestamp>();
      ResultSet updatedPatients = PatientDAO.getAllRS(conn);
      while (updatedPatients.next()) {
        String zeprsId = updatedPatients.getString("district_patient_id");
        Timestamp lastModified = updatedPatients.getTimestamp("last_modified");
        // Long site_id = updatedPatients.getLong("site_id");
        String fileName = zeprsId + ".xml";
        localPatientMap.put(fileName, lastModified);
      }
      int changes = 0;
      // check what site this pc is publishing
      Long publisherSiteId = null;
      Publisher publisher = null;
      String publisherFile = Constants.ARCHIVE_PATH + "publisher.xml";
      try {
        publisher = (Publisher) XmlUtils.getOne(publisherFile);
        publisherSiteId = publisher.getSiteId();
      } catch (FileNotFoundException e) {
        // it's ok - file not created yet.
      }
      boolean process = true;
      Timestamp buildDate = null;
      try {
        UpdateLog updateLog = (UpdateLog) UpdateLogDAO.getLastUpdate(conn, siteAbbrev);
        buildDate = updateLog.getBuilddate();
      } catch (SQLException e) {
        e.printStackTrace();
      } catch (ServletException e) {
        e.printStackTrace();
      } catch (ObjectNotFoundException e) {
        // log.debug("No entries in updateLog - this must be a new site subscription for " +
        // siteAbbrev + ". Forcing import of records for this site. ");
        log.debug(
            "No entries in updateLog - this must be a new site subscription for "
                + siteAbbrev
                + ". ");
        // cekelley May 28, 2009 - disabled forced import (reload) of records. Unnecessary and
        // time-consuming.
        // reload = Long.valueOf("1");
        comments.append("Subscribed to site: " + siteAbbrev + ". ");
      }

      File importedRssFile = new File(importedRssPath);
      reader = new XmlReader(importedRssFile);
      rssFeed = input.build(reader);
      rssFeed.setFeedType("rss_2.0");
      channel = (Channel) rssFeed;
      List items = channel.getItems();
      siteAbbrev = channel.getTitle();
      lastBuildDate = channel.getLastBuildDate();
      String previousSiteAbbrev = null;

      for (int i = 0; i < items.size(); i++) {
        process = true;
        Item item = (Item) items.get(i);
        String link = item.getLink();
        String fileName = item.getTitle();
        Long clinicHomeId = null;
        if (item.getAuthor() != null) {
          clinicHomeId = Long.valueOf(item.getAuthor());
        }
        if (publisherSiteId != null
            && clinicHomeId != null
            && (publisherSiteId.longValue() != clinicHomeId.longValue())
            && !publisher.getSite().getSiteAlphaId().equals("MST")) {
          process = false;
        } else if (publisher != null && publisher.getSite().getSiteAlphaId().equals("MST")) {
          process = true; // the MST (master) site updates records regardless of their home site
        }
        java.util.Date pubDate = item.getPubDate();
        Timestamp lastModifiedTimestamp = localPatientMap.get(fileName);
        java.util.Date currentLastModified = null;
        if (lastModifiedTimestamp != null) {
          currentLastModified = DateUtils.toDate(lastModifiedTimestamp);
        }

        if ((DynaSiteObjects.getStatusMap().get("Halt-Feed-Imports") != null)
            && (DynaSiteObjects.getStatusMap().get("Halt-Feed-Imports").equals("true"))) {
          log.debug("Halting Feed Imports: Halt-Feed-Imports = true");
          break;
        }
        site = (Site) DynaSiteObjects.getClinicMap().get(clinicHomeId);
        try {
          siteAbbrev = site.getAbbreviation();
          previousSiteAbbrev = siteAbbrev;
        } catch (NullPointerException e) {
          log.debug(
              "Site not found for " + fileName + " Setting siteAbbrev to " + previousSiteAbbrev);
        }

        URL patientUrl = new URL(link);
        //  if (view == 0) {
        if (process) {
          // If imported record's pubDate > most recent buildDate, process. Previously we only
          // checked if
          // currentLastModified != pubdate, but that would cause every out-of-date record to be
          // imported.
          if (refreshPatientZeprsId != null
              && (refreshPatientZeprsId.concat(".xml").equals(fileName))) {
            try {
              importPatientRecord(
                  currentLastModified,
                  fileName,
                  pubDate,
                  importFilePath,
                  patientUrl,
                  credentials,
                  conn,
                  comments,
                  Long.valueOf(1),
                  siteAbbrev,
                  view);
            } catch (Exception e) {
              log.debug(e);
            }
          } else {
            if ((reload != null)
                || (view)
                || (currentLastModified == null)
                || (buildDate == null)
                || (buildDate != null && pubDate.getTime() > buildDate.getTime())) {
              changes++;
              try {
                importPatientRecord(
                    currentLastModified,
                    fileName,
                    pubDate,
                    importFilePath,
                    patientUrl,
                    credentials,
                    conn,
                    comments,
                    reload,
                    siteAbbrev,
                    view);
              } catch (Exception e) {
                log.debug(e);
                e.printStackTrace();
              }
            }
          }
        }
        // }
      }
    } catch (SocketException e1) {
      message = "Unable to connect to host for feed: " + urlString;
      log.debug(message);
    }

    if (!view) {
      if (comments.toString().equals("")) {
        message = "No records to update.";
      } else {
        Timestamp updated = new Timestamp(System.currentTimeMillis());
        // reset siteAbbrev to channel
        siteAbbrev = channel.getTitle();
        String commentsString = null;
        if (comments.toString().length() > 500) {
          commentsString = comments.toString().substring(0, 500);
        } else {
          commentsString = comments.toString();
        }
        UpdateLogDAO.save(
            conn, updated, new Timestamp(lastBuildDate.getTime()), siteAbbrev, commentsString);
        Subscription subscription = (Subscription) SubscriptionDAO.getOne(conn, urlString);
        // this is getting imported for the first time either manually or via a scheduled job.
        if (start != null || subscription.getDatesubscribed() == null) {
          Timestamp datesubscribed = new Timestamp(System.currentTimeMillis());
          SubscriptionDAO.updateSite(conn, subscription.getId(), siteAbbrev, datesubscribed);
        }
        // log.debug(comments);
      }
    }
    return message;
  }
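A standalone sketch of the fetch-and-parse core of importFeed(), for reference. It assumes the same com.sun.syndication (ROME) and Commons Codec classes used above; the URL and credentials are placeholders, and the site-specific filtering and record import are omitted.

import java.net.HttpURLConnection;
import java.net.URL;
import java.util.List;
import org.apache.commons.codec.binary.Base64;
import com.sun.syndication.feed.WireFeed;
import com.sun.syndication.feed.rss.Channel;
import com.sun.syndication.feed.rss.Item;
import com.sun.syndication.io.WireFeedInput;
import com.sun.syndication.io.XmlReader;

public class FeedFetchDemo {
  public static void main(String[] args) throws Exception {
    URL feedUrl = new URL("http://example.com/zeprs/rss.xml"); // placeholder url
    HttpURLConnection httpcon = (HttpURLConnection) feedUrl.openConnection();
    String credentials = new String(Base64.encodeBase64("user:password".getBytes())); // placeholder credentials
    httpcon.setRequestProperty("Authorization", "Basic " + credentials);

    // Parse the RSS 2.0 channel straight off the connection stream.
    WireFeedInput input = new WireFeedInput();
    WireFeed rssFeed = input.build(new XmlReader(httpcon.getInputStream()));
    Channel channel = (Channel) rssFeed;
    System.out.println("Site: " + channel.getTitle() + ", built " + channel.getLastBuildDate());

    List items = channel.getItems();
    for (Object o : items) {
      Item item = (Item) o;
      // Each item carries the patient file name (title), record url (link) and pubDate,
      // which importFeed() compares against the local last_modified timestamps.
      System.out.println(item.getTitle() + " @ " + item.getLink() + " (" + item.getPubDate() + ")");
    }
  }
}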