Example #1
 public String findLeastFullRepository(Map<String, PlatformUtil.DF> repoMap) {
   // return the key of the repository with the most available space ("least full")
   String mostFree = null;
   PlatformUtil.DF mostFreeDf = null;
   for (Map.Entry<String, PlatformUtil.DF> ent : repoMap.entrySet()) {
     PlatformUtil.DF df = ent.getValue();
     if (df != null && (mostFreeDf == null || mostFreeDf.getAvail() < df.getAvail())) {
       mostFree = ent.getKey();
       mostFreeDf = df;
     }
   }
   return mostFree;
 }
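A minimal usage sketch: the repository with the largest available space wins. Here makeDf is a hypothetical helper standing in for however the PlatformUtil.DF values are actually obtained; it is not part of PlatformUtil.

 // Hypothetical usage: pick the repository with the most available space.
 Map<String, PlatformUtil.DF> repoMap = new HashMap<>();
 repoMap.put("local:/cache1", makeDf(500_000L)); // makeDf is illustrative only
 repoMap.put("local:/cache2", makeDf(750_000L));
 String best = findLeastFullRepository(repoMap); // expected: "local:/cache2"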
  /*
   * When testing basic XML parsing with no PDF check, you will get partial metadata (MD) records
   * depending on whether the info comes from dataset.xml or from main.xml
   */
  private void validateDatasetMetadataRecord(ArticleMetadata am) {
    log.debug3("valideDatasetMetadatRecord");
    String doi_val = am.get(MetadataField.FIELD_DOI);
    assertEquals(common_issn, am.get(MetadataField.FIELD_ISSN));

    log.debug3("doi val is: " + doi_val);
    // The dataset doesn't set this value; it will fall back to the main.xml value
    if (doi_val.equals("10.1016/S0140-1111(14)61865-1")) {
      assertEquals(null, am.get(MetadataField.FIELD_DATE));
    } else {
      assertEquals(dateMap.get(doi_val), am.get(MetadataField.FIELD_DATE));
    }
    assertEquals(pubTitleMap.get(doi_val), am.get(MetadataField.FIELD_PUBLICATION_TITLE));
  }
  /** Create LockssKeystores from config subtree below {@link #PARAM_KEYSTORE} */
  void configureKeyStores(Configuration config) {
    Configuration allKs = config.getConfigTree(PARAM_KEYSTORE);
    for (Iterator iter = allKs.nodeIterator(); iter.hasNext(); ) {
      String id = (String) iter.next();
      Configuration oneKs = allKs.getConfigTree(id);
      try {
        LockssKeyStore lk = createLockssKeyStore(oneKs);
        String name = lk.getName();
        if (name == null) {
          log.error("KeyStore definition missing name: " + oneKs);
          continue;
        }
        LockssKeyStore old = keystoreMap.get(name);
        if (old != null && !lk.equals(old)) {
          log.warning(
              "Keystore "
                  + name
                  + " redefined.  "
                  + "New definition may not take effect until daemon restart");
        }

        log.debug("Adding keystore " + name);
        keystoreMap.put(name, lk);

      } catch (Exception e) {
        log.error("Couldn't create keystore: " + oneKs, e);
      }
    }
  }
  /*
   * You will have to tell it the DOI and the schema because those normally come from the dataset
   */
  private void validateSingleMainMetadataRecord(ArticleMetadata am, String doi_val, String schema) {
    log.debug3("valideSingleMainMetadatRecord");
    if ("simple-article".equals(schema)) {
      assertEquals(common_simple_article_title, am.get(MetadataField.FIELD_ARTICLE_TITLE));
    } else {
      assertEquals(common_article_title, am.get(MetadataField.FIELD_ARTICLE_TITLE));
    }

    log.debug3("doi val is: " + doi_val);
    assertEquals(authorMap.get(doi_val), am.getList(MetadataField.FIELD_AUTHOR));
    assertEquals(volMap.get(doi_val), am.get(MetadataField.FIELD_VOLUME));
    assertEquals(issueMap.get(doi_val), am.get(MetadataField.FIELD_ISSUE));
    assertEquals("Comment", am.getRaw(ElsevierMainDTD5XmlSchemaHelper.common_dochead));
    assertEquals(doi_val, am.getRaw(ElsevierMainDTD5XmlSchemaHelper.common_doi));
    assertEquals("2014", am.getRaw(ElsevierMainDTD5XmlSchemaHelper.common_copyright));
  }
Example #5
 // local-only hack
 public synchronized LockssRepositoryImpl getRepositoryFromPath(String path) {
   LockssRepositoryImpl repo = (LockssRepositoryImpl) localRepos.get(path);
   if (repo == null) {
     repo = new LockssRepositoryImpl(path);
     repo.initService(getDaemon());
     repo.startService();
     localRepos.put(path, repo);
   }
   return repo;
 }
 static LocalRepository getLocalRepository(String repoRoot) {
   synchronized (localRepositories) {
     LocalRepository localRepo = (LocalRepository) localRepositories.get(repoRoot);
     if (localRepo == null) {
       logger.debug2("Creating LocalRepository(" + repoRoot + ")");
       localRepo = new LocalRepository(repoRoot);
       localRepositories.put(repoRoot, localRepo);
     }
     return localRepo;
   }
 }
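Both methods above use the same synchronized get-or-create (memoization) pattern. A minimal sketch of an equivalent cache using ConcurrentHashMap.computeIfAbsent, assuming only the LocalRepository(String) constructor seen above (logging omitted; the class and method names are illustrative):

 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;

 class LocalRepositoryCache {
   private final ConcurrentMap<String, LocalRepository> repos = new ConcurrentHashMap<>();

   LocalRepository get(String repoRoot) {
     // creates the repository at most once per root, without an explicit synchronized block
     return repos.computeIfAbsent(repoRoot, LocalRepository::new);
   }
 }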
Example #7
  public void testText() throws Exception {
    MockStatusAccessor statusAccessor =
        MockStatusAccessor.generateStatusAccessor(colArray1, rowArray1);
    statusAccessor.setTitle("testtbl", null);
    statSvc.registerStatusAccessor("testtbl", statusAccessor);

    WebResponse resp = getTable("testtbl", true);
    assertResponseOk(resp);
    assertEquals("Content type", "text/plain", resp.getContentType());
    log.debug(resp.getText());
    List lines = getLines(resp);
    assertEquals(rowArray1.length + 3, lines.size());
    Map row0 = getRow((String) lines.get(0));
    assertEquals("2.4.6.8", row0.get("host"));

    Map row2 = getRow((String) lines.get(2));
    assertEquals("testtbl", row2.get("table"));

    assertEqualTables(table1, lines);
  }
 static String canonRoot(String root) {
   synchronized (canonicalRoots) {
     String canon = (String) canonicalRoots.get(root);
     if (canon == null) {
       try {
         canon = new File(root).getCanonicalPath();
         canonicalRoots.put(root, canon);
       } catch (IOException e) {
         logger.warning("Can't canonicalize: " + root, e);
         return root;
       }
     }
     return canon;
   }
 }
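canonRoot memoizes File.getCanonicalPath per root string; a brief illustration of the expected behavior (paths are illustrative and platform-dependent):

 // Both spellings should resolve to the same canonical path on a typical Unix layout;
 // repeated calls with the same root string are then served from the canonicalRoots cache.
 String a = canonRoot("/tmp/cache");
 String b = canonRoot("/tmp/subdir/../cache");
 assert a.equals(b);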
  /*
   * When testing a complete extraction from the tar set, the MD record will be completely filled
   * in and PDF existence will be established
   */
  private void validateCompleteMetadataRecord(ArticleMetadata am) {
    log.debug3("valideCompleteMetadatRecord");
    String doi_val = am.get(MetadataField.FIELD_DOI);
    /* make sure we can pick up both types of xml article data */
    log.debug3("doi val is: " + doi_val);

    if ("JA 5.2.0 SIMPLE-ARTICLE"
        .equals(am.getRaw(ElsevierDatasetXmlSchemaHelper.dataset_dtd_metadata))) {
      log.debug3("simple-article");
      assertEquals(common_simple_article_title, am.get(MetadataField.FIELD_ARTICLE_TITLE));
    } else {
      assertEquals(common_article_title, am.get(MetadataField.FIELD_ARTICLE_TITLE));
    }
    assertEquals(common_issn, am.get(MetadataField.FIELD_ISSN));
    assertEquals(authorMap.get(doi_val), am.getList(MetadataField.FIELD_AUTHOR));
    assertEquals(dateMap.get(doi_val), am.get(MetadataField.FIELD_DATE));
    assertEquals(accessUrlMap.get(doi_val), am.get(MetadataField.FIELD_ACCESS_URL));
    assertEquals(volMap.get(doi_val), am.get(MetadataField.FIELD_VOLUME));
    assertEquals(issueMap.get(doi_val), am.get(MetadataField.FIELD_ISSUE));
    assertEquals(pubTitleMap.get(doi_val), am.get(MetadataField.FIELD_PUBLICATION_TITLE));
    assertEquals("Elsevier", am.get(MetadataField.FIELD_PROVIDER));
    assertEquals("Elsevier", am.get(MetadataField.FIELD_PUBLISHER));
    log.debug3(am.ppString(2));
  }
 /**
  * Finds the directory for this AU. If none is found in the map and create is true, designates a
  * new dir for it.
  *
  * @param auid AU id representing the au
  * @param repoRoot path to the root of the repository
  * @param create if true, designate a new dir when none is found
  * @return the dir String, or null if none is found and create is false
  */
 static String getAuDir(String auid, String repoRoot, boolean create) {
   String repoCachePath = extendCacheLocation(repoRoot);
   LocalRepository localRepo = getLocalRepository(repoRoot);
   synchronized (localRepo) {
     Map aumap = localRepo.getAuMap();
     String auPathSlash = (String) aumap.get(auid);
     if (auPathSlash != null) {
       return auPathSlash;
     }
     if (!create) {
       return null;
     }
     logger.debug3("Creating new au directory for '" + auid + "'.");
     String auDir = localRepo.getPrevAuDir();
     for (int cnt = RepositoryManager.getMaxUnusedDirSearch(); cnt > 0; cnt--) {
       // loop through looking for an available dir
       auDir = getNextDirName(auDir);
       File testDir = new File(repoCachePath, auDir);
       if (logger.isDebug3()) logger.debug3("Probe for unused: " + testDir);
       if (!testDir.exists()) {
         if (RepositoryManager.isStatefulUnusedDirSearch()) {
           localRepo.setPrevAuDir(auDir);
         }
         String auPath = testDir.toString();
         logger.debug3("New au directory: " + auPath);
         auPathSlash = auPath + File.separator;
         // write the new au property file to the new dir
         // XXX this data should be backed up elsewhere to avoid single-point
         // corruption
         Properties idProps = new Properties();
         idProps.setProperty(AU_ID_PROP, auid);
         saveAuIdProperties(auPath, idProps);
         aumap.put(auid, auPathSlash);
         return auPathSlash;
       } else {
         if (logger.isDebug3()) {
           logger.debug3("Existing directory found at '" + auDir + "'.  Checking next...");
         }
       }
     }
   }
   throw new RuntimeException(
       "Can't find unused repository dir after "
           + RepositoryManager.getMaxUnusedDirSearch()
           + " tries in "
           + repoCachePath);
 }
Example #11
  /**
   * Adds to the given set the pluginIds of TdbAus in otherTdb that differ from those in this Tdb.
   *
   * @param pluginIds the set to which pluginIds are added
   * @param otherTdb the Tdb to compare against this one
   */
  private void addPluginIdsForDifferences(Set<String> pluginIds, Tdb otherTdb) {
    Map<String, TdbPublisher> tdbPublishers = otherTdb.getAllTdbPublishers();

    for (TdbPublisher tdbPublisher : tdbPublishers.values()) {
      if (!this.tdbPublisherMap.containsKey(tdbPublisher.getName())) {
        // add pluginIds for publishers in tdb that are not in this Tdb
        tdbPublisher.addAllPluginIds(pluginIds);
      }
    }

    for (TdbPublisher thisPublisher : tdbPublisherMap.values()) {
      TdbPublisher tdbPublisher = tdbPublishers.get(thisPublisher.getName());
      if (tdbPublisher == null) {
        // add pluginIds for publisher in this Tdb that is not in tdb
        thisPublisher.addAllPluginIds(pluginIds);
      } else {
        // add pluginIds for publishers in both Tdbs that are different
        thisPublisher.addPluginIdsForDifferences(pluginIds, tdbPublisher);
      }
    }
  }
    /**
     * Return the auid -> au-subdir-path mapping, enumerating the directories if necessary to
     * initialize the map.
     */
    Map getAuMap() {
      if (auMap == null) {
        logger.debug3("Loading name map for '" + repoCacheFile + "'.");
        auMap = new HashMap();
        if (!repoCacheFile.exists()) {
          logger.debug3("Creating cache dir:" + repoCacheFile + "'.");
          if (!repoCacheFile.mkdirs()) {
            logger.critical("Couldn't create directory, check owner/permissions: " + repoCacheFile);
            // return empty map
            return auMap;
          }
        } else {
          // read each dir's property file and store mapping auid -> dir
          File[] auDirs = repoCacheFile.listFiles();
          for (int ii = 0; ii < auDirs.length; ii++) {
            String dirName = auDirs[ii].getName();
            //       if (dirName.compareTo(lastPluginDir) == 1) {
            //         // adjust the 'lastPluginDir' upwards if necessary
            //         lastPluginDir = dirName;
            //       }

            String path = auDirs[ii].getAbsolutePath();
            Properties idProps = getAuIdProperties(path);
            if (idProps != null) {
              String auid = idProps.getProperty(AU_ID_PROP);
              StringBuilder sb = new StringBuilder(path.length() + File.separator.length());
              sb.append(path);
              sb.append(File.separator);
              auMap.put(auid, sb.toString());
              logger.debug3("Mapping to: " + auMap.get(auid) + ": " + auid);
            } else {
              logger.debug3("Not mapping " + path + ", no auid file.");
            }
          }
        }
      }
      return auMap;
    }
 /**
  * Return the named LockssKeyStore
  *
  * @param name the keystore name
  * @param criticalServiceName if non-null, this is a critical keystore whose unavailability
  *     should cause the daemon to exit (if org.lockss.keyMgr.exitIfMissingKeystore is true)
  */
 public LockssKeyStore getLockssKeyStore(String name, String criticalServiceName) {
   LockssKeyStore res = keystoreMap.get(name);
   checkFact(res, name, criticalServiceName, null);
   return res;
 }
Example #14
 protected boolean hasNoRoleParsm(String roleName) {
   String noRoleParam = noRoleParams.get(roleName);
   return (noRoleParam != null && !StringUtil.isNullString(req.getParameter(noRoleParam)));
 }
Example #15
  /**
   * Get or create TdbTitle for the specified properties and TdbAu.
   *
   * @param props the properties
   * @param au the TdbAu
   * @return the corresponding TdbTitle
   */
  private TdbTitle getTdbTitle(Properties props, TdbAu au) {
    TdbTitle title = null;

    // get publisher name
    String publisherNameFromProps = getTdbPublisherName(props, au);

    // get the title name
    String titleNameFromProps = getTdbTitleName(props, au);

    // get the title ID
    String titleIdFromProps = getTdbTitleId(props, au);

    String titleId = titleIdFromProps;
    if (titleId == null) {
      // generate a titleId if one is not specified, using the
      // hash code of the combined title name and publisher names
      int hash = (titleNameFromProps + publisherNameFromProps).hashCode();
      titleId = (hash < 0) ? ("id:1" + (-hash)) : ("id:0" + hash);
    }

    // get publisher specified by property name
    TdbPublisher publisher = tdbPublisherMap.get(publisherNameFromProps);
    if (publisher != null) {
      // find title from publisher
      title = publisher.getTdbTitleById(titleId);
      if (title != null) {
        // warn that title name is different
        if (!title.getName().equals(titleNameFromProps)) {
          logger.warning(
              "Title for au \""
                  + au.getName()
                  + "\": \""
                  + titleNameFromProps
                  + "\" is different than existing title \""
                  + title.getName()
                  + "\" for id "
                  + titleId
                  + " -- using existing title.");
        }
        return title;
      }
    }

    if (publisher == null) {
      // warn of missing publisher name
      if (publisherNameFromProps.startsWith(UNKNOWN_PUBLISHER_PREFIX)) {
        logger.warning(
            "Publisher missing for au \""
                + au.getName()
                + "\" -- using \""
                + publisherNameFromProps
                + "\"");
      }

      // create new publisher for specified publisher name
      publisher = new TdbPublisher(publisherNameFromProps);
      tdbPublisherMap.put(publisherNameFromProps, publisher);
    }

    // warn of missing title name and/or id
    if (titleNameFromProps.startsWith(UNKNOWN_TITLE_PREFIX)) {
      logger.warning(
          "Title missing for au \"" + au.getName() + "\" -- using \"" + titleNameFromProps + "\"");
    }
    if (titleIdFromProps == null) {
      logger.debug2("Title ID missing for au \"" + au.getName() + "\" -- using " + titleId);
    }

    // create title and add to publisher
    title = new TdbTitle(titleNameFromProps, titleId);
    try {
      publisher.addTdbTitle(title);
    } catch (TdbException ex) {
      // shouldn't happen: title already exists in publisher
      logger.error(ex.getMessage(), ex);
    }

    return title;
  }
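The fallback title ID above is derived from the hash of the concatenated title and publisher names. A standalone sketch of that rule, for illustration only:

  // Illustrative only: reproduces the fallback titleId rule used in getTdbTitle
  static String fallbackTitleId(String titleName, String publisherName) {
    int hash = (titleName + publisherName).hashCode();
    // negative hashes are prefixed with "id:1", non-negative with "id:0"
    return (hash < 0) ? ("id:1" + (-hash)) : ("id:0" + hash);
  }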
Example #16
  /**
   * Add a TdbAu to a TdbTitle and TdbPublisher, and add links to the TdbTitle specified by the
   * properties.
   *
   * @param props the properties
   * @param au the TdbAu to add
   * @throws TdbException if the AU already exists in this Tdb
   */
  private void addTdbAu(Properties props, TdbAu au) throws TdbException {
    // add au for plugin assuming it is not a duplicate
    if (!addTdbAuForPlugin(au)) {
      // au already registered -- report existing au
      TdbAu existingAu = findExistingTdbAu(au);
      String titleName = getTdbTitleName(props, au);
      if (!titleName.equals(existingAu.getTdbTitle().getName())) {
        throw new TdbException(
            "Cannot add duplicate au entry: \""
                + au.getName()
                + "\" for title \""
                + titleName
                + "\" with same definition as existing au entry: \""
                + existingAu.getName()
                + "\" for title \""
                + existingAu.getTdbTitle().getName()
                + "\" to title database");
      } else if (!existingAu.getName().equals(au.getName())) {
        // error because it could lead to a missing AU -- one probably has a typo
        throw new TdbException(
            "Cannot add duplicate au entry: \""
                + au.getName()
                + "\" with the same definition as \""
                + existingAu.getName()
                + "\" for title \""
                + titleName
                + "\" to title database");
      } else {
        throw new TdbException(
            "Cannot add duplicate au entry: \""
                + au.getName()
                + "\" for title \""
                + titleName
                + "\" to title database");
      }
    }

    // get or create the TdbTitle for this AU
    TdbTitle title = getTdbTitle(props, au);
    try {
      // add AU to title
      title.addTdbAu(au);
    } catch (TdbException ex) {
      // if we can't add au to title, remove for plugin and re-throw exception
      removeTdbAuForPlugin(au);
      throw ex;
    }

    // process title links
    Map<String, Map<String, String>> linkMap = new HashMap<String, Map<String, String>>();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
      String key = "" + entry.getKey();
      String value = "" + entry.getValue();
      if (key.startsWith("journal.link.")) {
        // skip to link name
        String param = key.substring("link.".length());
        int i;
        if (((i = param.indexOf(".type")) < 0) && ((i = param.indexOf(".journalId")) < 0)) {
          logger.warning(
              "Ignoring nexpected link key for au \"" + au.getName() + "\" key: \"" + key + "\"");
        } else {
          // get link map for linkName
          String lname = param.substring(0, i);
          Map<String, String> lmap = linkMap.get(lname);
          if (lmap == null) {
            lmap = new HashMap<String, String>();
            linkMap.put(lname, lmap);
          }
          // add name and value to link map for link
          String name = param.substring(i + 1);
          lmap.put(name, value);
        }
      }
    }

    // add links to title from accumulated "type", "journalId" entries
    for (Map<String, String> lmap : linkMap.values()) {
      String name = lmap.get("type");
      String value = lmap.get("journalId");
      if ((name != null) && (value != null)) {
        try {
          TdbTitle.LinkType linkType = TdbTitle.LinkType.valueOf(name);
          title.addLinkToTdbTitleId(linkType, value);
        } catch (IllegalArgumentException ex) {
          logger.warning(
              "Ignoring unknown link type for au \"" + au.getName() + "\" name: \"" + name + "\"");
        }
      }
    }
  }
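The link-processing loop above groups properties named journal.link.<name>.type and journal.link.<name>.journalId into one link per <name>. A hypothetical pair it would accept (the type value must match a TdbTitle.LinkType constant; the names and values here are illustrative only):

  Properties props = new Properties();
  // one title link, grouped under the link name "1"
  props.setProperty("journal.link.1.type", "continuedBy");        // assumed LinkType name
  props.setProperty("journal.link.1.journalId", "id:0123456789"); // target TdbTitle id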
Example #17
  /**
   * Create a new TdbAu instance from the properties.
   *
   * @param props the properties
   * @return a TdbAu instance built from the properties
   */
  private TdbAu newTdbAu(Properties props) {
    String pluginId = (String) props.get("plugin");
    if (pluginId == null) {
      throw new IllegalArgumentException("TdbAu plugin ID not specified");
    }

    String auName = props.getProperty("title");
    if (auName == null) {
      throw new IllegalArgumentException("TdbAu title not specified");
    }

    // create a new TdbAu and set its elements
    TdbAu au = new TdbAu(auName, pluginId);

    // process attrs, and params
    Map<String, Map<String, String>> paramMap = new HashMap<String, Map<String, String>>();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
      String key = String.valueOf(entry.getKey());
      String value = String.valueOf(entry.getValue());
      if (key.startsWith("attributes.")) {
        // set attributes directly
        String name = key.substring("attributes.".length());
        try {
          au.setAttr(name, value);
        } catch (TdbException ex) {
          logger.warning(
              "Cannot set attribute \"" + name + "\" with value \"" + value + "\" -- ignoring");
        }

      } else if (key.startsWith("param.")) {
        // skip to param name
        String param = key.substring("param.".length());
        int i;
        if (((i = param.indexOf(".key")) < 0) && ((i = param.indexOf(".value")) < 0)) {
          logger.warning(
              "Ignoring unexpected param key for au \"" + auName + "\" key: \"" + key + "\"");
        } else {
          // get param map for pname
          String pname = param.substring(0, i);
          Map<String, String> pmap = paramMap.get(pname);
          if (pmap == null) {
            pmap = new HashMap<String, String>();
            paramMap.put(pname, pmap);
          }
          // add name and value to param map for pname
          String name = param.substring(i + 1);
          pmap.put(name, value);
        }

      } else if (!key.equals("title") // TdbAu has "name" property
          && !key.equals("plugin") // TdbAu has "pluginId" property
          && !key.equals("journalTitle") // TdbAu has "title" TdbTitle property
          && !key.startsWith("journal.")) { // TdbAu has "title" TdbTitle property
        // translate all other properties into AU properties
        try {
          au.setPropertyByName(key, value);
        } catch (TdbException ex) {
          logger.warning(
              "Cannot set property \"" + key + "\" with value \"" + value + "\" -- ignoring");
        }
      }
    }

    // set param from accumulated "key", and "value" entries
    for (Map<String, String> pmap : paramMap.values()) {
      String name = pmap.get("key");
      String value = pmap.get("value");
      if (name == null) {
        logger.warning("Ignoring property with null name");
      } else if (value == null) {
        logger.warning("Ignoring property \"" + name + "\" with null value");
      } else {
        try {
          au.setParam(name, value);
        } catch (TdbException ex) {
          logger.warning(
              "Cannot set param \"" + name + "\" with value \"" + value + "\" -- ignoring");
        }
      }
    }

    return au;
  }
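A hypothetical Properties instance in the shape newTdbAu parses, based only on the keys handled above (all values are illustrative):

  Properties props = new Properties();
  props.setProperty("plugin", "org.example.ExamplePlugin");      // required: becomes the pluginId
  props.setProperty("title", "Example Journal Volume 1");        // required: becomes the AU name
  props.setProperty("attributes.year", "2014");                  // -> au.setAttr("year", "2014")
  props.setProperty("param.1.key", "base_url");                  // grouped with param.1.value
  props.setProperty("param.1.value", "http://www.example.com/"); // -> au.setParam("base_url", ...)
  // any other key (except journalTitle and journal.*) becomes an AU property via setPropertyByName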
Example #18
  /**
   * Merge other Tdb into this one. Makes copies of otherTdb's non-duplicate TdbPublisher, TdbTitle,
   * and TdbAu objects and their non-duplicate children. The objects themselves are not merged.
   *
   * @param otherTdb the other Tdb
   * @throws TdbException if Tdb is sealed
   */
  public void copyFrom(Tdb otherTdb) throws TdbException {
    // ignore inappropriate Tdb values
    if ((otherTdb == null) || (otherTdb == this)) {
      return;
    }

    if (isSealed()) {
      throw new TdbException("Cannot add otherTdb AUs to sealed Tdb");
    }

    // merge non-duplicate publishers of otherTdb
    boolean tdbIsNew = tdbPublisherMap.isEmpty();
    for (TdbPublisher otherPublisher : otherTdb.getAllTdbPublishers().values()) {
      String pubName = otherPublisher.getName();
      TdbPublisher thisPublisher;
      boolean publisherIsNew = true;
      if (tdbIsNew) {
        // no need to check for existing publisher if TDB is new
        thisPublisher = new TdbPublisher(pubName);
        tdbPublisherMap.put(pubName, thisPublisher);
      } else {
        thisPublisher = tdbPublisherMap.get(pubName);
        publisherIsNew = (thisPublisher == null);
        if (publisherIsNew) {
          // copy publisher if not present in this Tdb
          thisPublisher = new TdbPublisher(pubName);
          tdbPublisherMap.put(pubName, thisPublisher);
        }
      }

      // merge non-duplicate titles of otherPublisher into thisPublisher
      for (TdbTitle otherTitle : otherPublisher.getTdbTitles()) {
        String otherId = otherTitle.getId();
        TdbTitle thisTitle;
        boolean titleIsNew = true;
        if (publisherIsNew) {
          // no need to check for existing title if publisher is new
          thisTitle = otherTitle.copyForTdbPublisher(thisPublisher);
          thisPublisher.addTdbTitle(thisTitle);
        } else {
          thisTitle = thisPublisher.getTdbTitleById(otherId);
          titleIsNew = (thisTitle == null);
          if (titleIsNew) {
            // copy title if not present in this publisher
            thisTitle = otherTitle.copyForTdbPublisher(thisPublisher);
            thisPublisher.addTdbTitle(thisTitle);
          } else if (!thisTitle.getName().equals(otherTitle.getName())) {
            // error because it could lead to a missing title -- one probably has a typo
            // (what about checking other title elements too?)
            logger.error(
                "Ignorning duplicate title entry: \""
                    + otherTitle.getName()
                    + "\" with the same ID as \""
                    + thisTitle.getName()
                    + "\"");
          }
        }

        // merge non-duplicate TdbAus of otherTitle into thisTitle
        for (TdbAu otherAu : otherTitle.getTdbAus()) {
          // no need to check for existing au if title is new
          String pluginId = otherAu.getPluginId();
          if (titleIsNew || !getTdbAuIds(pluginId).contains(otherAu.getId())) {
            // always succeeds: we've already checked for duplicates
            TdbAu thisAu = otherAu.copyForTdbTitle(thisTitle);
            addTdbAuForPlugin(thisAu);
          } else {
            TdbAu thisAu = findExistingTdbAu(otherAu);
            if (!thisAu.getTdbTitle().getName().equals(otherAu.getTdbTitle().getName())) {
              if (!thisAu.getName().equals(otherAu.getName())) {
                logger.error(
                    "Ignorning duplicate au entry: \""
                        + otherAu.getName()
                        + "\" for title \""
                        + otherAu.getTdbTitle().getName()
                        + "\" with same definion as existing au entry: \""
                        + thisAu.getName()
                        + "\" for title \""
                        + thisAu.getTdbTitle().getName()
                        + "\"");
              } else {
                logger.error(
                    "Ignorning duplicate au entry: \""
                        + otherAu.getName()
                        + "\" for title \""
                        + otherAu.getTdbTitle().getName()
                        + "\" with same definion as existing one for title \""
                        + thisAu.getTdbTitle().getName()
                        + "\"");
              }
            } else if (!thisAu.getName().equals(otherAu.getName())) {
              // error because it could lead to a missing AU -- one probably has a typo
              logger.error(
                  "Ignorning duplicate au entry: \""
                      + otherAu.getName()
                      + "\" with the same definition as \""
                      + thisAu.getName()
                      + "\" for title \""
                      + otherAu.getTdbTitle().getName());
            } else {
              logger.warning(
                  "Ignoring duplicate au entry: \""
                      + otherAu.getName()
                      + "\" for title \""
                      + otherAu.getTdbTitle().getName());
            }
          }
        }
      }
    }
  }
Example #19
 /**
  * Get the publisher for the specified name.
  *
  * @param name the publisher name
  * @return the publisher, or <code>null</code> if not found
  */
 public TdbPublisher getTdbPublisher(String name) {
   return (tdbPublisherMap != null) ? tdbPublisherMap.get(name) : null;
 }
Example #20
 public PlatformUtil.DF getRepositoryDF(String repoName) {
   if (repoMap != null) return (PlatformUtil.DF) repoMap.get(repoName);
   return super.getRepositoryDF(repoName);
 }
Example #21
 public String getFeatureVersion(Plugin.Feature feat) {
   if (featureVersion == null) {
     return null;
   }
   return featureVersion.get(feat);
 }
Example #22
 protected void initMimeMap() throws PluginException.InvalidDefinition {
   for (Iterator iter = definitionMap.entrySet().iterator(); iter.hasNext(); ) {
     Map.Entry ent = (Map.Entry) iter.next();
     String key = (String) ent.getKey();
     Object val = ent.getValue();
     if (key.endsWith(DefinableArchivalUnit.SUFFIX_LINK_EXTRACTOR_FACTORY)) {
       String mime = stripSuffix(key, DefinableArchivalUnit.SUFFIX_LINK_EXTRACTOR_FACTORY);
       if (val instanceof String) {
         String factName = (String) val;
         log.debug(mime + " link extractor: " + factName);
         MimeTypeInfo.Mutable mti = mimeMap.modifyMimeTypeInfo(mime);
         LinkExtractorFactory fact =
             (LinkExtractorFactory) newAuxClass(factName, LinkExtractorFactory.class);
         mti.setLinkExtractorFactory(fact);
       }
     } else if (key.endsWith(DefinableArchivalUnit.SUFFIX_CRAWL_FILTER_FACTORY)) {
       // XXX This clause must precede the one for SUFFIX_HASH_FILTER_FACTORY
       // XXX unless/until that key is changed to not be a terminal substring
       // XXX of this one
       String mime = stripSuffix(key, DefinableArchivalUnit.SUFFIX_CRAWL_FILTER_FACTORY);
       if (val instanceof String) {
         String factName = (String) val;
         log.debug(mime + " crawl filter: " + factName);
         MimeTypeInfo.Mutable mti = mimeMap.modifyMimeTypeInfo(mime);
         FilterFactory fact = (FilterFactory) newAuxClass(factName, FilterFactory.class);
         mti.setCrawlFilterFactory(fact);
       }
     } else if (key.endsWith(DefinableArchivalUnit.SUFFIX_HASH_FILTER_FACTORY)) {
       String mime = stripSuffix(key, DefinableArchivalUnit.SUFFIX_HASH_FILTER_FACTORY);
       if (val instanceof String) {
         String factName = (String) val;
         log.debug(mime + " filter: " + factName);
         MimeTypeInfo.Mutable mti = mimeMap.modifyMimeTypeInfo(mime);
         FilterFactory fact = (FilterFactory) newAuxClass(factName, FilterFactory.class);
         mti.setHashFilterFactory(fact);
       }
     } else if (key.endsWith(DefinableArchivalUnit.SUFFIX_FETCH_RATE_LIMIT)) {
       String mime = stripSuffix(key, DefinableArchivalUnit.SUFFIX_FETCH_RATE_LIMIT);
       if (val instanceof String) {
         String rate = (String) val;
         log.debug(mime + " fetch rate: " + rate);
         MimeTypeInfo.Mutable mti = mimeMap.modifyMimeTypeInfo(mime);
         RateLimiter limit = mti.getFetchRateLimiter();
         if (limit != null) {
           limit.setRate(rate);
         } else {
           mti.setFetchRateLimiter(new RateLimiter(rate));
         }
       }
     } else if (key.endsWith(DefinableArchivalUnit.SUFFIX_LINK_REWRITER_FACTORY)) {
       String mime = stripSuffix(key, DefinableArchivalUnit.SUFFIX_LINK_REWRITER_FACTORY);
       String factName = (String) val;
       log.debug(mime + " link rewriter: " + factName);
       MimeTypeInfo.Mutable mti = mimeMap.modifyMimeTypeInfo(mime);
       LinkRewriterFactory fact =
           (LinkRewriterFactory) newAuxClass(factName, LinkRewriterFactory.class);
       mti.setLinkRewriterFactory(fact);
     } else if (key.endsWith(DefinableArchivalUnit.SUFFIX_METADATA_EXTRACTOR_FACTORY_MAP)) {
       String mime = stripSuffix(key, DefinableArchivalUnit.SUFFIX_METADATA_EXTRACTOR_FACTORY_MAP);
       Map factNameMap = (Map) val;
       Map factClassMap = new HashMap();
       MimeTypeInfo.Mutable mti = mimeMap.modifyMimeTypeInfo(mime);
       for (Iterator it = factNameMap.keySet().iterator(); it.hasNext(); ) {
         String mdTypes = (String) it.next();
         String factName = (String) factNameMap.get(mdTypes);
         log.debug(mime + " (" + mdTypes + ") metadata extractor: " + factName);
         for (String mdType : (List<String>) StringUtil.breakAt(mdTypes, ";")) {
           setMdTypeFact(factClassMap, mdType, factName);
         }
       }
       mti.setFileMetadataExtractorFactoryMap(factClassMap);
     }
   }
 }
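The keys this loop consumes are formed by appending a DefinableArchivalUnit suffix constant to a MIME type. A hypothetical example of what such a key/value pair would look like (the factory class name is illustrative only):

 // Illustrative only: a link-extractor factory registered for HTML content
 String key = "text/html" + DefinableArchivalUnit.SUFFIX_LINK_EXTRACTOR_FACTORY;
 String value = "org.example.MyLinkExtractorFactory"; // hypothetical factory class name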