/**
  * Processes the {@value #OH_PROP} configuration property, if there is one.
  *
  * <p>The property value is a space-delimited list of obligation handler names; each name must
  * have a matching section in the INI file from which the handler is built.
  *
  * @param iniFile INI configuration file being processed
  * @param configSection current configuration section being processed
  * @param configBuilder current builder being constructed from the parser
  * @return obligation processing service holding every configured handler
  * @throws ConfigurationException thrown if there is a problem building the obligations handlers
  */
 public static ObligationService processObligationHandlers(
     Ini iniFile, Ini.Section configSection, AbstractConfigurationBuilder<?> configBuilder)
     throws ConfigurationException {
   ObligationService service = new ObligationService();
   if (configSection.containsKey(OH_PROP)) {
     StringTokenizer obligationHandlers = new StringTokenizer(configSection.get(OH_PROP), " ");
     while (obligationHandlers.hasMoreTokens()) {
       String obligationHandlerName = Strings.safeTrimOrNullString(obligationHandlers.nextToken());
       // Skip blank tokens before querying the INI file. The original code called
       // iniFile.containsKey(null) for blank tokens and only checked for null afterwards,
       // making the null guard unreachable for its intended purpose.
       if (obligationHandlerName == null) {
         continue;
       }
       if (!iniFile.containsKey(obligationHandlerName)) {
         String errorMsg =
             "INI configuration file does not contain a configuration section for obligation handler "
                 + obligationHandlerName;
         LOG.error(errorMsg);
         throw new ConfigurationException(errorMsg);
       }
       service.addObligationhandler(
           buildObligationHandler(iniFile.get(obligationHandlerName), configBuilder));
       LOG.info("Added obligation handler: {}", obligationHandlerName);
     }
   }
   return service;
 }
  /**
   * Looks up the Minecraft client version recorded in Forge's MCP configuration file.
   *
   * <p>The result is cached in {@code mcVersion}, so the file is read at most once.
   *
   * @throws MojoFailureException if the configuration file can't be read
   */
  private String getMinecraftVersion() throws MojoFailureException {
    if (mcVersion == null) {
      try {
        final File versionFile = new File(forgeDir, "mcp/conf/version.cfg");
        mcVersion = new Ini(versionFile).get("VERSION").get("ClientVersion");
      } catch (Exception caught) {
        throw new MojoFailureException("error reading MCP version file", caught);
      }
    }
    return mcVersion;
  }
Example #3
0
  /**
   * Dispatches to offline or online processing based on the {@code enabled} flag of the
   * {@code OFFLINE} section.
   *
   * @param ini parsed configuration
   * @throws Exception propagated from the selected run mode
   */
  private static void run(Ini ini) throws Exception {
    String enabled = ini.get("OFFLINE").get("enabled").trim();

    if (enabled.equalsIgnoreCase("true")) {
      runOffline(ini);
    } else if (enabled.equalsIgnoreCase("false")) {
      runOnline(ini);
    } else {
      throw new RuntimeException(
          "Offline mode must be either true or false - current value = " + enabled);
    }
  }
Example #4
0
  /**
   * Authenticates against the OAI endpoint using credentials from the {@code HARVESTER} section;
   * the password is read (and trimmed) from the file named by the {@code passwordFile} key.
   *
   * @param ini parsed configuration
   * @throws Exception if the password file cannot be read or authentication fails
   */
  private static void initOai(Ini ini) throws Exception {
    Section harvester = ini.get("HARVESTER");

    String username = harvester.get("username");
    File passwordFile = new File(harvester.get("passwordFile"));
    String password = Files.readFile(passwordFile).trim();

    authenticate(username, password);
  }
Example #5
0
 /**
  * Runs the optional export script configured under {@code Reindex / extractScript}.
  *
  * <p>A missing or empty key makes this a no-op; an I/O failure while running the script aborts
  * the whole reindex process via {@code System.exit(1)}.
  */
 private static void runExportScript() {
   String extractScript = configIni.get("Reindex", "extractScript");
   // Guard against a missing key: Ini.get returns null there, and the original code would have
   // thrown a NullPointerException on extractScript.length().
   if (extractScript != null && extractScript.length() > 0) {
     logger.info("Running export script");
     try {
       String reindexResult = SystemUtil.executeCommand(extractScript, logger);
       logger.info("Result of extractScript (" + extractScript + ") was " + reindexResult);
     } catch (IOException e) {
       logger.error("Error running extract script, stopping reindex process", e);
       System.exit(1);
     }
   }
 }
 /**
  * Registers every child of the {@code EXPLOSIONS} section as a particle in the given
  * entities data.
  *
  * @param entitiesData registry the particles are added to
  * @param ini parsed INI file containing an EXPLOSIONS section
  * @throws SlickException if the particle resources cannot be loaded
  */
 public void readExplosions(EntitiesData entitiesData, Ini ini) throws SlickException {
   Profile.Section explosionsSection = ini.get("EXPLOSIONS");
   for (String explosionId : explosionsSection.childrenNames()) {
     Profile.Section child = explosionsSection.getChild(explosionId);
     entitiesData.addParticle(
         explosionId,
         child.get("image", String.class),
         child.get("width", Integer.class),
         child.get("height", Integer.class),
         child.get("fps", Float.class));
   }
 }
Example #7
0
 /**
  * Reads the counter definitions of the {@code log.click} section and stores each counter's
  * name/field parameters into {@code _hashConfig}, keyed by {@code <section>.<param>}.
  *
  * <p>Expected INI layout is shown in the sample below: a comma-separated {@code counter} list
  * with {@code counter.<id>.name} / {@code counter.<id>.field} entries per id.
  */
 public static void main(String[] args) {
   //        [log.click]
   // counter=c1,c2,c3
   // counter.c1.name =
   // counter.c1.field =
   // sum=s1,s2
   // sum.s1.name = fdsfsdf
   // sum.s1.field = 1,8
   // sum.s1.value = 5
   // sum.s1.if = 8:win
   // sum.s2.name = ffff
   String section = "log.click";
   String sectionCounters = ini.get(section, "counter");
   // Guard against a missing "counter" key; the original unconditionally called split() on a
   // possibly null value.
   if (sectionCounters != null) {
     for (String s : sectionCounters.split(",")) {
       s = s.trim();
       String counterNameParam = "counter" + "." + s + ".name";
       String counterNameValue = ini.get(section, counterNameParam);
       String counterFieldParam = "counter" + "." + s + ".field";
       String counterFieldValue = ini.get(section, counterFieldParam);
       _hashConfig.put(section + "." + counterNameParam, counterNameValue);
       _hashConfig.put(section + "." + counterFieldParam, counterFieldValue);
     }
   }
   // NOTE(review): the "sum" definitions are read but never processed — presumably unfinished.
   String sectionSums = ini.get(section, "sum");
 }
 /**
  * Registers every child of the {@code STRUCTURES} section as a structure in the given
  * entities data; a missing {@code explosion} key defaults to {@code "UNKNOWN"}.
  *
  * @param entitiesData registry the structures are added to
  * @param ini parsed INI file containing a STRUCTURES section
  * @throws SlickException if the structure resources cannot be loaded
  */
 public void readStructures(EntitiesData entitiesData, Ini ini) throws SlickException {
   Profile.Section structuresSection = ini.get("STRUCTURES");
   for (String structureId : structuresSection.childrenNames()) {
     Profile.Section child = structuresSection.getChild(structureId);
     entitiesData.addStructure(
         structureId,
         child.get("image", String.class),
         child.get("width", Integer.class),
         child.get("height", Integer.class),
         child.get("sight", Integer.class),
         child.get("hitpoints", Integer.class),
         child.get("explosion", String.class, "UNKNOWN"));
   }
 }
Example #9
0
  /**
   * Runs the metawiki extraction process: builds the fetch task and schedules it to repeat with
   * a fixed delay of {@code pollInterval} seconds (read from the {@code HARVESTER} section).
   *
   * @param ini parsed configuration
   * @throws Exception if the task cannot be created
   */
  private static void runOnline(Ini ini) throws Exception {
    Section harvester = ini.get("HARVESTER");
    int pollInterval = Integer.parseInt(harvester.get("pollInterval"));
    String lastResponseDateFile = harvester.get("lastResponseDateFile").trim();

    FetchRecordTask task = createOnlineTask(ini);
    // Wrapper persists the last UTC response date after each run so a restart can resume.
    Runnable taskWrapper = new SaveLastUtcResponseDateTaskWrapper(task, lastResponseDateFile);

    ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();

    logger.info("Starting online task");
    // Fixed *delay* (not fixed rate): the interval is measured from the end of each run.
    executor.scheduleWithFixedDelay(taskWrapper, 0, pollInterval, TimeUnit.SECONDS);
    logger.info("Online task finished");
  }
 /**
  * Registers every child of the {@code WEAPONS} section as a projectile in the given
  * entities data.
  *
  * @param entitiesData registry the projectiles are added to
  * @param ini parsed INI file containing a WEAPONS section
  * @throws SlickException if the projectile resources cannot be loaded
  */
 private void readWeapons(EntitiesData entitiesData, Ini ini) throws SlickException {
   Profile.Section weaponsSection = ini.get("WEAPONS");
   for (String weaponId : weaponsSection.childrenNames()) {
     Profile.Section child = weaponsSection.getChild(weaponId);
     entitiesData.addProjectile(
         weaponId,
         child.get("image", String.class),
         child.get("width", Integer.class),
         child.get("height", Integer.class),
         child.get("explosion", String.class),
         child.get("movespeed", Float.class),
         child.get("damage", Integer.class),
         child.get("facings", Integer.class));
   }
 }
 /**
  * Registers every child of the {@code UNITS} section as a unit in the given entities data.
  * Most keys carry defaults, so sparsely specified units still load.
  *
  * @param entitiesData registry the units are added to
  * @param ini parsed INI file containing a UNITS section
  * @throws SlickException if the unit resources cannot be loaded
  */
 public void readUnits(EntitiesData entitiesData, Ini ini) throws SlickException {
   Profile.Section unitsSection = ini.get("UNITS");
   for (String unitId : unitsSection.childrenNames()) {
     Profile.Section child = unitsSection.getChild(unitId);
     entitiesData.addUnit(
         unitId,
         child.get("image", String.class, "no-image-provided"),
         child.get("width", Integer.class, 1),
         child.get("height", Integer.class, 1),
         child.get("sight", Integer.class, 1),
         child.get("movespeed", Float.class, 0f),
         child.get("turnspeed", Float.class, 0f),
         child.get("hitpoints", Integer.class, 0),
         child.get("weapon", String.class, UNKNOWN),
         child.get("explosion", String.class));
   }
 }
Example #12
0
  /**
   * Configures log4j either from the file named by {@code LOGGING/log4jConfigFile} or, when no
   * file is given, with a plain console appender.
   *
   * @param ini parsed configuration
   */
  private static void initLoggers(Ini ini) {
    // A hack to get rid of double initialization caused by OAI-Harvester
    new ListRecords();
    Logger.getRootLogger().removeAllAppenders();

    String log4jConfigFile = ini.get("LOGGING").get("log4jConfigFile");

    if (log4jConfigFile == null) {
      System.out.println("No log config file specified - using default settings");
      Logger.getRootLogger().addAppender(new ConsoleAppender(new SimpleLayout()));
    } else {
      System.out.println("Loading log config from file: '" + log4jConfigFile + "'");
      PropertyConfigurator.configure(log4jConfigFile);
    }
  }
Example #13
0
  /**
   * Builds the fetch task that harvests records from the wiki's OAI repository.
   *
   * <p>Configuration comes from the {@code HARVESTER} section: the start date is "now" when
   * {@code startNow} is true, otherwise it is read back from {@code lastResponseDateFile}.
   *
   * @param ini parsed configuration
   * @return fully wired task ready for scheduling
   * @throws Exception if the start date or the extraction workflow cannot be set up
   */
  private static FetchRecordTask createOnlineTask(Ini ini) throws Exception {
    // Begin ini section
    Section section = ini.get("HARVESTER");

    String lastResponseDateFile = section.get("lastResponseDateFile");
    String startNow = section.get("startNow");

    // Constant-first comparison: a missing startNow key now falls back to the persisted start
    // date instead of throwing a NullPointerException.
    String startDate;
    if ("true".equalsIgnoreCase(startNow)) startDate = getStartDateNow();
    else startDate = readStartDate(lastResponseDateFile);

    String baseWikiUri = section.get("baseWikiUri");

    String oaiUri = baseWikiUri + "Special:OAIRepository";
    int sleepInterval = Integer.parseInt(section.get("sleepInterval"));
    // End ini section

    // Create an instance of the retrieval facade
    RetrievalFacade retrieval = new RetrievalFacade(oaiUri, startDate);

    // Create a task object for retrieving metadata
    FetchRecordTask task = retrieval.newFetchRecordTask(baseWikiUri);
    task.setSleepInterval(sleepInterval);

    // Filter what to actually retrieve
    IFilter<RecordMetadata> metadataFilter =
        new StatisticWrapperFilter<RecordMetadata>(new MetaDbpediaMetadataFilter());
    task.setMetadataFilter(metadataFilter);

    // append the meta extraction workflow to the task
    task.setRecordHandler(createWorkflow(ini));

    return task;
  }
Example #14
0
  /**
   * Loads site and materials-request settings from the INI configuration.
   *
   * <p>Missing required values ({@code Site/installPath}, {@code MaterialsRequest/emailFrom})
   * abort loading; missing optional values are only logged as warnings.
   *
   * @param ini parsed configuration file
   * @param logger destination for validation errors and warnings
   * @return true when all required settings were present
   */
  protected boolean loadConfig(Ini ini, Logger logger) {
    vufindUrl = Util.cleanIniValue(ini.get("Site", "url"));

    installPath = ini.get("Site", "installPath");
    if (installPath == null || installPath.length() == 0) {
      // Message corrected: the value lives in the Site section's installPath key, not "local".
      logger.error(
          "Local path to vufind installation not found in Site Settings.  Please specify location in installPath key.");
      return false;
    }
    emailFrom = ini.get("MaterialsRequest", "emailFrom");
    if (emailFrom == null || emailFrom.length() == 0) {
      // Message corrected: the original copy-paste referred to an unrelated "emailPort" key.
      logger.error(
          "Email From address not found in MaterialsRequest Settings.  Please specify address in emailFrom key.");
      return false;
    }

    libraryName = ini.get("Site", "libraryName");
    if (libraryName == null || libraryName.length() == 0) {
      logger.warn("Library Name not found in Site Settings.  Please add libraryName key.");
    }
    circulationPhone = ini.get("MaterialsRequest", "phone");
    if (circulationPhone == null || circulationPhone.length() == 0) {
      logger.warn(
          "Circulation Department Phone Number not found in MaterialsRequest Settings.  Please add phone key.");
    }
    circulationEmail = ini.get("MaterialsRequest", "email");
    if (circulationEmail == null || circulationEmail.length() == 0) {
      // Message corrected: this is the "email" key, not "circulationPhone".
      logger.warn(
          "Circulation Department Email not found in MaterialsRequest Settings.  Please add email key.");
    }
    circulationUrl = ini.get("MaterialsRequest", "url");
    if (circulationUrl == null || circulationUrl.length() == 0) {
      logger.warn(
          "Circulation Department URL not found in MaterialsRequest Settings.  Please add url key.");
    }
    return true;
  }
Example #15
0
  /**
   * Builds the workflow for extracting property and class definitions from the meta-wiki.
   *
   * <p>Reads backend, extractor, namespace-mapping and harvester settings from the INI file,
   * verifies write access to the Virtuoso backend with a test insert, then wires up a handler
   * chain: records are classified by page type, article pages (types "200"/"202") are parsed
   * and run through the clean-up and TBox extractors, and deleted pages also reach the TBox
   * extractor so stale triples can be removed.
   *
   * @param ini parsed configuration file
   * @return the root handler (a multiplexer) for incoming records
   * @throws Exception if the test insert against the backend fails
   */
  private static IHandler<IRecord> createWorkflow(Ini ini) throws Exception {
    // Backend connection settings (Virtuoso) plus the two target graph names.
    Section backendSection = ini.get("BACKEND_VIRTUOSO");
    String dataGraphName = backendSection.get("graphNameData");
    String metaGraphName = backendSection.get("graphNameMeta");
    String uri = backendSection.get("uri");
    String username = backendSection.get("username");
    String password = backendSection.get("password");

    // Class.forName("virtuoso.jdbc4.Driver").newInstance();
    // Connection con = DriverManager.getConnection(uri, username, password);

    ConnectionWrapper connectionWrapper = new ConnectionWrapper(uri, username, password);

    // URI prefixes used when generating extracted triples.
    Section extractorSection = ini.get("PROPERTY_DEFINITION_EXTRACTOR");
    String expressionPrefix = extractorSection.get("expressionPrefix");
    String propertyPrefix = extractorSection.get("propertyPrefix");
    String reifierPrefix = extractorSection.get("reifierPrefix");

    // File mapping namespace prefixes to full URIs (fed to PrefixResolver below).
    Section namespaceMappingSection = ini.get("NAMESPACE_MAPPING");
    String filename = namespaceMappingSection.get("filename");

    Section harvesterSection = ini.get("HARVESTER");
    String technicalBaseUri = harvesterSection.get("technicalWikiUri");

    /*
    VirtGraph dataGraph = new VirtGraph (graphNameData, uri, username, password);
    ISparulExecutor dataSparulExecutor = new VirtuosoJenaSparulExecutor(dataGraph);

    VirtGraph metaGraph = new VirtGraph (graphNameMeta, uri, username, password);
    ISparulExecutor metaSparulExecutor = new VirtuosoJenaSparulExecutor(metaGraph);
     */

    /*
    ISparulExecutor dataSparulExecutor =
    	new SparulStatisticExecutorWrapper(
    			new VirtuosoJdbcSparulExecutorPreconditionWrapper(
    				connectionWrapper,
    				new VirtuosoJdbcSparulExecutor(dataGraphName)));

    ISparulExecutor metaSparulExecutor =
    	new SparulStatisticExecutorWrapper(
    			new VirtuosoJdbcSparulExecutorPreconditionWrapper(
    					connectionWrapper,
    					new VirtuosoJdbcSparulExecutor(metaGraphName)));
    */
    // Sparul executor with default graph set to null
    ISparulExecutor nullSparulExecutor =
        new SparulStatisticExecutorWrapper(
            new VirtuosoJdbcSparulExecutorPreconditionWrapper(
                connectionWrapper, new VirtuosoJdbcSparulExecutor(null)));

    // Fail fast: an empty insert exercises the TTLP privilege before any real work starts.
    logger.info("Sending a test query to check TTLP privileges");
    try {
      nullSparulExecutor.insert(new ArrayList<RDFTriple>(), dataGraphName);
    } catch (Exception e) {
      logger.fatal(ExceptionUtil.toString(e));
      throw e;
    }
    logger.info("Success");

    insertSystemTriples(nullSparulExecutor, dataGraphName, metaGraphName);

    // Just for testing... remove this when done.
    // dataSparulExecutor.executeSelect("Select * {?s ?p ?o . Filter(?o = \"Birthplace\") . }");

    PrefixResolver prefixResolver = new PrefixResolver(new File(filename));
    // System.out.println(prefixResolver.resolve("rdf:sameAs"));
    // System.exit(0);

    // MultiHandler is a multiplexer for IHandler<Record> instances
    MultiHandler<IRecord> handlerList = new MultiHandler<IRecord>();

    // Attach a category delegation handler - this handler delegates
    // to other handlers depending on a classification
    CategoryHandler<IRecord, String> classifiedHandler =
        new CategoryHandler<IRecord, String>(new PageTypeRecordClassifier<IRecord>());
    handlerList.handlers().add(classifiedHandler);

    // for articles
    MultiHandler<IRecord> articleHandlerList = new MultiHandler<IRecord>();
    MultiHandler<IRecord> deletionHandlerList = new MultiHandler<IRecord>();

    // Route records by classifier output. NOTE(review): the category codes "200"/"202" appear
    // to be page-type identifiers from PageTypeRecordClassifier — confirm against that class.
    // classifiedHandler.addHandler(articleHandlerList, "2");
    classifiedHandler.addHandler(articleHandlerList, "200");
    classifiedHandler.addHandler(articleHandlerList, "202");

    classifiedHandler.addHandler(deletionHandlerList, "deleted");

    // Attach the parsers for class and property definitions
    ParseContentRecordHandler parser = new ParseContentRecordHandler();
    articleHandlerList.handlers().add(parser);

    /*
    ComplexGroupTripleManager sys =
    	new ComplexGroupTripleManager(dataSparulExecutor, metaSparulExecutor);
    */
    // Clean-up runs before the TBox extractor so stale property definitions are removed first.
    PropertyDefinitionCleanUpExtractor cleanUp =
        new PropertyDefinitionCleanUpExtractor(
            propertyPrefix, dataGraphName, metaGraphName, nullSparulExecutor);

    articleHandlerList.handlers().add(cleanUp);

    TBoxExtractor x =
        new TBoxExtractor(
            technicalBaseUri,
            nullSparulExecutor,
            dataGraphName,
            metaGraphName,
            reifierPrefix,
            propertyPrefix,
            expressionPrefix,
            prefixResolver);
    // The TBox extractor handles both updates and deletions.
    articleHandlerList.handlers().add(x);
    deletionHandlerList.handlers().add(x);

    // Set up the extractor, which renames resources when a page is moved
    // This extractor needs to do alot of more work than what is currently
    // implemented - it basically needs to genereate tasks which update
    // all affected wiki pages which reference the resources being renamed
    /*
    RedirectRenameExtractor y =
    	new RedirectRenameExtractor(
    			nullSparulExecutor,
    			metaGraphName,
    			new Predicate<String>() {
    				@Override
    				public boolean evaluate(String arg)
    				{
    					return arg == null ? null : arg.startsWith("User:DBpedia-Bot/ontology/");
    				}
    			},
    			new Transformer<String, RDFNode>() {

    				@Override
    				public RDFNode transform(String arg)
    				{
    					String tmp = arg.substring("User:DBpedia-Bot/ontology/".length());
    					return new RDFResourceNode(IRI.create("http://dbpedia.org/ontology/" + tmp));
    				}

    			}
    	);
    articleHandlerList.handlers().add(y);
    */

    return handlerList;
  }
Example #16
0
  /**
   * One-time setup for the reindex run: clears old logs, configures log4j, loads and validates
   * config.ini, opens the VuFind and eContent database connections, and writes the starting
   * reindex_log row (its generated id is kept in {@code reindexLogId}).
   *
   * <p>Any unrecoverable problem terminates the JVM via {@code System.exit}.
   */
  private static void initializeReindex() {
    System.out.println("Starting to initialize system");
    // Delete the existing reindex.log file
    File solrmarcLog = new File("../../sites/" + serverName + "/logs/reindex.log");
    if (solrmarcLog.exists()) {
      solrmarcLog.delete();
    }
    // Also remove rotated copies reindex.log.1 .. reindex.log.4.
    for (int i = 1; i <= 4; i++) {
      solrmarcLog = new File("../../sites/" + serverName + "/logs/reindex.log." + i);
      if (solrmarcLog.exists()) {
        solrmarcLog.delete();
      }
    }
    solrmarcLog = new File("solrmarc.log");
    if (solrmarcLog.exists()) {
      solrmarcLog.delete();
    }
    // And the rotated solrmarc.log.1 .. solrmarc.log.4 in the working directory.
    for (int i = 1; i <= 4; i++) {
      solrmarcLog = new File("solrmarc.log." + i);
      if (solrmarcLog.exists()) {
        solrmarcLog.delete();
      }
    }

    // Initialize the logger
    File log4jFile = new File("../../sites/" + serverName + "/conf/log4j.reindex.properties");
    if (log4jFile.exists()) {
      PropertyConfigurator.configure(log4jFile.getAbsolutePath());
    } else {
      System.out.println("Could not find log4j configuration " + log4jFile.getAbsolutePath());
      System.exit(1);
    }

    logger.info("Starting Reindex for " + serverName);

    // Load the configuration file
    String configName = "../../sites/" + serverName + "/conf/config.ini";
    logger.info("Loading configuration from " + configName);
    File configFile = new File(configName);
    if (!configFile.exists()) {
      logger.error("Could not find confiuration file " + configName);
      System.exit(1);
    }

    // Parse the configuration file
    // NOTE(review): load failures are only logged here — execution continues with an empty Ini
    // and relies on the solrPort check below to abort. Confirm that is intentional.
    configIni = new Ini();
    try {
      configIni.load(new FileReader(configFile));
    } catch (InvalidFileFormatException e) {
      logger.error("Configuration file is not valid.  Please check the syntax of the file.", e);
    } catch (FileNotFoundException e) {
      logger.error(
          "Configuration file could not be found.  You must supply a configuration file in conf called config.ini.",
          e);
    } catch (IOException e) {
      logger.error("Configuration file could not be read.", e);
    }
    // Required: the Solr port the reindex talks to.
    solrPort = configIni.get("Reindex", "solrPort");
    if (solrPort == null || solrPort.length() == 0) {
      logger.error(
          "You must provide the port where the solr index is loaded in the import configuration file");
      System.exit(1);
    }

    // Optional feature flags; each keeps its compiled-in default when the key is absent.
    String updateSolrStr = configIni.get("Reindex", "updateSolr");
    if (updateSolrStr != null) {
      updateSolr = Boolean.parseBoolean(updateSolrStr);
    }
    String updateResourcesStr = configIni.get("Reindex", "updateResources");
    if (updateResourcesStr != null) {
      updateResources = Boolean.parseBoolean(updateResourcesStr);
    }
    String exportStrandsCatalogStr = configIni.get("Reindex", "exportStrandsCatalog");
    if (exportStrandsCatalogStr != null) {
      exportStrandsCatalog = Boolean.parseBoolean(exportStrandsCatalogStr);
    }
    String exportOPDSCatalogStr = configIni.get("Reindex", "exportOPDSCatalog");
    if (exportOPDSCatalogStr != null) {
      exportOPDSCatalog = Boolean.parseBoolean(exportOPDSCatalogStr);
    }
    String loadEContentFromMarcStr = configIni.get("Reindex", "loadEContentFromMarc");
    if (loadEContentFromMarcStr != null) {
      loadEContentFromMarc = Boolean.parseBoolean(loadEContentFromMarcStr);
    }
    String updateAlphaBrowseStr = configIni.get("Reindex", "updateAlphaBrowse");
    if (updateAlphaBrowseStr != null) {
      updateAlphaBrowse = Boolean.parseBoolean(updateAlphaBrowseStr);
    }

    logger.info("Setting up database connections");
    // Setup connections to vufind and econtent databases
    String databaseConnectionInfo =
        Util.cleanIniValue(configIni.get("Database", "database_vufind_jdbc"));
    if (databaseConnectionInfo == null || databaseConnectionInfo.length() == 0) {
      logger.error(
          "VuFind Database connection information not found in Database Section.  Please specify connection information in database_vufind_jdbc.");
      System.exit(1);
    }
    try {
      vufindConn = DriverManager.getConnection(databaseConnectionInfo);
    } catch (SQLException e) {
      logger.error("Could not connect to vufind database", e);
      System.exit(1);
    }

    String econtentDBConnectionInfo =
        Util.cleanIniValue(configIni.get("Database", "database_econtent_jdbc"));
    if (econtentDBConnectionInfo == null || econtentDBConnectionInfo.length() == 0) {
      logger.error(
          "Database connection information for eContent database not found in Database Section.  Please specify connection information as database_econtent_jdbc key.");
      System.exit(1);
    }
    try {
      econtentConn = DriverManager.getConnection(econtentDBConnectionInfo);
    } catch (SQLException e) {
      logger.error("Could not connect to econtent database", e);
      System.exit(1);
    }

    // Start a reindex log entry
    try {
      logger.info("Creating log entry for index");
      PreparedStatement createLogEntryStatement =
          vufindConn.prepareStatement(
              "INSERT INTO reindex_log (startTime) VALUES (?)",
              PreparedStatement.RETURN_GENERATED_KEYS);
      // startTime is stored as Unix epoch seconds.
      createLogEntryStatement.setLong(1, new Date().getTime() / 1000);
      createLogEntryStatement.executeUpdate();
      ResultSet generatedKeys = createLogEntryStatement.getGeneratedKeys();
      if (generatedKeys.next()) {
        reindexLogId = generatedKeys.getLong(1);
      }
    } catch (SQLException e) {
      logger.error("Unable to create log entry for reindex process", e);
      // NOTE(review): exits with status 0 here while every other failure uses 1 — likely an
      // oversight; confirm before changing.
      System.exit(0);
    }
  }