/**
 * Reacts to a {@link VirtuosoException} raised by the backend. Plain SQL errors are not
 * recoverable by retrying, so they are rethrown to the caller; every other error code is
 * treated as a (possibly transient) connection problem: it is logged and a blocking
 * reconnect loop is entered.
 *
 * @param e the exception reported by the Virtuoso backend
 * @throws VirtuosoException if the error code is {@code SQLERROR} (statement-level failure)
 */
private void handleException(VirtuosoException e) throws VirtuosoException {
    // Statement-level SQL errors won't be cured by reconnecting - propagate them.
    if (e.getErrorCode() == VirtuosoException.SQLERROR) {
        throw e;
    }

    // Anything else is assumed to be a connection issue: log it and try to recover.
    logger.warn(ExceptionUtil.toString(e));
    reconnectLoop();
}
/**
 * Blocks until the backend connection is re-established, retrying every 30 seconds.
 *
 * <p>Non-interrupt failures during a reconnect attempt are logged at debug level and the
 * loop simply tries again. If the thread is interrupted while waiting, the interrupt flag
 * is restored and the loop exits so the owning thread can shut down cleanly.
 */
private void reconnectLoop() {
    for (;;) {
        try {
            logger.info("Attempting to reconnect in 30 seconds");
            Thread.sleep(30000);
            connectionWrapper.reconnect();
            return;
        } catch (InterruptedException e) {
            // FIX: the original caught this under "catch (Exception)" and kept looping,
            // swallowing the interrupt and making the thread impossible to stop.
            // Restore the interrupt status and give up instead.
            Thread.currentThread().interrupt();
            logger.warn(ExceptionUtil.toString(e));
            return;
        } catch (Exception e) {
            // Transient reconnect failure - log it and retry after the next sleep.
            logger.debug(ExceptionUtil.toString(e));
        }
    }
}
/** * Returns the workflow for extracting property and class definitions from meta-wiki * * @return */ private static IHandler<IRecord> createWorkflow(Ini ini) throws Exception { Section backendSection = ini.get("BACKEND_VIRTUOSO"); String dataGraphName = backendSection.get("graphNameData"); String metaGraphName = backendSection.get("graphNameMeta"); String uri = backendSection.get("uri"); String username = backendSection.get("username"); String password = backendSection.get("password"); // Class.forName("virtuoso.jdbc4.Driver").newInstance(); // Connection con = DriverManager.getConnection(uri, username, password); ConnectionWrapper connectionWrapper = new ConnectionWrapper(uri, username, password); Section extractorSection = ini.get("PROPERTY_DEFINITION_EXTRACTOR"); String expressionPrefix = extractorSection.get("expressionPrefix"); String propertyPrefix = extractorSection.get("propertyPrefix"); String reifierPrefix = extractorSection.get("reifierPrefix"); Section namespaceMappingSection = ini.get("NAMESPACE_MAPPING"); String filename = namespaceMappingSection.get("filename"); Section harvesterSection = ini.get("HARVESTER"); String technicalBaseUri = harvesterSection.get("technicalWikiUri"); /* VirtGraph dataGraph = new VirtGraph (graphNameData, uri, username, password); ISparulExecutor dataSparulExecutor = new VirtuosoJenaSparulExecutor(dataGraph); VirtGraph metaGraph = new VirtGraph (graphNameMeta, uri, username, password); ISparulExecutor metaSparulExecutor = new VirtuosoJenaSparulExecutor(metaGraph); */ /* ISparulExecutor dataSparulExecutor = new SparulStatisticExecutorWrapper( new VirtuosoJdbcSparulExecutorPreconditionWrapper( connectionWrapper, new VirtuosoJdbcSparulExecutor(dataGraphName))); ISparulExecutor metaSparulExecutor = new SparulStatisticExecutorWrapper( new VirtuosoJdbcSparulExecutorPreconditionWrapper( connectionWrapper, new VirtuosoJdbcSparulExecutor(metaGraphName))); */ // Sparul executor with default graph set to null ISparulExecutor 
nullSparulExecutor = new SparulStatisticExecutorWrapper( new VirtuosoJdbcSparulExecutorPreconditionWrapper( connectionWrapper, new VirtuosoJdbcSparulExecutor(null))); logger.info("Sending a test query to check TTLP privileges"); try { nullSparulExecutor.insert(new ArrayList<RDFTriple>(), dataGraphName); } catch (Exception e) { logger.fatal(ExceptionUtil.toString(e)); throw e; } logger.info("Success"); insertSystemTriples(nullSparulExecutor, dataGraphName, metaGraphName); // Just for testing... remove this when done. // dataSparulExecutor.executeSelect("Select * {?s ?p ?o . Filter(?o = \"Birthplace\") . }"); PrefixResolver prefixResolver = new PrefixResolver(new File(filename)); // System.out.println(prefixResolver.resolve("rdf:sameAs")); // System.exit(0); // MultiHandler is a multiplexer for IHandler<Record> instances MultiHandler<IRecord> handlerList = new MultiHandler<IRecord>(); // Attach a category delegation handler - this handler delegates // to other handlers depending on a classification CategoryHandler<IRecord, String> classifiedHandler = new CategoryHandler<IRecord, String>(new PageTypeRecordClassifier<IRecord>()); handlerList.handlers().add(classifiedHandler); // for articles MultiHandler<IRecord> articleHandlerList = new MultiHandler<IRecord>(); MultiHandler<IRecord> deletionHandlerList = new MultiHandler<IRecord>(); // classifiedHandler.addHandler(articleHandlerList, "2"); classifiedHandler.addHandler(articleHandlerList, "200"); classifiedHandler.addHandler(articleHandlerList, "202"); classifiedHandler.addHandler(deletionHandlerList, "deleted"); // Attach the parsers for class and property definitions ParseContentRecordHandler parser = new ParseContentRecordHandler(); articleHandlerList.handlers().add(parser); /* ComplexGroupTripleManager sys = new ComplexGroupTripleManager(dataSparulExecutor, metaSparulExecutor); */ PropertyDefinitionCleanUpExtractor cleanUp = new PropertyDefinitionCleanUpExtractor( propertyPrefix, dataGraphName, metaGraphName, 
nullSparulExecutor); articleHandlerList.handlers().add(cleanUp); TBoxExtractor x = new TBoxExtractor( technicalBaseUri, nullSparulExecutor, dataGraphName, metaGraphName, reifierPrefix, propertyPrefix, expressionPrefix, prefixResolver); articleHandlerList.handlers().add(x); deletionHandlerList.handlers().add(x); // Set up the extractor, which renames resources when a page is moved // This extractor needs to do alot of more work than what is currently // implemented - it basically needs to genereate tasks which update // all affected wiki pages which reference the resources being renamed /* RedirectRenameExtractor y = new RedirectRenameExtractor( nullSparulExecutor, metaGraphName, new Predicate<String>() { @Override public boolean evaluate(String arg) { return arg == null ? null : arg.startsWith("User:DBpedia-Bot/ontology/"); } }, new Transformer<String, RDFNode>() { @Override public RDFNode transform(String arg) { String tmp = arg.substring("User:DBpedia-Bot/ontology/".length()); return new RDFResourceNode(IRI.create("http://dbpedia.org/ontology/" + tmp)); } } ); articleHandlerList.handlers().add(y); */ return handlerList; }