Example #1
  public static void insertSystemTriples(
      ISparulExecutor executor, String dataGraphName, String metaGraphName) throws Exception {
    logger.info("Inserting Annotation Properties.");

    List<RDFTriple> triples = new ArrayList<RDFTriple>();

    // System vocabulary terms to be declared as OWL annotation properties
    MyVocabulary[] vocabs = {
      MyVocabulary.DC_MODIFIED,
      MyVocabulary.DBM_EDIT_LINK,
      MyVocabulary.DBM_PAGE_ID,
      MyVocabulary.DBM_REVISION,
      MyVocabulary.DBM_OAIIDENTIFIER
    };

    // Declare each term as rdf:type owl:AnnotationProperty
    for (MyVocabulary item : vocabs) {
      triples.add(
          new RDFTriple(
              new RDFResourceNode(item.getIRI()),
              new RDFResourceNode(OWLRDFVocabulary.RDF_TYPE.getIRI()),
              new RDFResourceNode(OWLRDFVocabulary.OWL_ANNOTATION_PROPERTY.getIRI())));
    }

    // Reify each declaration (owl:annotatedSource/Property/Target) so that provenance
    // can be attached in the meta graph; the reifier IRI is an MD5 hash of the triple.
    List<RDFTriple> metaTriples = new ArrayList<RDFTriple>();
    for (RDFTriple item : triples) {
      URI uri = RDFUtil.generateMD5HashUri("http://dbpedia.org/sysvocab/", item);

      RDFResourceNode reifier = new RDFResourceNode(IRI.create(uri));

      metaTriples.add(
          new RDFTriple(
              reifier,
              new RDFResourceNode(OWLRDFVocabulary.OWL_ANNOTATED_SOURCE.getIRI()),
              item.getSubject()));
      metaTriples.add(
          new RDFTriple(
              reifier,
              new RDFResourceNode(OWLRDFVocabulary.OWL_ANNOTATED_PROPERTY.getIRI()),
              item.getProperty()));
      metaTriples.add(
          new RDFTriple(
              reifier,
              new RDFResourceNode(OWLRDFVocabulary.OWL_ANNOTATED_TARGET.getIRI()),
              item.getObject()));
      metaTriples.add(
          new RDFTriple(
              reifier,
              new RDFResourceNode(MyVocabulary.DBM_EXTRACTED_BY.getIRI()),
              new RDFResourceNode(IRI.create(TBoxExtractor.extractorUri))));
    }

    // Provenance triples go into the meta graph, the declarations into the data graph.
    executor.insert(metaTriples, metaGraphName);
    executor.insert(triples, dataGraphName);
  }
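
A minimal calling sketch (not part of the original source): it reuses the executor construction shown in Example #2, while the JDBC URI, credentials, and graph IRIs are placeholders and imports are omitted to match the snippet style above.

  public static void main(String[] args) throws Exception {
    // Hypothetical connection details; in practice these come from the ini file.
    ConnectionWrapper connectionWrapper =
        new ConnectionWrapper("jdbc:virtuoso://localhost:1111", "dba", "dba");

    // Executor with no default graph, mirroring createWorkflow() in Example #2.
    ISparulExecutor executor =
        new SparulStatisticExecutorWrapper(
            new VirtuosoJdbcSparulExecutorPreconditionWrapper(
                connectionWrapper, new VirtuosoJdbcSparulExecutor(null)));

    // Placeholder graph IRIs.
    insertSystemTriples(
        executor, "http://example.org/graph/data", "http://example.org/graph/meta");
  }
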
Example #2
  /**
   * Creates the workflow for extracting property and class definitions from the meta-wiki.
   *
   * @return the root handler to which incoming wiki records should be passed
   */
  private static IHandler<IRecord> createWorkflow(Ini ini) throws Exception {
    Section backendSection = ini.get("BACKEND_VIRTUOSO");
    String dataGraphName = backendSection.get("graphNameData");
    String metaGraphName = backendSection.get("graphNameMeta");
    String uri = backendSection.get("uri");
    String username = backendSection.get("username");
    String password = backendSection.get("password");

    // Class.forName("virtuoso.jdbc4.Driver").newInstance();
    // Connection con = DriverManager.getConnection(uri, username, password);

    // Shared JDBC connection for the SPARUL executors constructed below
    ConnectionWrapper connectionWrapper = new ConnectionWrapper(uri, username, password);

    Section extractorSection = ini.get("PROPERTY_DEFINITION_EXTRACTOR");
    String expressionPrefix = extractorSection.get("expressionPrefix");
    String propertyPrefix = extractorSection.get("propertyPrefix");
    String reifierPrefix = extractorSection.get("reifierPrefix");

    Section namespaceMappingSection = ini.get("NAMESPACE_MAPPING");
    String filename = namespaceMappingSection.get("filename");

    Section harvesterSection = ini.get("HARVESTER");
    String technicalBaseUri = harvesterSection.get("technicalWikiUri");

    /*
    VirtGraph dataGraph = new VirtGraph (graphNameData, uri, username, password);
    ISparulExecutor dataSparulExecutor = new VirtuosoJenaSparulExecutor(dataGraph);

    VirtGraph metaGraph = new VirtGraph (graphNameMeta, uri, username, password);
    ISparulExecutor metaSparulExecutor = new VirtuosoJenaSparulExecutor(metaGraph);
     */

    /*
    ISparulExecutor dataSparulExecutor =
    	new SparulStatisticExecutorWrapper(
    			new VirtuosoJdbcSparulExecutorPreconditionWrapper(
    				connectionWrapper,
    				new VirtuosoJdbcSparulExecutor(dataGraphName)));

    ISparulExecutor metaSparulExecutor =
    	new SparulStatisticExecutorWrapper(
    			new VirtuosoJdbcSparulExecutorPreconditionWrapper(
    					connectionWrapper,
    					new VirtuosoJdbcSparulExecutor(metaGraphName)));
    */
    // Sparul executor with default graph set to null
    ISparulExecutor nullSparulExecutor =
        new SparulStatisticExecutorWrapper(
            new VirtuosoJdbcSparulExecutorPreconditionWrapper(
                connectionWrapper, new VirtuosoJdbcSparulExecutor(null)));

    logger.info("Sending a test query to check TTLP privileges");
    try {
      nullSparulExecutor.insert(new ArrayList<RDFTriple>(), dataGraphName);
    } catch (Exception e) {
      logger.fatal(ExceptionUtil.toString(e));
      throw e;
    }
    logger.info("Success");

    insertSystemTriples(nullSparulExecutor, dataGraphName, metaGraphName);

    // Just for testing... remove this when done.
    // dataSparulExecutor.executeSelect("Select * {?s ?p ?o . Filter(?o = \"Birthplace\") . }");

    // Resolves namespace prefixes against the mapping file from the NAMESPACE_MAPPING section
    PrefixResolver prefixResolver = new PrefixResolver(new File(filename));
    // System.out.println(prefixResolver.resolve("rdf:sameAs"));
    // System.exit(0);

    // MultiHandler is a multiplexer for IHandler<Record> instances
    MultiHandler<IRecord> handlerList = new MultiHandler<IRecord>();

    // Attach a category delegation handler - this handler delegates
    // to other handlers depending on a classification
    CategoryHandler<IRecord, String> classifiedHandler =
        new CategoryHandler<IRecord, String>(new PageTypeRecordClassifier<IRecord>());
    handlerList.handlers().add(classifiedHandler);

    // for articles
    MultiHandler<IRecord> articleHandlerList = new MultiHandler<IRecord>();
    MultiHandler<IRecord> deletionHandlerList = new MultiHandler<IRecord>();

    // classifiedHandler.addHandler(articleHandlerList, "2");
    classifiedHandler.addHandler(articleHandlerList, "200");
    classifiedHandler.addHandler(articleHandlerList, "202");

    classifiedHandler.addHandler(deletionHandlerList, "deleted");

    // Attach the parsers for class and property definitions
    ParseContentRecordHandler parser = new ParseContentRecordHandler();
    articleHandlerList.handlers().add(parser);

    /*
    ComplexGroupTripleManager sys =
    	new ComplexGroupTripleManager(dataSparulExecutor, metaSparulExecutor);
    */
    PropertyDefinitionCleanUpExtractor cleanUp =
        new PropertyDefinitionCleanUpExtractor(
            propertyPrefix, dataGraphName, metaGraphName, nullSparulExecutor);

    articleHandlerList.handlers().add(cleanUp);

    // Core extractor for class and property definitions; registered for both
    // article updates and deletions below
    TBoxExtractor x =
        new TBoxExtractor(
            technicalBaseUri,
            nullSparulExecutor,
            dataGraphName,
            metaGraphName,
            reifierPrefix,
            propertyPrefix,
            expressionPrefix,
            prefixResolver);
    articleHandlerList.handlers().add(x);
    deletionHandlerList.handlers().add(x);

    // Set up the extractor, which renames resources when a page is moved.
    // This extractor needs to do a lot more work than what is currently
    // implemented - it basically needs to generate tasks which update
    // all affected wiki pages that reference the resources being renamed.
    /*
    RedirectRenameExtractor y =
    	new RedirectRenameExtractor(
    			nullSparulExecutor,
    			metaGraphName,
    			new Predicate<String>() {
    				@Override
    				public boolean evaluate(String arg)
    				{
    					return arg != null && arg.startsWith("User:DBpedia-Bot/ontology/");
    				}
    			},
    			new Transformer<String, RDFNode>() {

    				@Override
    				public RDFNode transform(String arg)
    				{
    					String tmp = arg.substring("User:DBpedia-Bot/ontology/".length());
    					return new RDFResourceNode(IRI.create("http://dbpedia.org/ontology/" + tmp));
    				}

    			}
    	);
    articleHandlerList.handlers().add(y);
    */

    return handlerList;
  }
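
A hedged driver sketch for the method above: the configuration path is a placeholder, imports are omitted to match the snippet style, and these examples do not show the IHandler processing method or the harvester that produces IRecord instances, so the record loop is only indicated in comments.

  public static void main(String[] args) throws Exception {
    // Hypothetical configuration path; the file must provide the BACKEND_VIRTUOSO,
    // PROPERTY_DEFINITION_EXTRACTOR, NAMESPACE_MAPPING and HARVESTER sections read above.
    Ini ini = new Ini(new File("config/extraction.ini"));

    IHandler<IRecord> workflow = createWorkflow(ini);

    // Records would normally come from the harvester configured in the HARVESTER
    // section and be pushed through the returned handler chain, e.g.:
    // for (IRecord record : harvester) {
    //   workflow.handle(record);  // processing method name assumed, not shown here
    // }
  }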