@Override
public UpdateContainer doIt(VWorkspace vWorkspace) throws CommandException {
    UpdateContainer c = new UpdateContainer();
    Worksheet worksheet = vWorkspace.getViewFactory().getVWorksheet(vWorksheetId).getWorksheet();
    worksheetName = worksheet.getTitle();

    // Generate the semantic types for the worksheet
    OntologyManager ontMgr = vWorkspace.getWorkspace().getOntologyManager();
    if (ontMgr.isEmpty())
        return new UpdateContainer(new ErrorUpdate("No ontology loaded."));

    SemanticTypeUtil.computeSemanticTypesForAutoModel(
        worksheet, vWorkspace.getWorkspace().getCrfModelHandler(), ontMgr);

    String alignmentId = AlignmentManager.Instance()
        .constructAlignmentId(vWorkspace.getWorkspace().getId(), vWorksheetId);
    Alignment alignment = AlignmentManager.Instance().getAlignment(alignmentId);
    if (alignment == null) {
        alignment = new Alignment(ontMgr);
        AlignmentManager.Instance().addAlignmentToMap(alignmentId, alignment);
    }

    try {
        // Save the semantic types in the input parameter JSON
        saveSemanticTypesInformation(
            worksheet, vWorkspace, worksheet.getSemanticTypes().getListOfTypes());

        // Add the visualization updates
        c.add(new SemanticTypesUpdate(worksheet, vWorksheetId, alignment));
        c.add(new SVGAlignmentUpdate_ForceKarmaLayout(
            vWorkspace.getViewFactory().getVWorksheet(vWorksheetId), alignment));
    } catch (Exception e) {
        logger.error("Error occurred while generating the model.", e);
        return new UpdateContainer(
            new ErrorUpdate("Error occurred while generating the model for the source."));
    }
    c.add(new TagsUpdate());
    return c;
}
public JSONObject getAsJSONObject(OntologyManager ontMgr) throws JSONException {
    JSONObject obj = new JSONObject();
    JSONArray arr = new JSONArray();

    // Sort the label-to-score map so labels are emitted in score order
    HashMap<String, Double> sortedMap = Util.sortHashMap(scoreMap);

    for (String label : sortedMap.keySet()) {
        JSONObject oj = new JSONObject();

        // Check if the type contains a domain
        if (label.contains("|")) {
            String[] parts = label.split("\\|");
            URI domainURI = ontMgr.getURIFromString(parts[0]);
            URI typeURI = ontMgr.getURIFromString(parts[1]);
            if (domainURI == null || typeURI == null)
                continue;
            oj.put(SemanticTypesUpdate.JsonKeys.DisplayDomainLabel.name(),
                domainURI.getLocalNameWithPrefixIfAvailable());
            oj.put(SemanticTypesUpdate.JsonKeys.Domain.name(), parts[0]);
            oj.put(SemanticTypesUpdate.JsonKeys.DisplayLabel.name(),
                typeURI.getLocalNameWithPrefixIfAvailable());
            oj.put(SemanticTypesUpdate.JsonKeys.FullType.name(), parts[1]);
        } else {
            URI typeURI = ontMgr.getURIFromString(label);
            if (typeURI == null)
                continue;
            oj.put(SemanticTypesUpdate.JsonKeys.FullType.name(), label);
            oj.put(SemanticTypesUpdate.JsonKeys.DisplayLabel.name(),
                typeURI.getLocalNameWithPrefixIfAvailable());
            oj.put(SemanticTypesUpdate.JsonKeys.DisplayDomainLabel.name(), "");
            oj.put(SemanticTypesUpdate.JsonKeys.Domain.name(), "");
        }

        oj.put("Probability", scoreMap.get(label));
        arr.put(oj);
    }
    obj.put("Labels", arr);
    return obj;
}
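// Illustrative output (not from the original source; the URIs below are hypothetical):
// for a scoreMap entry "http://example.org/Person|http://example.org/name" -> 0.85,
// getAsJSONObject would produce an entry of this shape inside the "Labels" array:
//
//   {
//     "DisplayDomainLabel": "ex:Person",
//     "Domain": "http://example.org/Person",
//     "DisplayLabel": "ex:name",
//     "FullType": "http://example.org/name",
//     "Probability": 0.85
//   }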
public static void test() throws Exception {

    ServletContextParameterMap contextParameters =
        ContextParametersRegistry.getInstance().getDefault();
    ModelingConfiguration modelingConfiguration =
        ModelingConfigurationRegistry.getInstance()
            .getModelingConfiguration(contextParameters.getId());

    // String inputPath = Params.INPUT_DIR;
    String outputPath = Params.OUTPUT_DIR;
    String graphPath = Params.GRAPHS_DIR;

    // List<SemanticModel> semanticModels = ModelReader.importSemanticModels(inputPath);
    List<SemanticModel> semanticModels =
        ModelReader.importSemanticModelsFromJsonFiles(Params.MODEL_DIR, Params.MODEL_MAIN_FILE_EXT);

    // ModelEvaluation me2 = semanticModels.get(20).evaluate(semanticModels.get(20));
    // System.out.println(me2.getPrecision() + "--" + me2.getRecall());
    // if (true)
    //     return;

    List<SemanticModel> trainingData = new ArrayList<SemanticModel>();

    OntologyManager ontologyManager = new OntologyManager(contextParameters.getId());
    File ff = new File(Params.ONTOLOGY_DIR);
    File[] files = ff.listFiles();
    for (File f : files) {
        ontologyManager.doImport(f, "UTF-8");
    }
    ontologyManager.updateCache();

    // getStatistics1(semanticModels);
    // if (true)
    //     return;

    ModelLearningGraph modelLearningGraph = null;
    ModelLearner_Old modelLearner;

    boolean iterativeEvaluation = false;
    boolean useCorrectType = false;
    int numberOfCRFCandidates = 4;
    int numberOfKnownModels;

    String filePath = Params.RESULTS_DIR;
    String filename = "results,k=" + numberOfCRFCandidates + ".csv";
    PrintWriter resultFile = new PrintWriter(new File(filePath + filename));

    StringBuffer[] resultsArray = new StringBuffer[semanticModels.size() + 2];
    for (int i = 0; i < resultsArray.length; i++) {
        resultsArray[i] = new StringBuffer();
    }

    for (int i = 0; i < semanticModels.size(); i++) {
    // for (int i = 0; i <= 10; i++) {
    // int i = 3; {

        resultFile.flush();
        int newSourceIndex = i;
        SemanticModel newSource = semanticModels.get(newSourceIndex);

        logger.info("======================================================");
        logger.info(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        System.out.println(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        logger.info("======================================================");

        if (!iterativeEvaluation)
            numberOfKnownModels = semanticModels.size() - 1;
        else
            numberOfKnownModels = 0;

        if (resultsArray[0].length() > 0)
            resultsArray[0].append(" \t ");
        resultsArray[0].append(newSource.getName()
            + "(" + newSource.getColumnNodes().size() + ")" + "\t" + " " + "\t" + " ");
        if (resultsArray[1].length() > 0)
            resultsArray[1].append(" \t ");
        resultsArray[1].append("p \t r \t t");

        while (numberOfKnownModels <= semanticModels.size() - 1) {

            // Use all models except the new source as training data
            trainingData.clear();
            int j = 0, count = 0;
            while (count < numberOfKnownModels) {
                if (j != newSourceIndex) {
                    trainingData.add(semanticModels.get(j));
                    count++;
                }
                j++;
            }

            modelLearningGraph = (ModelLearningGraphSparse)
                ModelLearningGraph.getEmptyInstance(ontologyManager, ModelLearningGraphType.Sparse);

            SemanticModel correctModel = newSource;
            List<ColumnNode> columnNodes = correctModel.getColumnNodes();

            // if (useCorrectType && numberOfCRFCandidates > 1)
            //     updateCrfSemanticTypesForResearchEvaluation(columnNodes);

            modelLearner = new ModelLearner_Old(ontologyManager, columnNodes);

            long start = System.currentTimeMillis();

            String graphName = !iterativeEvaluation
                ? graphPath + semanticModels.get(newSourceIndex).getName() + Params.GRAPH_FILE_EXT
                : graphPath + semanticModels.get(newSourceIndex).getName()
                    + ".knownModels=" + numberOfKnownModels + Params.GRAPH_FILE_EXT;

            if (new File(graphName).exists()) {
                // Read the graph from file
                try {
                    logger.info("loading the graph ...");
                    DirectedWeightedMultigraph<Node, DefaultLink> graph = GraphUtil.importJson(graphName);
                    modelLearner.graphBuilder = new GraphBuilder(ontologyManager, graph, false);
                    modelLearner.nodeIdFactory = modelLearner.graphBuilder.getNodeIdFactory();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else {
                logger.info("building the graph ...");
                for (SemanticModel sm : trainingData)
                    modelLearningGraph.addModel(sm, false);
                modelLearner.graphBuilder = modelLearningGraph.getGraphBuilder();
                modelLearner.nodeIdFactory = modelLearner.graphBuilder.getNodeIdFactory();
                // Save the graph to file
                try {
                    GraphUtil.exportJson(
                        modelLearningGraph.getGraphBuilder().getGraph(), graphName, true, true);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            List<SortableSemanticModel_Old> hypothesisList =
                modelLearner.hypothesize(useCorrectType, numberOfCRFCandidates);

            long elapsedTimeMillis = System.currentTimeMillis() - start;
            float elapsedTimeSec = elapsedTimeMillis / 1000F;

            List<SortableSemanticModel_Old> topHypotheses = null;
            if (hypothesisList != null) {
                topHypotheses = hypothesisList.size() > modelingConfiguration.getNumCandidateMappings()
                    ? hypothesisList.subList(0, modelingConfiguration.getNumCandidateMappings())
                    : hypothesisList;
            }

            Map<String, SemanticModel> models = new TreeMap<String, SemanticModel>();

            // export to json
            // if (topHypotheses != null)
            //     for (int k = 0; k < topHypotheses.size() && k < 3; k++) {
            //         String fileExt = null;
            //         if (k == 0) fileExt = Params.MODEL_RANK1_FILE_EXT;
            //         else if (k == 1) fileExt = Params.MODEL_RANK2_FILE_EXT;
            //         else if (k == 2) fileExt = Params.MODEL_RANK3_FILE_EXT;
            //         SortableSemanticModel m = topHypotheses.get(k);
            //         new SemanticModel(m).writeJson(Params.MODEL_DIR + newSource.getName() + fileExt);
            //     }

            ModelEvaluation me;
            models.put("1-correct model", correctModel);
            if (topHypotheses != null)
                for (int k = 0; k < topHypotheses.size(); k++) {
                    SortableSemanticModel_Old m = topHypotheses.get(k);
                    me = m.evaluate(correctModel);

                    String label = "candidate" + k
                        + m.getSteinerNodes().getScoreDetailsString()
                        + "cost:" + roundTwoDecimals(m.getCost())
                        // + "-distance:" + me.getDistance()
                        + "-precision:" + me.getPrecision()
                        + "-recall:" + me.getRecall();
                    models.put(label, m);

                    if (k == 0) { // first-ranked model
                        System.out.println("number of known models: " + numberOfKnownModels
                            + ", precision: " + me.getPrecision()
                            + ", recall: " + me.getRecall()
                            + ", time: " + elapsedTimeSec);
                        logger.info("number of known models: " + numberOfKnownModels
                            + ", precision: " + me.getPrecision()
                            + ", recall: " + me.getRecall()
                            + ", time: " + elapsedTimeSec);
                        // resultFile.println("number of known models \t precision \t recall");
                        // resultFile.println(numberOfKnownModels + "\t" + me.getPrecision() + "\t" + me.getRecall());
                        String s = me.getPrecision() + "\t" + me.getRecall() + "\t" + elapsedTimeSec;
                        if (resultsArray[numberOfKnownModels + 2].length() > 0)
                            resultsArray[numberOfKnownModels + 2].append(" \t ");
                        resultsArray[numberOfKnownModels + 2].append(s);
                        // resultFile.println(me.getPrecision() + "\t" + me.getRecall() + "\t" + elapsedTimeSec);
                    }
                }

            String outName = !iterativeEvaluation
                ? outputPath + semanticModels.get(newSourceIndex).getName() + Params.GRAPHVIS_OUT_DETAILS_FILE_EXT
                : outputPath + semanticModels.get(newSourceIndex).getName()
                    + ".knownModels=" + numberOfKnownModels + Params.GRAPHVIS_OUT_DETAILS_FILE_EXT;

            // if (!iterativeEvaluation) {
            GraphVizUtil.exportSemanticModelsToGraphviz(
                models, newSource.getName(), outName,
                GraphVizLabelType.LocalId, GraphVizLabelType.LocalUri, false, false);
            // }

            numberOfKnownModels++;
        }

        // resultFile.println("=======================================================");
    }

    for (StringBuffer s : resultsArray)
        resultFile.println(s.toString());
    resultFile.close();
}
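// roundTwoDecimals(...) is called in test() above, but its definition is not part of this
// excerpt. A minimal sketch, assuming it is the usual round-to-two-decimals helper:
private static double roundTwoDecimals(double d) {
    // e.g. 1.2345 -> 1.23, 1.238 -> 1.24
    return Math.round(d * 100) / 100.0;
}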
private Set<SemanticTypeMapping> findSemanticTypeInGraph(
        ColumnNode sourceColumn,
        SemanticType semanticType,
        HashMap<String, Integer> semanticTypesCount,
        Set<Node> addedNodes) {

    logger.debug("finding matches for semantic type in the graph ... ");

    if (addedNodes == null)
        addedNodes = new HashSet<Node>();

    Set<SemanticTypeMapping> mappings = new HashSet<SemanticTypeMapping>();

    if (semanticType == null) {
        logger.error("semantic type is null.");
        return mappings;
    }
    if (semanticType.getDomain() == null) {
        logger.error("semantic type does not have any domain");
        return mappings;
    }
    if (semanticType.getType() == null) {
        logger.error("semantic type does not have any link");
        return mappings;
    }

    String domainUri = semanticType.getDomain().getUri();
    String propertyUri = semanticType.getType().getUri();
    Double confidence = semanticType.getConfidenceScore();
    Origin origin = semanticType.getOrigin();

    // Validate the URIs before using them as a key into semanticTypesCount
    if (domainUri == null || domainUri.isEmpty()) {
        logger.error("semantic type does not have any domain");
        return mappings;
    }
    if (propertyUri == null || propertyUri.isEmpty()) {
        logger.error("semantic type does not have any link");
        return mappings;
    }

    Integer countOfSemanticType = semanticTypesCount.get(domainUri + propertyUri);
    if (countOfSemanticType == null) {
        logger.error("count of semantic type should not be null or zero");
        return mappings;
    }

    logger.debug("semantic type: " + domainUri + "|" + propertyUri + "|" + confidence + "|" + origin);

    // Add the data property to existing classes if the column maps to a data node
    // Set<Node> foundInternalNodes = new HashSet<Node>();
    Set<SemanticTypeMapping> semanticTypeMatches =
        this.graphBuilder.getSemanticTypeMatches().get(domainUri + propertyUri);
    if (semanticTypeMatches != null) {
        for (SemanticTypeMapping stm : semanticTypeMatches) {
            SemanticTypeMapping mp = new SemanticTypeMapping(
                sourceColumn, semanticType, stm.getSource(), stm.getLink(), stm.getTarget());
            mappings.add(mp);
            // foundInternalNodes.add(stm.getSource());
        }
    }

    logger.debug("adding data property to the found internal nodes ...");

    Integer count;
    boolean allowMultipleSamePropertiesPerNode =
        ModelingConfigurationRegistry.getInstance()
            .getModelingConfiguration(
                ContextParametersRegistry.getInstance()
                    .getContextParameters(ontologyManager.getContextId())
                    .getKarmaHome())
            .isMultipleSamePropertyPerNode();
    Set<Node> nodesWithSameUriOfDomain = this.graphBuilder.getUriToNodesMap().get(domainUri);
    if (nodesWithSameUriOfDomain != null) {
        for (Node source : nodesWithSameUriOfDomain) {
            count = this.graphBuilder.getNodeDataPropertyCount().get(source.getId() + propertyUri);
            if (count != null) {
                if (allowMultipleSamePropertiesPerNode) {
                    if (count >= countOfSemanticType.intValue())
                        continue;
                } else {
                    if (count >= 1)
                        continue;
                }
            }

            String nodeId = new RandomGUID().toString();
            ColumnNode target = new ColumnNode(nodeId, nodeId, sourceColumn.getColumnName(), null);
            if (!this.graphBuilder.addNode(target))
                continue;
            addedNodes.add(target);

            String linkId = LinkIdFactory.getLinkId(propertyUri, source.getId(), target.getId());
            LabeledLink link = new DataPropertyLink(linkId, new Label(propertyUri));
            if (!this.graphBuilder.addLink(source, target, link))
                continue;

            SemanticTypeMapping mp = new SemanticTypeMapping(
                sourceColumn, semanticType, (InternalNode) source, link, target);
            mappings.add(mp);
        }
    }

    return mappings;
}
private CandidateSteinerSets getCandidateSteinerSets(
        List<ColumnNode> columnNodes,
        boolean useCorrectTypes,
        int numberOfCRFCandidates,
        Set<Node> addedNodes) {

    if (columnNodes == null || columnNodes.isEmpty())
        return null;

    int maxNumberOfSteinerNodes = columnNodes.size() * 2;
    CandidateSteinerSets candidateSteinerSets =
        new CandidateSteinerSets(maxNumberOfSteinerNodes, ontologyManager.getContextId());

    if (addedNodes == null)
        addedNodes = new HashSet<Node>();

    Set<SemanticTypeMapping> tempSemanticTypeMappings;
    HashMap<ColumnNode, List<SemanticType>> columnSemanticTypes =
        new HashMap<ColumnNode, List<SemanticType>>();
    HashMap<String, Integer> semanticTypesCount = new HashMap<String, Integer>();
    List<SemanticType> candidateSemanticTypes;
    String domainUri = "", propertyUri = "";

    // First pass: count how often each (domain, property) pair appears among the candidates
    for (ColumnNode n : columnNodes) {
        candidateSemanticTypes = n.getTopKLearnedSemanticTypes(numberOfCRFCandidates);
        columnSemanticTypes.put(n, candidateSemanticTypes);

        for (SemanticType semanticType : candidateSemanticTypes) {
            if (semanticType == null
                || semanticType.getDomain() == null
                || semanticType.getType() == null)
                continue;

            domainUri = semanticType.getDomain().getUri();
            propertyUri = semanticType.getType().getUri();

            Integer count = semanticTypesCount.get(domainUri + propertyUri);
            if (count == null)
                semanticTypesCount.put(domainUri + propertyUri, 1);
            else
                semanticTypesCount.put(domainUri + propertyUri, count.intValue() + 1);
        }
    }

    // Second pass: match each candidate type against the graph, adding new structures as needed
    int numOfMappings = 1;
    for (ColumnNode n : columnNodes) {
        candidateSemanticTypes = columnSemanticTypes.get(n);
        if (candidateSemanticTypes == null)
            continue;

        logger.info("===== Column: " + n.getColumnName());

        Set<SemanticTypeMapping> semanticTypeMappings = new HashSet<SemanticTypeMapping>();
        for (SemanticType semanticType : candidateSemanticTypes) {
            // Null-check before logging to avoid a NullPointerException
            if (semanticType == null
                || semanticType.getDomain() == null
                || semanticType.getType() == null)
                continue;

            logger.info("\t===== Semantic Type: " + semanticType.getModelLabelString());

            domainUri = semanticType.getDomain().getUri();
            propertyUri = semanticType.getType().getUri();
            Integer countOfSemanticType = semanticTypesCount.get(domainUri + propertyUri);
            // logger.info("count of semantic type: " + countOfSemanticType);

            tempSemanticTypeMappings =
                findSemanticTypeInGraph(n, semanticType, semanticTypesCount, addedNodes);
            // logger.info("number of matches for semantic type: "
            //     + (tempSemanticTypeMappings == null ? 0 : tempSemanticTypeMappings.size()));

            if (tempSemanticTypeMappings != null)
                semanticTypeMappings.addAll(tempSemanticTypeMappings);

            int countOfMatches = tempSemanticTypeMappings == null ? 0 : tempSemanticTypeMappings.size();
            if (countOfMatches < countOfSemanticType) {
                // No structure in the graph matches the semantic type; add new structures to the graph
                for (int i = 0; i < countOfSemanticType - countOfMatches; i++) {
                    SemanticTypeMapping mp = addSemanticTypeStruct(n, semanticType, addedNodes);
                    if (mp != null)
                        semanticTypeMappings.add(mp);
                }
            }
        }

        logger.info("number of matches for column " + n.getColumnName() + ": "
            + (semanticTypeMappings == null ? 0 : semanticTypeMappings.size()));
        numOfMappings *= semanticTypeMappings == null || semanticTypeMappings.isEmpty()
            ? 1 : semanticTypeMappings.size();

        candidateSteinerSets.updateSteinerSets(semanticTypeMappings);
    }

    logger.info("number of possible mappings: " + numOfMappings);
    return candidateSteinerSets;
}
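// Worked example (illustrative numbers, not from the original source): if a worksheet has
// three columns whose candidate semantic types match 2, 1, and 3 (domain, property)
// structures in the graph respectively, then numOfMappings = 2 * 1 * 3 = 6 possible ways
// to assign the columns; a column with no matches contributes a factor of 1, not 0.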
public List<SortableSemanticModel_Old> hypothesize(
        boolean useCorrectTypes, int numberOfCRFCandidates) {

    // These nodes should be deleted from the graph after computing the semantic models
    Set<Node> addedNodes = new HashSet<Node>();

    logger.info("finding candidate steiner sets ... ");
    CandidateSteinerSets candidateSteinerSets =
        getCandidateSteinerSets(columnNodes, useCorrectTypes, numberOfCRFCandidates, addedNodes);

    if (candidateSteinerSets == null
        || candidateSteinerSets.getSteinerSets() == null
        || candidateSteinerSets.getSteinerSets().isEmpty()) {
        logger.error("there is no candidate set of steiner nodes.");
        return null;
    }

    logger.info("number of steiner sets: " + candidateSteinerSets.numberOfCandidateSets());

    logger.info("updating weights according to training data ...");
    long start = System.currentTimeMillis();
    this.updateWeights();
    long updateWeightsElapsedTimeMillis = System.currentTimeMillis() - start;
    logger.info("time to update weights: " + (updateWeightsElapsedTimeMillis / 1000F));

    logger.info("computing steiner trees ...");
    List<SortableSemanticModel_Old> sortableSemanticModels =
        new ArrayList<SortableSemanticModel_Old>();
    int count = 1;
    for (SteinerNodes sn : candidateSteinerSets.getSteinerSets()) {
        logger.debug("computing steiner tree for steiner nodes set " + count + " ...");
        logger.debug(sn.getScoreDetailsString());
        DirectedWeightedMultigraph<Node, LabeledLink> tree = computeSteinerTree(sn.getNodes());
        count++;
        if (tree != null) {
            SemanticModel sm = new SemanticModel(
                new RandomGUID().toString(), tree, columnNodes, sn.getMappingToSourceColumns());
            SortableSemanticModel_Old sortableSemanticModel = new SortableSemanticModel_Old(sm, sn);
            sortableSemanticModels.add(sortableSemanticModel);
        }
        if (count == ModelingConfigurationRegistry.getInstance()
                .getModelingConfiguration(
                    ContextParametersRegistry.getInstance()
                        .getContextParameters(ontologyManager.getContextId())
                        .getKarmaHome())
                .getNumCandidateMappings())
            break;
    }

    Collections.sort(sortableSemanticModels);
    // logger.info("results are ready ...");
    // return sortableSemanticModels;

    // Drop consecutive models that have the same score and cost
    List<SortableSemanticModel_Old> uniqueModels = new ArrayList<SortableSemanticModel_Old>();
    SortableSemanticModel_Old current, previous;
    if (sortableSemanticModels.size() > 0)
        uniqueModels.add(sortableSemanticModels.get(0));
    for (int i = 1; i < sortableSemanticModels.size(); i++) {
        current = sortableSemanticModels.get(i);
        previous = sortableSemanticModels.get(i - 1);
        if (current.getScore() == previous.getScore() && current.getCost() == previous.getCost())
            continue;
        uniqueModels.add(current);
    }

    logger.info("results are ready ...");
    return uniqueModels;
}
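// Typical call sequence, mirroring how test() drives this method (useCorrectTypes = false,
// numberOfCRFCandidates = 4 there):
//
//   ModelLearner_Old modelLearner = new ModelLearner_Old(ontologyManager, columnNodes);
//   modelLearner.graphBuilder = modelLearningGraph.getGraphBuilder();
//   modelLearner.nodeIdFactory = modelLearner.graphBuilder.getNodeIdFactory();
//   List<SortableSemanticModel_Old> hypotheses = modelLearner.hypothesize(false, 4);
//
// The returned list is sorted via Collections.sort(...) and de-duplicated so that
// consecutive models with identical score and cost appear only once.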
@Override
public UpdateContainer doIt(VWorkspace vWorkspace) throws CommandException {
    OntologyManager ontMgr = vWorkspace.getWorkspace().getOntologyManager();
    JSONArray classesList = new JSONArray();
    JSONArray classesMap = new JSONArray();
    JSONArray propertiesList = new JSONArray();
    JSONArray propertiesMap = new JSONArray();
    Map<String, String> prefixMap = ontMgr.getPrefixMap();

    ExtendedIterator<OntClass> iter = ontMgr.getOntModel().listNamedClasses();
    // ExtendedIterator<DatatypeProperty> propsIter = ontMgr.getOntModel()
    //     .listDatatypeProperties();
    ExtendedIterator<OntProperty> propsIter = ontMgr.getOntModel().listAllOntProperties();
    final JSONObject outputObj = new JSONObject();

    try {
        while (iter.hasNext()) {
            OntClass cls = iter.next();
            String pr = prefixMap.get(cls.getNameSpace());
            String classLabel = cls.getLocalName();
            // if (cls.getLabel(null) != null && !cls.getLabel(null).equals(""))
            //     classLabel = cls.getLabel(null);
            String clsStr = (pr != null && !pr.equals("")) ? pr + ":" + classLabel : classLabel;
            classesList.put(clsStr);

            JSONObject classKey = new JSONObject();
            classKey.put(clsStr, cls.getURI());
            classesMap.put(classKey);
        }

        while (propsIter.hasNext()) {
            // DatatypeProperty prop = propsIter.next();
            OntProperty prop = propsIter.next();
            // Skip pure object properties; only datatype properties are listed
            if (prop.isObjectProperty() && !prop.isDatatypeProperty())
                continue;
            String pr = prefixMap.get(prop.getNameSpace());
            String propLabel = prop.getLocalName();
            // if (prop.getLabel(null) != null && !prop.getLabel(null).equals(""))
            //     propLabel = prop.getLabel(null);
            String propStr = (pr != null && !pr.equals("")) ? pr + ":" + propLabel : propLabel;
            propertiesList.put(propStr);

            JSONObject propKey = new JSONObject();
            propKey.put(propStr, prop.getURI());
            propertiesMap.put(propKey);
        }

        // Populate the JSON object that will hold everything in the output
        outputObj.put(JsonKeys.classList.name(), classesList);
        outputObj.put(JsonKeys.classMap.name(), classesMap);
        outputObj.put(JsonKeys.propertyList.name(), propertiesList);
        outputObj.put(JsonKeys.propertyMap.name(), propertiesMap);
    } catch (JSONException e) {
        logger.error("Error populating JSON!", e);
    }

    UpdateContainer upd = new UpdateContainer(new AbstractUpdate() {
        @Override
        public void generateJson(String prefix, PrintWriter pw, VWorkspace vWorkspace) {
            pw.print(outputObj.toString());
        }
    });
    return upd;
}
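// Illustrative output (hypothetical URIs, not from the original source): with a prefix map
// entry mapping "http://example.org/" to "ex", a class Person, and a datatype property name,
// the generated JSON would look like:
//
//   {
//     "classList": ["ex:Person"],
//     "classMap": [{"ex:Person": "http://example.org/Person"}],
//     "propertyList": ["ex:name"],
//     "propertyMap": [{"ex:name": "http://example.org/name"}]
//   }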
@Override
public UpdateContainer doIt(Workspace workspace) throws CommandException {
    final OntologyManager ontMgr = workspace.getOntologyManager();
    Set<LabeledLink> properties = new HashSet<>();
    logger.debug("GetPropertiesCommand:" + propertiesRange + ":" + classURI + "," + domainURI + ", " + rangeURI);

    if (propertiesRange == INTERNAL_PROP_RANGE.allObjectProperties) {
        HashMap<String, Label> linkList = ontMgr.getObjectProperties();
        if (linkList != null) {
            for (Label label : linkList.values()) {
                properties.add(new DataPropertyLink(label.getUri(), label));
            }
        }
    } else if (propertiesRange == INTERNAL_PROP_RANGE.allDataProperties) {
        HashMap<String, Label> linkList = ontMgr.getDataProperties();
        for (Label label : linkList.values()) {
            properties.add(new DataPropertyLink(label.getUri(), label));
        }
    } else if (propertiesRange == INTERNAL_PROP_RANGE.propertiesWithDomainRange) {
        Map<String, Label> linkList = ontMgr.getObjectPropertiesByDomainRange(domainURI, rangeURI, true);
        for (Label label : linkList.values()) {
            properties.add(new DataPropertyLink(label.getUri(), label));
        }
    } else if (propertiesRange == INTERNAL_PROP_RANGE.dataPropertiesForClass) {
        Map<String, Label> linkList = ontMgr.getDataPropertiesByDomain(classURI, true);
        for (Label label : linkList.values()) {
            properties.add(new DataPropertyLink(label.getUri(), label));
        }
    } else if (propertiesRange == INTERNAL_PROP_RANGE.existingProperties) {
        Alignment alignment = AlignmentManager.Instance()
            .getAlignmentOrCreateIt(workspace.getId(), worksheetId, ontMgr);

        Set<String> steinerTreeNodeIds = new HashSet<String>();
        if (alignment != null && !alignment.isEmpty()) {
            DirectedWeightedMultigraph<Node, LabeledLink> steinerTree = alignment.getSteinerTree();
            for (Node node : steinerTree.vertexSet()) {
                if (node.getType() == NodeType.InternalNode) {
                    steinerTreeNodeIds.add(node.getId());
                }
            }

            List<LabeledLink> specializedLinks = new ArrayList<LabeledLink>();
            Set<LabeledLink> temp = alignment.getLinksByType(LinkType.DataPropertyLink);
            if (temp != null)
                specializedLinks.addAll(temp);
            for (LabeledLink link : steinerTree.edgeSet())
                if (link instanceof ObjectPropertyLink)
                    specializedLinks.add(link);

            // Store the data property links for specialized edge link options
            properties.addAll(specializedLinks);
        }
    }

    logger.debug("Got back " + properties.size() + " results");
    final Set<LabeledLink> finalProperties = properties;

    UpdateContainer upd = new UpdateContainer(new AbstractUpdate() {
        @Override
        public void generateJson(String prefix, PrintWriter pw, VWorkspace vWorkspace) {
            JSONObject obj = new JSONObject();
            JSONArray resultArray = new JSONArray();
            try {
                obj.put(JsonKeys.updateType.name(), "PropertyList");
                for (LabeledLink link : finalProperties) {
                    Label linkLabel = link.getLabel();
                    String edgeLabelStr = linkLabel.getDisplayName();
                    JSONObject edgeObj = new JSONObject();
                    if (linkLabel.getUri() != null && linkLabel.getNs() != null
                        && linkLabel.getUri().equalsIgnoreCase(linkLabel.getNs())) {
                        edgeLabelStr = linkLabel.getUri();
                    }
                    edgeObj.put(JsonKeys.label.name(), edgeLabelStr);
                    edgeObj.put(JsonKeys.uri.name(), linkLabel.getUri());
                    edgeObj.put(JsonKeys.id.name(), link.getId());
                    resultArray.put(edgeObj);
                }
                obj.put(JsonKeys.properties.name(), resultArray);
                pw.println(obj.toString());
            } catch (Exception e) {
                logger.error("Exception:", e);
            }
        }
    });
    return upd;
}