private List<String> addHeaders(Worksheet wk, List<String> columnNames, RepFactory factory) {
  HTable headers = wk.getHeaders();
  List<String> headersList = new ArrayList<String>();
  // Register one Regular HNode per column name and remember its generated id.
  for (int i = 0; i < columnNames.size(); i++) {
    HNode hNode = headers.addHNode(columnNames.get(i), HNodeType.Regular, wk, factory);
    headersList.add(hNode.getId());
  }
  return headersList;
}
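// Illustrative example (values are hypothetical, not from the source): for
// columnNames = ["name", "age"], addHeaders registers two Regular HNodes on the
// worksheet's header table and returns their generated ids in the same order as
// the input column names.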
@Override
public UpdateContainer doIt(Workspace workspace) throws CommandException {
  Worksheet wk = workspace.getWorksheet(worksheetId);
  SuperSelection selection = getSuperSelection(wk);
  String msg = String.format("begin, Time,%d, Worksheet,%s", System.currentTimeMillis(), worksheetId);
  logger.info(msg);

  // Find the HNodePath whose leaf is the selected HNode.
  HashMap<String, HashMap<String, String>> rows = new HashMap<String, HashMap<String, String>>();
  HNodePath selectedPath = null;
  List<HNodePath> columnPaths = wk.getHeaders().getAllPaths();
  for (HNodePath path : columnPaths) {
    if (path.getLeaf().getId().equals(hNodeId)) {
      selectedPath = path;
    }
  }

  // Collect the column's nodes and keep a random subset of them.
  Collection<Node> nodes = new ArrayList<Node>();
  wk.getDataTable().collectNodes(selectedPath, nodes, selection);
  HashSet<Integer> indSet = this.obtainIndexs(nodes.size());
  int index = 0;
  for (Node node : nodes) {
    if (indSet.contains(index)) {
      String id = node.getId();
      String originalVal = node.getValue().asString();
      HashMap<String, String> x = new HashMap<String, String>();
      x.put("Org", originalVal);
      x.put("Tar", originalVal);
      x.put("Orgdis", originalVal);
      x.put("Tardis", originalVal);
      rows.put(id, x);
    }
    index++;
  }

  msg = String.format("end, Time,%d, Worksheet,%s", System.currentTimeMillis(), worksheetId);
  logger.info(msg);
  return new UpdateContainer(new FetchResultUpdate(hNodeId, rows));
}
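// The body of obtainIndexs is not shown here. A minimal sketch of what such a helper could
// look like, assuming it samples a bounded number of distinct row indices uniformly at
// random (the cap of 100 and the use of java.util.Random are assumptions, not the original
// implementation):
private HashSet<Integer> obtainIndexs(int size) {
  HashSet<Integer> indexSet = new HashSet<Integer>();
  if (size <= 0) {
    return indexSet;
  }
  int sampleSize = Math.min(size, 100); // hypothetical cap on the number of sampled rows
  Random rand = new Random();
  while (indexSet.size() < sampleSize) {
    indexSet.add(rand.nextInt(size)); // indices in [0, size)
  }
  return indexSet;
}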
@Override
public void generateJson(String prefix, PrintWriter pw, VWorkspace vWorkspace) {
  Workspace workspace = vWorkspace.getWorkspace();
  alignment = AlignmentManager.Instance().getAlignment(workspace.getId(), worksheetId);
  SemanticTypes types = worksheet.getSemanticTypes();
  Map<String, ColumnNode> hNodeIdToColumnNodeMap = createColumnNodeMap();
  Map<String, SemanticTypeNode> hNodeIdToDomainNodeMap = createDomainNodeMap();
  JSONStringer jsonStr = new JSONStringer();
  try {
    JSONWriter writer = jsonStr.object();
    writer.key("worksheetId").value(worksheetId).key("updateType").value("SemanticTypesUpdate");

    writer.key(JsonKeys.Types.name());
    writer.array();
    // Iterate through all the columns
    for (HNodePath path : worksheet.getHeaders().getAllPaths()) {
      HNode node = path.getLeaf();
      String nodeId = node.getId();
      writer.object();

      // Check if a semantic type exists for the HNode
      SemanticType type = types.getSemanticTypeForHNodeId(nodeId);
      if (type != null && type.getConfidenceLevel() != SemanticType.ConfidenceLevel.Low) {
        writer.key(JsonKeys.HNodeId.name()).value(type.getHNodeId())
            .key(JsonKeys.SemanticTypesArray.name()).array();

        ColumnNode alignmentColumnNode = hNodeIdToColumnNodeMap.get(type.getHNodeId());
        SemanticTypeNode domainNode = hNodeIdToDomainNodeMap.get(type.getHNodeId());
        if (alignmentColumnNode == null || domainNode == null) {
          logger.error("Column node or domain node not found in alignment."
              + " (This should not happen conceptually!): " + type);
          continue;
        }

        // Add the primary semantic type
        writer.object()
            .key(JsonKeys.Origin.name()).value(type.getOrigin().name())
            .key(JsonKeys.ConfidenceLevel.name()).value(type.getConfidenceLevel().name())
            .key(JsonKeys.isPrimary.name()).value(true);

        // Add the RDF literal type and language to show in the text box
        String rdfLiteralType = alignmentColumnNode.getRdfLiteralType() == null
            ? "" : alignmentColumnNode.getRdfLiteralType().getDisplayName();
        String language = alignmentColumnNode.getLanguage() == null
            ? "" : alignmentColumnNode.getLanguage();
        writer.key(JsonKeys.rdfLiteralType.name()).value(rdfLiteralType);
        writer.key(JsonKeys.language.name()).value(language);

        // String domainDisplayLabel = (domainNode.getLabel().getPrefix() != null
        //     && (!domainNode.getLabel().getPrefix().equals("")))
        //     ? (domainNode.getLabel().getPrefix() + ":" + domainNode.getLocalId())
        //     : domainNode.getLocalId();
        if (!type.isClass()) {
          // Property type: report the property as FullType and its domain separately.
          writer.key(JsonKeys.FullType.name()).value(type.getType().getUri())
              .key(JsonKeys.DisplayLabel.name()).value(type.getType().getDisplayName())
              .key(JsonKeys.DisplayRDFSLabel.name()).value(type.getType().getRdfsLabel())
              .key(JsonKeys.DisplayRDFSComment.name()).value(type.getType().getRdfsComment())
              .key(JsonKeys.DomainId.name()).value(domainNode.getId())
              .key(JsonKeys.DomainUri.name()).value(domainNode.getUri())
              .key(JsonKeys.DisplayDomainLabel.name()).value(domainNode.getDisplayId())
              .key(JsonKeys.DomainRDFSLabel.name()).value(domainNode.getRdfsLabel())
              .key(JsonKeys.DomainRDFSComment.name()).value(domainNode.getRdfsComment());
        } else {
          // Class type: the domain node itself is the type; domain fields stay empty.
          writer.key(JsonKeys.FullType.name()).value(domainNode.getId())
              .key(JsonKeys.DisplayLabel.name()).value(domainNode.getDisplayId())
              .key(JsonKeys.DisplayRDFSLabel.name()).value(domainNode.getRdfsLabel())
              .key(JsonKeys.DisplayRDFSComment.name()).value(domainNode.getRdfsComment())
              .key(JsonKeys.DomainId.name()).value("")
              .key(JsonKeys.DomainUri.name()).value("")
              .key(JsonKeys.DisplayDomainLabel.name()).value("")
              .key(JsonKeys.DomainRDFSLabel.name()).value("")
              .key(JsonKeys.DomainRDFSComment.name()).value("");
        }

        // Mark the special properties
        writer.key(JsonKeys.isMetaProperty.name())
            .value(isMetaProperty(type.getType(), alignmentColumnNode));

        writer.endObject();

        // Iterate through the synonym semantic types
        SynonymSemanticTypes synTypes = types.getSynonymTypesForHNodeId(nodeId);
        if (synTypes != null) {
          for (SemanticType synType : synTypes.getSynonyms()) {
            writer.object()
                .key(JsonKeys.HNodeId.name()).value(synType.getHNodeId())
                .key(JsonKeys.FullType.name()).value(synType.getType().getUri())
                .key(JsonKeys.Origin.name()).value(synType.getOrigin().name())
                .key(JsonKeys.ConfidenceLevel.name()).value(synType.getConfidenceLevel().name())
                .key(JsonKeys.DisplayLabel.name()).value(synType.getType().getDisplayName())
                .key(JsonKeys.DisplayRDFSLabel.name()).value(synType.getType().getRdfsLabel())
                .key(JsonKeys.DisplayRDFSComment.name()).value(synType.getType().getRdfsComment())
                .key(JsonKeys.isPrimary.name()).value(false);
            if (!synType.isClass()) {
              writer.key(JsonKeys.DomainUri.name()).value(synType.getDomain().getUri())
                  .key(JsonKeys.DomainId.name()).value("")
                  .key(JsonKeys.DisplayDomainLabel.name()).value(synType.getDomain().getDisplayName())
                  .key(JsonKeys.DomainRDFSLabel.name()).value(synType.getDomain().getRdfsLabel())
                  .key(JsonKeys.DomainRDFSComment.name()).value(synType.getDomain().getRdfsComment());
            } else {
              writer.key(JsonKeys.DomainId.name()).value("")
                  .key(JsonKeys.DomainUri.name()).value("")
                  .key(JsonKeys.DisplayDomainLabel.name()).value("")
                  .key(JsonKeys.DomainRDFSLabel.name()).value("")
                  .key(JsonKeys.DomainRDFSComment.name()).value("");
            }
            writer.endObject();
          }
        }
        writer.endArray();
      } else {
        writer.key(JsonKeys.HNodeId.name()).value(nodeId);
        writer.key(JsonKeys.SemanticTypesArray.name()).array().endArray();
      }

      writer.endObject();
    }
    writer.endArray();
    writer.endObject();

    pw.print(writer.toString());
  } catch (JSONException e) {
    logger.error("Error occurred while writing to JSON!", e);
  }
}
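// Illustrative sketch of the update emitted above (shape only, values are placeholders;
// this JSON is not part of the source). One entry is written per column:
// {
//   "worksheetId": "...",
//   "updateType": "SemanticTypesUpdate",
//   "Types": [
//     {
//       "HNodeId": "...",
//       "SemanticTypesArray": [
//         { "Origin": "...", "ConfidenceLevel": "...", "isPrimary": true,
//           "rdfLiteralType": "", "language": "", "FullType": "...", "DisplayLabel": "...",
//           "DomainId": "...", "DomainUri": "...", "isMetaProperty": false },
//         { "isPrimary": false, "FullType": "...", "DomainUri": "..." }  // one per synonym type
//       ]
//     }
//   ]
// }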
public void run() {
  long start = System.currentTimeMillis();

  // Find the corresponding HNodePath. Used to find examples for training the CRF model.
  HNodePath currentColumnPath = null;
  List<HNodePath> paths = worksheet.getHeaders().getAllPaths();
  for (HNodePath path : paths) {
    if (path.getLeaf().getId().equals(newType.getHNodeId())) {
      currentColumnPath = path;
      break;
    }
  }

  Map<ColumnFeature, Collection<String>> columnFeatures = new HashMap<ColumnFeature, Collection<String>>();

  // Prepare the column name for training
  String columnName = currentColumnPath.getLeaf().getColumnName();
  Collection<String> columnNameList = new ArrayList<String>();
  columnNameList.add(columnName);
  columnFeatures.put(ColumnFeature.ColumnHeaderName, columnNameList);

  // Train the model with the new type
  ArrayList<String> trainingExamples = SemanticTypeUtil.getTrainingExamples(worksheet, currentColumnPath);
  boolean trainingResult = false;
  String newTypeString = (newType.getDomain() == null)
      ? newType.getType().getUri()
      : newType.getDomain().getUri() + "|" + newType.getType().getUri();

  trainingResult = crfModelHandler.addOrUpdateLabel(newTypeString, trainingExamples, columnFeatures);
  if (!trainingResult) {
    logger.error("Error occurred while training the CRF model.");
  }

  // logger.debug("Using type:" + newType.getDomain().getUri() + "|" + newType.getType().getUri());

  // Add the new CRF column model for this column
  ArrayList<String> labels = new ArrayList<String>();
  ArrayList<Double> scores = new ArrayList<Double>();
  trainingResult = crfModelHandler.predictLabelForExamples(trainingExamples, 4, labels, scores, null, columnFeatures);
  if (!trainingResult) {
    logger.error("Error occurred while predicting labels");
  }
  CRFColumnModel newModel = new CRFColumnModel(labels, scores);
  worksheet.getCrfModel().addColumnModel(newType.getHNodeId(), newModel);

  long elapsedTimeMillis = System.currentTimeMillis() - start;
  float elapsedTimeSec = elapsedTimeMillis / 1000F;
  logger.info("Time required for training the semantic type: " + elapsedTimeSec);

  // long t2 = System.currentTimeMillis();
  // Identify the outliers for the column
  // SemanticTypeUtil.identifyOutliers(worksheet, newTypeString, currentColumnPath,
  //     vWorkspace.getWorkspace().getTagsContainer().getTag(TagName.Outlier),
  //     columnFeatures, crfModelHandler);
  // long t3 = System.currentTimeMillis();
  // logger.info("Identify outliers: " + (t3 - t2));
}
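// Illustrative example of the CRF label format built above (URIs are hypothetical, not from
// the source): a new type with domain http://example.org/Person and type
// http://example.org/name is registered with the CRF model under the label
// "http://example.org/Person|http://example.org/name"; when the new type has no domain,
// the label is just the type URI.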
@Override
public UpdateContainer doIt(Workspace workspace) throws CommandException {
  Worksheet wk = workspace.getWorksheet(worksheetId);
  SuperSelection selection = getSuperSelection(wk);
  String msg = String.format("Gen rule start,Time,%d, Worksheet,%s", System.currentTimeMillis(), worksheetId);
  logger.info(msg);

  // Find the HNodePath of the selected column.
  HashMap<String, String> rows = new HashMap<String, String>();
  HashMap<String, Integer> amb = new HashMap<String, Integer>();
  HNodePath selectedPath = null;
  List<HNodePath> columnPaths = wk.getHeaders().getAllPaths();
  for (HNodePath path : columnPaths) {
    if (path.getLeaf().getId().equals(hNodeId)) {
      selectedPath = path;
    }
  }
  Collection<Node> nodes = new ArrayList<Node>();
  wk.getDataTable().collectNodes(selectedPath, nodes, selection);

  // Collect the values of the requested node ids and their ambiguity scores.
  for (Node node : nodes) {
    String id = node.getId();
    if (!this.nodeIds.contains(id)) continue;
    String originalVal = node.getValue().asString();
    rows.put(id, originalVal);
    this.compResultString += originalVal + "\n";
    calAmbScore(id, originalVal, amb);
  }
  RamblerValueCollection vc = new RamblerValueCollection(rows);
  HashMap<String, Vector<String[]>> expFeData = new HashMap<String, Vector<String[]>>();
  inputs = new RamblerTransformationInputs(examples, vc);

  // Generate the program
  boolean results = false;
  int iterNum = 0;
  RamblerTransformationOutput rtf = null;

  // Initialize the vocabulary from (at most) the first 30 input values.
  Iterator<String> iterx = inputs.getInputValues().getValues().iterator();
  Vector<String> v = new Vector<String>();
  int vb_cnt = 0;
  while (iterx.hasNext() && vb_cnt < 30) {
    String eString = iterx.next();
    v.add(eString);
    vb_cnt++;
  }
  Vector<String> vob = UtilTools.buildDict(v);
  inputs.setVocab(vob.toArray(new String[vob.size()]));

  // Try to find a program within iterNum iterations.
  while (iterNum < 1 && !results) {
    rtf = new RamblerTransformationOutput(inputs);
    if (rtf.getTransformations().keySet().size() > 0) {
      results = true;
    }
    iterNum++;
  }

  Iterator<String> iter = rtf.getTransformations().keySet().iterator();
  // id -> {Org, Tar, Orgdis, Tardis}
  HashMap<String, HashMap<String, String>> resdata = new HashMap<String, HashMap<String, String>>();
  HashSet<String> keys = new HashSet<String>();
  while (iter.hasNext()) {
    String tpid = iter.next();
    ValueCollection rvco = rtf.getTransformedValues_debug(tpid);
    if (rvco == null) continue;

    // Construct the data to display.
    HashMap<String, String[]> xyzHashMap = new HashMap<String, String[]>();
    for (String key : rvco.getNodeIDs()) {
      HashMap<String, String> dict = new HashMap<String, String>();
      // Add to the example selection
      boolean isExp = false;
      String org = vc.getValue(key);
      String classLabel = rvco.getClass(key);
      String pretar = rvco.getValue(key);
      String dummyValue = pretar;
      if (pretar.indexOf("_FATAL_ERROR_") != -1) {
        dummyValue = org;
        // dummyValue = "#ERROR";
      }
      try {
        UtilTools.StringColorCode(org, dummyValue, dict);
      } catch (Exception ex) {
        logger.info(String.format("ColorCoding Exception %s, %s", org, dummyValue));
        // Fall back to an error marker for the target value.
        dict.put("Org", org);
        dict.put("Tar", "ERROR");
        dict.put("Orgdis", org);
        dict.put("Tardis", "ERROR");
      }
      for (TransformationExample exp : examples) {
        if (exp.getNodeId().compareTo(key) == 0) {
          if (!expFeData.containsKey(classLabel)) {
            Vector<String[]> vstr = new Vector<String[]>();
            String[] texp = {dict.get("Org"), pretar};
            vstr.add(texp);
            expFeData.put(classLabel, vstr);
          } else {
            String[] texp = {dict.get("Org"), pretar};
            expFeData.get(classLabel).add(texp);
          }
          isExp = true;
        }
      }
      if (!isExp) {
        String[] pair = {dict.get("Org"), dict.get("Tar"), pretar, classLabel};
        xyzHashMap.put(key, pair);
      }
      resdata.put(key, dict);
    }
    if (!rtf.nullRule) keys.add(getBestExample(xyzHashMap, expFeData));
  }

  // Find the best row
  String vars = "";
  String expstr = "";
  String recmd = "";
  for (TransformationExample x : examples) {
    expstr += String.format("%s|%s", x.getBefore(), x.getAfter());
  }
  expstr += "|";
  if (rtf.nullRule) {
    keys.clear();
    // keys.add("-2"); // "-2" indicates a null rule
  }
  if (!resdata.isEmpty() && !rtf.nullRule) {
    recmd = resdata.get(keys.iterator().next()).get("Org");
  } else {
    recmd = "";
  }
  msg = String.format("Gen rule end, Time,%d, Worksheet,%s,Examples:%s,Recmd:%s",
      System.currentTimeMillis(), worksheetId, expstr, recmd);
  logger.info(msg);
  return new UpdateContainer(new CleaningResultUpdate(hNodeId, resdata, vars, keys));
}
@Override
public UpdateContainer doIt(Workspace workspace) throws CommandException {
  Worksheet worksheet = workspace.getWorksheet(worksheetId);
  SuperSelection selection = getSuperSelection(worksheet);
  HNodePath selectedPath = null;
  List<HNodePath> columnPaths = worksheet.getHeaders().getAllPaths();
  for (HNodePath path : columnPaths) {
    if (path.getLeaf().getId().equals(hNodeId)) {
      selectedPath = path;
    }
  }
  Collection<Node> nodes = new ArrayList<Node>();
  workspace.getFactory().getWorksheet(worksheetId).getDataTable()
      .collectNodes(selectedPath, nodes, selection);

  try {
    // Build the request payload: one {id, value} record per node in the selected column.
    JSONArray requestJsonArray = new JSONArray();
    for (Node node : nodes) {
      String id = node.getId();
      String originalVal = node.getValue().asString();
      JSONObject jsonRecord = new JSONObject();
      jsonRecord.put("id", id);
      originalVal = originalVal == null ? "" : originalVal;
      jsonRecord.put("value", originalVal);
      requestJsonArray.put(jsonRecord);
    }
    String jsonString = requestJsonArray.toString();

    // String url = "http://localhost:8080/cleaningService/IdentifyData";
    // String url = "http://localhost:8070/myWS/IdentifyData";
    String url = ServletContextParameterMap.getParameterValue(ContextParameter.CLEANING_SERVICE_URL);

    // POST the payload to the cleaning service and read back its response body.
    HttpClient httpclient = new DefaultHttpClient();
    URI u = new URI(url);
    List<NameValuePair> formparams = new ArrayList<NameValuePair>();
    formparams.add(new BasicNameValuePair("json", jsonString));
    HttpPost httppost = new HttpPost(u);
    httppost.setEntity(new UrlEncodedFormEntity(formparams, "UTF-8"));
    HttpResponse response = httpclient.execute(httppost);
    HttpEntity entity = response.getEntity();
    StringBuffer out = new StringBuffer();
    if (entity != null) {
      BufferedReader buf = new BufferedReader(new InputStreamReader(entity.getContent()));
      String line = buf.readLine();
      while (line != null) {
        out.append(line);
        line = buf.readLine();
      }
    }
    // logger.trace(out.toString());
    // logger.info("Connection to " + url + " successful.");

    final JSONObject data1 = new JSONObject(out.toString());
    // logger.trace("Data--->" + data1);

    return new UpdateContainer(
        new AbstractUpdate() {
          @Override
          public void generateJson(String prefix, PrintWriter pw, VWorkspace vWorkspace) {
            JSONObject outputObject = new JSONObject();
            try {
              outputObject.put("updateType", "CleaningServiceOutput");
              outputObject.put("chartData", data1);
              outputObject.put("hNodeId", hNodeId);
              // logger.trace(outputObject.toString(4));
            } catch (JSONException e) {
              pw.print("Error");
            }
            pw.print(outputObject.toString());
          }
        });
  } catch (Exception e) {
    e.printStackTrace();
    return new UpdateContainer(new ErrorUpdate("Error!"));
  }
}
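// Illustrative request/response shape (values are hypothetical, not from the source): the
// command POSTs a single form parameter named "json" whose value looks like
//   [{"id":"N1","value":"Jan 5, 2012"},{"id":"N2","value":"Feb 7, 2012"}]
// to the configured CLEANING_SERVICE_URL, and whatever JSON object the service returns is
// forwarded to the client as the "chartData" field of a "CleaningServiceOutput" update.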