/**
 * Converts a string representation such as {@code "en_US"} or
 * {@code "zh_Hant_TW"} into a {@link java.util.Locale}.
 *
 * <p>The input is split on {@code "_"} into language, country and variant
 * parts. One, two or three parts are accepted; anything else (including an
 * empty input) yields {@code null}.
 *
 * @param str string representation of the locale, e.g. {@code "en_US"}
 * @return the corresponding Locale, or {@code null} if {@code str} is empty
 *     or does not split into one to three parts
 */
public static java.util.Locale getLocaleByString(String str) {
  if (Utility.isEmpty(str)) {
    return null;
  }
  String[] ary = TextUtility.splitString(str, "_");
  switch (ary.length) {
    case 1:
      return new java.util.Locale(ary[0]);
    case 2:
      return new java.util.Locale(ary[0], ary[1]);
    case 3:
      return new java.util.Locale(ary[0], ary[1], ary[2]);
    default:
      // Zero parts or more than three parts: not a recognizable locale string.
      return null;
  }
}
/**
 * Validates the product licence key.
 *
 * <p>Concatenates each module's name, type and number together with the
 * licence IP, expiry date and version, digests the result with MD5, and
 * compares the encoded digest against the stored licence key. The outcome is
 * written to {@code isValidLicense} under {@code lock}.
 *
 * <p>NOTE(review): {@code readFlag} is read here as a cache guard but is never
 * set to {@code true} in this method — confirm it is set elsewhere, otherwise
 * the fast path never engages.
 *
 * @return {@code true} if the IP check passes and the licence key matches the
 *     computed digest; {@code false} otherwise (including on digest failure)
 */
public boolean validateLicence() {
  // Fast path: result already cached from an earlier validation.
  if (this.readFlag) {
    return (this.isValidLicense);
  }
  getXMLElements();
  boolean flag = false;
  synchronized (lock) {
    if (validateIP()) {
      // Build the digest input from every module plus the licence info.
      StringBuilder digestInput = new StringBuilder();
      for (Enumeration e = Modules.elements(); e.hasMoreElements(); ) {
        ModuleLicenceInfo ml = (ModuleLicenceInfo) e.nextElement();
        digestInput.append(ml.getName());
        digestInput.append(ml.getType());
        digestInput.append(ml.getNo());
      }
      digestInput.append(Info.getIp());
      digestInput.append(Info.getExpDate());
      digestInput.append(Info.getVersion());
      try {
        if (TextUtility.encodeObject(md5util.getDigest(digestInput.toString()))
            .equals(Info.getKey().trim())) {
          flag = true;
        }
      } catch (Exception e) {
        // Digest/encoding failure leaves flag == false (licence invalid).
        log.error(Priority.INFO, e);
      }
    }
    this.isValidLicense = flag;
  }
  return flag;
}
/** * @param isSimplifyEntity * @param parsedFileName * @param aimedDataFileName * @param outputFile * @param medtType * @param isRemoveOverlappingEntities * @throws Exception */ public void generateTPWFvectorOutput( ArrayList<Sentence> listSentence, String outputFile, int medtType, String entPairFileName, ClauseAnalyser.eDataFilterOption relToBeConsidered, String inClauseBoundFileName) throws Exception { boolean useWalkFeatures = true, useRegExPatterns = false, useDepPatterns = true, useTriggers = true, useNegativeCues = true, discardDepRelUsingProbabilityInReducedGraph = false, triggersFromWholeRGinsteadOfLCP = true; if (TextUtility.isEmptyString(TKOutputGenerator.triggerFileName)) useTriggers = false; String str = ""; if (discardDepRelUsingProbabilityInReducedGraph) str += "discardDepRelUsingProbabilityInReducedGraph "; if (useWalkFeatures) str += "WalkFeatures "; if (useRegExPatterns) str += "RegExPatterns "; if (useDepPatterns) str += "DepPatterns "; if (useTriggers) str += "Triggers "; if (triggersFromWholeRGinsteadOfLCP) str += "TriggersFromWholeRGinsteadOfLCP "; if (useNegativeCues) str += "NegativeCues "; System.out.println(str); PatternsDepRelFromGraph clsWVG = new PatternsDepRelFromGraph(); if (PatternsDepRelFromGraph.listOfAllPatterns.size() == 0) { clsWVG.collectAllDepRelPatternsFromTrainData( listSentence, discardDepRelUsingProbabilityInReducedGraph); } int[][] arrClauseBoundOfSen = new TKOutputPST().getClauseBoundOfAllSen(inClauseBoundFileName); FileUtility.writeInFile(outputFile, "", false); // read trigger word list Triggers.readTriggersAndNegativeWord(); for (int s = 0; s < listSentence.size(); s++) { Sentence objCurSen = listSentence.get(s); int senIndex = TKOutputPST.listAllSenIDs.indexOf(objCurSen.senID); // only those sentences are taken into account which has more than one entity annotations if (objCurSen.listOfEntities.size() > 1) { generateVectorForSen( objCurSen, medtType, entPairFileName, discardDepRelUsingProbabilityInReducedGraph, 
useWalkFeatures, useRegExPatterns, useDepPatterns, useTriggers, triggersFromWholeRGinsteadOfLCP, useNegativeCues, relToBeConsidered, senIndex > 0 ? arrClauseBoundOfSen[senIndex] : null); } } FileUtility.writeInFile(outputFile, GenericFeatVect.getInstanceVectors(), false); }
/** * @param tokenWithPos * @param senID * @param sentence * @param listEnt * @param listRel * @param listDependencies * @param medtType * @param entPairFileName * @return * @throws IOException */ private void generateVectorForSen( Sentence objCurSen, int medtType, String entPairFileName, boolean discardDepRelUsingProbabilityInReducedGraph, boolean useWalkFeatures, boolean useRegExPatterns, boolean useDepPatterns, boolean useTriggers, boolean triggersFromWholeRGinsteadOfLCP, boolean useNegativeCues, ClauseAnalyser.eDataFilterOption relToBeConsidered, int[] arrClauseBoundOfSen) throws IOException { // for each pair of entities, find minimal subtrees and output it with 1 or 0 // 1 represents there exists a relation between those entities for (int r = 0; r < objCurSen.listRels.size(); r++) { Entity e1 = objCurSen.getEntityById(objCurSen.listRels.get(r).arg1); Entity e2 = objCurSen.getEntityById(objCurSen.listRels.get(r).arg2); // checking relation type if (TKOutputPST.skipInstance(arrClauseBoundOfSen, relToBeConsidered, e1, e2, objCurSen, r)) continue; if (!objCurSen.listRels.get(r).isPositive) TKOutputPST.totalRelNeg++; else TKOutputPST.totalRelPos++; boolean isSet = setInpVectFromDepGraphOfPairsAndTrigger( objCurSen.listRels.get(r), objCurSen, medtType, discardDepRelUsingProbabilityInReducedGraph, useWalkFeatures, useRegExPatterns, useDepPatterns, useTriggers, triggersFromWholeRGinsteadOfLCP, useNegativeCues, e1, e2); if (isSet) GenericFeatVect.listOfAllInstancePolarity.add( objCurSen.listRels.get(r).isPositive ? 1 : -1); // */ if (!TextUtility.isEmptyString(entPairFileName)) { if (isSet) FileUtility.writeInFile(entPairFileName, e1.id + "\t" + e2.id + "\n", true); /* //if ( !str.isEmpty() ) if ( !isSet ) FileUtility.writeInFile(entPairFileName, objCurSen.listRels.get(r).printString() + "\tFOUND\n", true); else { FileUtility.writeInFile(entPairFileName, objCurSen.listRels.get(r).printString() + "\tNOT_FOUND\n", true); } */ } } }