public TTGlyph toSimpleGlyph() {
    // Convert the glyph file into an array of contours.
    XContour[] contours = toContours();

    if ((contours == null) && (!isRequiredGlyph())) {
        return null;
    } // if

    TTGlyph retval = new TTGlyph();
    retval.setSimple(true);
    retval.setAdvanceWidth(getAdvanceWidth());

    if (contours == null) {
        return retval;
    } // if

    ArrayList<EContourPoint> points = new ArrayList<>();
    for (int i = 0; i < contours.length; i++) {
        XContour contour = contours[i];
        XContourPoint[] contourPoints = contour.getContourPoint();
        for (int j = 0; j < contourPoints.length; j++) {
            points.add((EContourPoint) contourPoints[j]);
        } // for j
        retval.addEndPoint(points.size() - 1);
    } // for i

    for (EContourPoint point : points) {
        loadContourPoint(retval, point);
    } // for point

    boolean hasGridfit = false;
    // Index-based loop: the point index i is needed by loadGridfit.
    for (int i = 0; i < points.size(); i++) {
        EContourPoint point = points.get(i);
        if (!point.isRounded()) {
            continue;
        } // if
        hasGridfit = true;
        loadGridfit(retval, point, i);
    } // for i

    if (hasGridfit) {
        retval.addInstruction(TTGlyph.IUP1);
        retval.addInstruction(TTGlyph.IUP0);
    } // if

    // Index-based loop: the point index i is needed by loadHint.
    for (int i = 0; i < points.size(); i++) {
        EContourPoint point = points.get(i);
        if (point.getHint().length == 0) {
            continue;
        } // if
        loadHint(retval, point, i);
    } // for i

    return retval;
}
/**
 * Generates an array of XContour from the local contours and modules.
 * Used for TTF building.
 */
private XContour[] toContours() {
    ArrayList<XContour> list = new ArrayList<>();

    XContour[] contours = m_glyph.getBody().getContour();
    for (int i = 0; i < contours.length; i++) {
        EContour contour = (EContour) contours[i];
        list.add(contour.toQuadratic());
    } // for i

    XModule[] modules = m_glyph.getBody().getModule();
    for (int i = 0; i < modules.length; i++) {
        EModuleInvoke module = (EModuleInvoke) modules[i];
        // push and pop happen inside toContour
        list.add(module.toContour(new AffineTransform()));
    } // for i

    if (list.isEmpty()) {
        return null;
    }

    XContour[] retval = new XContour[list.size()];
    for (int i = 0; i < list.size(); i++) {
        retval[i] = list.get(i);
    } // for i
    return retval;
}
// Add/Insert
public int add(Script script) {
    // Find the correct spot to insert it so the list stays alphabetical.
    int i, limit;
    for (i = 0, limit = scripts.size(); i < limit; i++) {
        Script scriptTemp = (Script) scripts.get(i);
        if (scriptTemp.getName().compareTo(script.getName()) >= 0) {
            break;
        }
    }
    scripts.add(i, script);

    // Update the table.
    fireTableRowsInserted(i, i);
    return i;
}
public boolean download(DownloadListener dl) throws Exception {
    // 1) get crcod, cdn, and cookies
    handshake();

    // 2) get XML
    ArrayList<String> paths = parseXML();
    dl.setTotal(getTotal());

    // 3) get pages
    byte[] key = {
        99, 49, 51, 53, 100, 54, 56, 56, 57, 57, 99, 56, 50, 54,
        99, 101, 100, 55, 99, 52, 57, 98, 99, 55, 54, 97, 97, 57,
        52, 56, 57, 48
    };
    BlowFishKey bfkey = new BlowFishKey(key);

    for (int i = 0; i < paths.size(); i++) {
        if (dl.isDownloadAborted()) {
            return true;
        }

        // rid is just a random number from 0-9999
        URL url = new URL("http://mangaonweb.com/page.do?cdn=" + cdn
                + "&cpn=" + paths.get(i) + "&crcod=" + crcod
                + "&rid=" + (int) (Math.random() * 10000));
        byte[] encrypted = downloadByteArray(url);
        bfkey.decrypt(encrypted, 0);

        RandomAccessFile output = new RandomAccessFile(dl.downloadPath(this, i), "rw");
        output.write(encrypted);
        output.close();
        dl.downloadIncrement(this);
    }

    dl.downloadFinished(this);
    return true;
}
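// Illustrative sketch, not part of the original source: a console-logging
// listener shaped after the calls made in download() above (setTotal,
// isDownloadAborted, downloadPath, downloadIncrement, downloadFinished).
// The real DownloadListener interface is not shown in this file, so the
// parameter and return types below are assumptions inferred from those
// call sites.
class ConsoleDownloadListener /* implements DownloadListener (assumed) */ {
    private volatile boolean aborted = false;
    private int total = 0;
    private int done = 0;

    public void setTotal(int total) {
        this.total = total;
    }

    public boolean isDownloadAborted() {
        return aborted;
    }

    // Where page i of the given chapter should be written; download() passes
    // this value straight to RandomAccessFile.
    public String downloadPath(Object chapter, int i) {
        return "page-" + i + ".bin";
    }

    public void downloadIncrement(Object chapter) {
        done++;
        System.out.println("downloaded " + done + "/" + total);
    }

    public void downloadFinished(Object chapter) {
        System.out.println("download finished");
    }

    // Callers can flip this flag to make download() stop before the next page.
    public void abort() {
        aborted = true;
    }
}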
// Getters
public Script get(int i) {
    return (Script) scripts.get(i);
}
public static PetriNet convert(ConfigurableEPC baseEPC) {
    HashMap<EPCFunction, Transition> functionActivityMapping;
    HashMap<EPCConnector, Place> xorconnectorChoiceMapping;

    // HV: Initialize the mappings.
    functionActivityMapping = new HashMap<EPCFunction, Transition>();
    xorconnectorChoiceMapping = new HashMap<EPCConnector, Place>();

    // Check to use the weights if necessary
    // HV: Add both mappings. On completion, these will be filled.
    PetriNet petrinet = EPCToPetriNetConverter.convert(baseEPC, new HashMap(),
            functionActivityMapping, xorconnectorChoiceMapping);

    HashSet visible = new HashSet();

    // HV: The next block is taken care of by the functionActivityMapping below.
    /*
     * Iterator it = petrinet.getTransitions().iterator();
     * while (it.hasNext()) {
     *     Transition t = (Transition) it.next();
     *     if (t.object instanceof EPCFunction) {
     *         // if (t.getLogEvent() != null) {
     *         // Add transitions with LogEvent (i.e. referring to functions)
     *         visible.add(t);
     *     }
     * }
     */

    // HV: Prevent the places mapped onto from being reduced.
    visible.addAll(functionActivityMapping.values());
    visible.addAll(xorconnectorChoiceMapping.values());
    Message.add(visible.toString(), Message.DEBUG);

    Iterator it = petrinet.getPlaces().iterator();
    while (it.hasNext()) {
        Place p = (Place) it.next();
        if (p.inDegree() * p.outDegree() == 0) {
            // Add initial and final places to visible, i.e. places that
            // refer to input and output events.
            visible.add(p);
        }
    }

    // Reduce the PetriNet with Murata rules, while keeping the visible nodes.
    PetriNetReduction pnred = new PetriNetReduction();
    pnred.setNonReducableNodes(visible);

    HashMap pnMap = new HashMap(); // Used to map pre-reduction nodes to post-reduction nodes.
    PetriNet reduced = pnred.reduce(petrinet, pnMap);

    if (reduced != petrinet) {
        // Update both mappings from pre-reduction nodes to post-reduction nodes.
        HashMap<EPCFunction, Transition> newFunctionActivityMapping = new HashMap<EPCFunction, Transition>();
        for (EPCFunction function : functionActivityMapping.keySet()) {
            Transition transition = (Transition) functionActivityMapping.get(function);
            if (pnMap.keySet().contains(transition)) {
                newFunctionActivityMapping.put(function, (Transition) pnMap.get(transition));
            }
        }
        functionActivityMapping = newFunctionActivityMapping;

        HashMap<EPCConnector, Place> newXorconnectorChoiceMapping = new HashMap<EPCConnector, Place>();
        for (EPCConnector connector : xorconnectorChoiceMapping.keySet()) {
            Place place = (Place) xorconnectorChoiceMapping.get(connector);
            if (pnMap.keySet().contains(place)) {
                newXorconnectorChoiceMapping.put(connector, (Place) pnMap.get(place));
            }
        }
        xorconnectorChoiceMapping = newXorconnectorChoiceMapping;
    }

    reduced.makeClusters();

    // Filter out the trailing "\nunknown:normal" suffix from task identifiers.
    ArrayList<Transition> alTrans = reduced.getVisibleTasks();
    for (int i = 0; i < alTrans.size(); i++) {
        Transition t = alTrans.get(i);
        String id = t.getIdentifier();
        int idx = id.indexOf("\\nunknown:normal");
        if (idx > 0) {
            id = id.substring(0, idx);
        }
        // Replace the label with the value found in the dictionary, if any.
        String mappedId = htDict.get(id);
        if (mappedId != null) {
            t.setIdentifier(mappedId);
        } else {
            t.setIdentifier(id);
        }
    }

    return reduced;
}
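// Illustrative sketch, not part of the original source: driving the conversion
// above and printing the labels of the visible tasks of the reduced net. Only
// convert(ConfigurableEPC), getVisibleTasks() and getIdentifier() come from the
// code above; the helper name printVisibleTasks is an assumption.
static void printVisibleTasks(ConfigurableEPC baseEPC) {
    PetriNet reduced = convert(baseEPC);
    for (Transition t : reduced.getVisibleTasks()) {
        System.out.println(t.getIdentifier());
    }
}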
public String getText() {
    String text = accumulator.toString();
    if (text.trim().length() == 0) {
        return "";
    }

    /*text = text.replace("\n", " ");
    text = text.replace("  ", " ");*/

    if (counting) {
        /*
        StringTokenizer st = new StringTokenizer(text, delimiters, true);
        int count = 0;
        while (st.hasMoreTokens()) {
            String token = st.nextToken().trim();
            if (token.length() == 0) {
                continue;
            }
            count++;
        }
        */
        int i = currentPatentIndex;
        int count = text.length();
        while (i < patents.size()) {
            PatentItem currentPatent = patents.get(i);
            if (currentPatent != null) {
                int startOffset = currentPatent.getOffsetBegin();
                int endOffset = currentPatent.getOffsetEnd();
                if ((startOffset >= offset) && (endOffset <= offset + count)) {
                    String context = currentPatent.getContext();
                    /*System.out.println("OFFSET: " + offset);
                    System.out.println("count: " + count);
                    System.out.println("startOffset: " + startOffset);
                    System.out.println("endOffset: " + endOffset);
                    System.out.println("context: " + context);
                    System.out.println("text: " + text);*/
                    String target = "";
                    if (context.charAt(0) == ' ') {
                        target = " <ref type=\"patent\">" + context.substring(1, context.length()) + "</ref>";
                    } else {
                        target = "<ref type=\"patent\">" + context + "</ref>";
                    }
                    text = text.replace(context, target);
                    currentPatentIndex = i;
                }
            }
            i++;
        }

        // i = currentArticleIndex;
        i = 0;
        while (i < articles.size()) {
            BibDataSet currentArticle = articles.get(i);
            if (currentArticle != null) {
                List<Integer> offsets = currentArticle.getOffsets();
                int startOffset = -1;
                int endOffset = -1;
                String context = currentArticle.getRawBib().trim();
                if (offsets.size() > 0) {
                    if (offsets.get(0) != null) {
                        startOffset = offsets.get(0).intValue();
                        /*StringTokenizer stt = new StringTokenizer(context, delimiters, true);
                        int count2 = 0;
                        while (stt.hasMoreTokens()) {
                            String token2 = stt.nextToken().trim();
                            if (token2.length() == 0) {
                                continue;
                            }
                            count2++;
                        }*/
                        // endOffset = offsets.get(1).intValue();
                        endOffset = startOffset + context.length();
                    }
                }
                // if ((startOffset >= offset) && (endOffset <= offset + count)) {
                if (startOffset >= offset) {
                    /*System.out.println("OFFSET: " + offset);
                    System.out.println("count: " + count);
                    System.out.println("startOffset: " + startOffset);
                    System.out.println("endOffset: " + endOffset);
                    System.out.println("context: " + context);
                    System.out.println("text: " + text);*/
                    String target = " <ref type=\"npl\">" + context + "</ref> ";
                    text = text.replace(context, target);
                    currentArticleIndex = i;
                }
            }
            i++;
        }
        offset += count;
    }
    return text;
}
public void endElement(java.lang.String uri, java.lang.String localName, java.lang.String qName)
        throws SAXException {
    // if (!qName.equals("TOKEN") && !qName.equals("BLOCK") && !qName.equals("TEXT"))
    //     System.out.println(qName);

    if (qName.equals("TEXT")) {
        blabla.append("\n");
        LayoutToken token = new LayoutToken();
        token.setText("\n");
        block.addToken(token);
        accumulator.setLength(0);
        tokenizations.add("\n");
    } else if (qName.equals("METADATA")) {
        accumulator.setLength(0);
    } else if (qName.equals("TOKEN")) {
        String tok0 = TextUtilities.clean(getText());

        if (block.getStartToken() == -1) {
            block.setStartToken(tokenizations.size());
        }

        if (tok0.length() > 0) {
            StringTokenizer st = new StringTokenizer(tok0, TextUtilities.fullPunctuations, true);
            boolean diaresis = false;
            boolean accent = false;
            boolean keepLast = false;
            while (st.hasMoreTokens()) {
                diaresis = false;
                accent = false;
                keepLast = false;

                String tok = st.nextToken();
                if (tok.length() > 0) {
                    LayoutToken token = new LayoutToken();

                    if ((previousToken != null) && (tok != null)
                            && (previousToken.length() > 0) && (tok.length() > 0)
                            && blabla.length() > 0) {
                        Character leftChar = previousTok.getText().charAt(previousTok.getText().length() - 1);
                        Character rightChar = tok.charAt(0);

                        ModifierClass leftClass = classifyChar(leftChar);
                        ModifierClass rightClass = classifyChar(rightChar);
                        ModifierClass modifierClass = ModifierClass.NOT_A_MODIFIER;

                        if (leftClass != ModifierClass.NOT_A_MODIFIER
                                || rightClass != ModifierClass.NOT_A_MODIFIER) {
                            Character baseChar = null;
                            Character modifierChar = null;

                            if (leftClass != ModifierClass.NOT_A_MODIFIER) {
                                if (rightClass != ModifierClass.NOT_A_MODIFIER) {
                                    // assert false;
                                    // keeping both characters, but setting the class to "not a modifier"
                                    baseChar = leftChar;
                                    modifierChar = rightChar;
                                    modifierClass = ModifierClass.NOT_A_MODIFIER;
                                } else {
                                    baseChar = rightChar;
                                    modifierChar = leftChar;
                                    modifierClass = leftClass;
                                }
                            } else {
                                baseChar = leftChar;
                                modifierChar = rightChar;
                                modifierClass = rightClass;
                            }

                            String updatedChar = modifyCharacter(baseChar, modifierChar);

                            tokenizations.remove(tokenizations.size() - 1);
                            if (tokenizations.size() > 0) {
                                tokenizations.remove(tokenizations.size() - 1);
                            }

                            blabla.deleteCharAt(blabla.length() - 1);
                            if (blabla.length() > 0) {
                                blabla.deleteCharAt(blabla.length() - 1);
                            }

                            removeLastCharacterIfPresent(previousTok);

                            if (updatedChar != null) {
                                blabla.append(updatedChar);
                                previousTok.setText(previousTok.getText() + updatedChar);
                            }
                            blabla.append(tok.substring(1, tok.length()));
                            previousTok.setText(previousTok.getText() + tok.substring(1, tok.length()));
                            tokenizations.add(previousTok.getText());

                            diaresis = (modifierClass == ModifierClass.DIAERESIS
                                    || modifierClass == ModifierClass.NORDIC_RING
                                    || modifierClass == ModifierClass.CZECH_CARON
                                    || modifierClass == ModifierClass.TILDE
                                    || modifierClass == ModifierClass.CEDILLA);

                            accent = (modifierClass == ModifierClass.ACUTE_ACCENT
                                    || modifierClass == ModifierClass.CIRCUMFLEX
                                    || modifierClass == ModifierClass.GRAVE_ACCENT);

                            if (rightClass != ModifierClass.NOT_A_MODIFIER) {
                                tok = ""; // resetting the current token as it is a single-item
                            }
                        }
                    }

                    if (tok != null) {
                        // actually in certain cases, the extracted string under token can be
                        // a chunk of text with separators that need to be preserved
                        // tok = tok.replace(" ", "");
                    }

                    if ((!diaresis) && (!accent)) {
                        // blabla.append(" ");
                        blabla.append(tok);
                        token.setText(tok);
                        tokenizations.add(tok);
                    } else {
                        tok = "";
                        keepLast = true;
                    }

                    /*
                     * StringTokenizer st0 = new StringTokenizer(tok0, TextUtilities.fullPunctuations, true);
                     * while (st0.hasMoreTokens()) {
                     *     String tok = st0.nextToken();
                     *     tokenizations.add(tok);
                     * }
                     * tokenizations.add(" ");
                     */

                    /*
                     * boolean punct1 = false;
                     * boolean punct2 = false;
                     * boolean punct3 = false;
                     * String content = null;
                     * int i = 0;
                     * for (; i < TextUtilities.punctuations.length(); i++) {
                     *     if (tok.length() > 0) {
                     *         if (tok.charAt(tok.length()-1) == TextUtilities.punctuations.charAt(i)) {
                     *             punct1 = true;
                     *             content = tok.substring(0, tok.length()-1);
                     *             if (tok.length() > 1) {
                     *                 int j = 0;
                     *                 for (; j < TextUtilities.punctuations.length(); j++) {
                     *                     if (tok.charAt(tok.length()-2) == TextUtilities.punctuations.charAt(j)) {
                     *                         punct3 = true;
                     *                         content = tok.substring(0, tok.length()-2);
                     *                     }
                     *                 }
                     *             }
                     *             break;
                     *         }
                     *     }
                     * }
                     * if (tok.length() > 0) {
                     *     if ((tok.startsWith("(")) && (tok.length() > 1)) {
                     *         if ((punct3) && (tok.length() > 2))
                     *             content = tok.substring(1, tok.length()-2);
                     *         else if (punct1)
                     *             content = tok.substring(1, tok.length()-1);
                     *         else
                     *             content = tok.substring(1, tok.length());
                     *         punct2 = true;
                     *         token.setText("(");
                     *     } else if ((tok.startsWith("[")) && (tok.length() > 1)) {
                     *         if ((punct3) && (tok.length() > 2))
                     *             content = tok.substring(1, tok.length()-2);
                     *         else if (punct1)
                     *             content = tok.substring(1, tok.length()-1);
                     *         else
                     *             content = tok.substring(1, tok.length());
                     *         punct2 = true;
                     *         token.setText("[");
                     *     } else if ((tok.startsWith("\"")) && (tok.length() > 1)) {
                     *         if ((punct3) && (tok.length() > 2))
                     *             content = tok.substring(1, tok.length()-2);
                     *         else if (punct1)
                     *             content = tok.substring(1, tok.length()-1);
                     *         else
                     *             content = tok.substring(1, tok.length());
                     *         punct2 = true;
                     *         token.setText("\"");
                     *     }
                     * }
                     */

                    if (currentRotation)
                        currentFontSize = currentFontSize / 2;

                    /*
                     * if (punct2) {
                     *     if (currentFont != null)
                     *         token.setFont(currentFont.toLowerCase());
                     *     else
                     *         token.setFont("default");
                     *     token.setItalic(currentItalic);
                     *     token.setBold(currentBold);
                     *     token.setRotation(currentRotation);
                     *     token.setColorFont(colorFont);
                     *     token.setX(currentX);
                     *     token.setY(currentY);
                     *     token.setWidth(currentWidth);
                     *     token.setHeight(currentHeight);
                     *     token.setFontSize(currentFontSize);
                     *     block.addToken(token);
                     *
                     *     token = new LayoutToken();
                     *     token.setText(content);
                     * }
                     * if (punct1) {
                     *     token.setText(content);
                     *     if (currentFont != null)
                     *         token.setFont(currentFont.toLowerCase());
                     *     else
                     *         token.setFont("default");
                     *     token.setItalic(currentItalic);
                     *     token.setBold(currentBold);
                     *     token.setRotation(currentRotation);
                     *     token.setColorFont(colorFont);
                     *     token.setX(currentX);
                     *     token.setY(currentY);
                     *     token.setWidth(currentWidth);
                     *     token.setHeight(currentHeight);
                     *     token.setFontSize(currentFontSize);
                     *     block.addToken(token);
                     *
                     *     if (punct3) {
                     *         token = new LayoutToken();
                     *         token.setText("" + tok.charAt(tok.length()-2));
                     *         if (currentFont != null)
                     *             token.setFont(currentFont.toLowerCase());
                     *         else
                     *             token.setFont("default");
                     *         token.setItalic(currentItalic);
                     *         token.setBold(currentBold);
                     *         token.setRotation(currentRotation);
                     *         token.setColorFont(colorFont);
                     *         token.setX(currentX);
                     *         token.setY(currentY);
                     *         token.setWidth(currentWidth);
                     *         token.setHeight(currentHeight);
                     *         token.setFontSize(currentFontSize);
                     *         block.addToken(token);
                     *     }
                     *
                     *     token = new LayoutToken();
                     *     token.setText("" + tok.charAt(tok.length()-1));
                     * }
                     */

                    if (currentFont != null)
                        token.setFont(currentFont.toLowerCase());
                    else
                        token.setFont("default");
                    token.setItalic(currentItalic);
                    token.setBold(currentBold);
                    token.setRotation(currentRotation);
                    token.setColorFont(colorFont);
                    token.setX(currentX);
                    token.setY(currentY);
                    token.setWidth(currentWidth);
                    token.setHeight(currentHeight);
                    token.setFontSize(currentFontSize);

                    if (!diaresis && !accent) {
                        block.addToken(token);
                    }

                    if (block.getFont() == null) {
                        if (currentFont != null)
                            block.setFont(currentFont.toLowerCase());
                        else
                            block.setFont("default");
                    }
                    if (nbTokens == 0) {
                        block.setItalic(currentItalic);
                        block.setBold(currentBold);
                    }
                    if (block.getColorFont() == null)
                        block.setColorFont(colorFont);

                    if (block.getX() == 0.0)
                        block.setX(currentX);
                    if (block.getY() == 0.0)
                        block.setY(currentY);
                    if (block.getWidth() == 0.0)
                        block.setWidth(currentWidth);
                    if (block.getHeight() == 0.0)
                        block.setHeight(currentHeight);
                    if (block.getFontSize() == 0.0)
                        block.setFontSize(currentFontSize);

                    if (!diaresis && !accent) {
                        previousToken = tok;
                        previousTok = token;
                    } else {
                        previousToken = previousTok.getText();
                    }

                    nbTokens++;
                    accumulator.setLength(0);
                }
            }

            if (tokenizations.size() > 0) {
                String justBefore = tokenizations.get(tokenizations.size() - 1);
                if (!justBefore.endsWith("-")) {
                    tokenizations.add(" ");
                    blabla.append(" ");
                }
            }
        }
        block.setEndToken(tokenizations.size());
    } else if (qName.equals("PAGE")) {
        // page markers are useful to detect headers (the same first line(s)
        // appearing on each page)
        if (block != null) {
            blabla.append("\n");
            tokenizations.add("\n");
            block.setText(blabla.toString());
            block.setNbTokens(nbTokens);
            doc.addBlock(block);
        }
        Block block0 = new Block();
        block0.setText("@PAGE\n");
        block0.setNbTokens(0);
        block0.setPage(currentPage);
        doc.addBlock(block0);

        block = new Block();
        block.setPage(currentPage);
        blabla = new StringBuffer();
        nbTokens = 0;
        // blabla.append("\n@block\n");
        tokenizations.add("\n");
    } else if (qName.equals("IMAGE")) {
        if (block != null) {
            blabla.append("\n");
            block.setText(blabla.toString());
            block.setNbTokens(nbTokens);
            doc.addBlock(block);
        }
        block = new Block();
        block.setPage(currentPage);
        blabla = new StringBuffer();
        if (images.size() > 0) {
            blabla.append("@IMAGE " + images.get(images.size() - 1) + "\n");
        }
        block.setText(blabla.toString());
        block.setNbTokens(nbTokens);
        if (block.getX() == 0.0)
            block.setX(currentX);
        if (block.getY() == 0.0)
            block.setY(currentY);
        if (block.getWidth() == 0.0)
            block.setWidth(currentWidth);
        if (block.getHeight() == 0.0)
            block.setHeight(currentHeight);
        doc.addBlock(block);
        blabla = new StringBuffer();
        nbTokens = 0;
        block = new Block();
        block.setPage(currentPage);
    }
    /*
     * else if (qName.equals("VECTORIALIMAGES")) {
     *     if (block != null) {
     *         blabla.append("\n");
     *         block.setText(blabla.toString());
     *         block.setNbTokens(nbTokens);
     *         doc.addBlock(block);
     *     }
     *     block = new Block();
     *     block.setPage(currentPage);
     *     blabla = new StringBuffer();
     *     blabla.append("@IMAGE " + "vectorial \n");
     *     block.setText(blabla.toString());
     *     block.setNbTokens(nbTokens);
     *     if (block.getX() == 0.0)
     *         block.setX(currentX);
     *     if (block.getY() == 0.0)
     *         block.setY(currentY);
     *     if (block.getWidth() == 0.0)
     *         block.setWidth(currentWidth);
     *     if (block.getHeight() == 0.0)
     *         block.setHeight(currentHeight);
     *     doc.addBlock(block);
     *     blabla = new StringBuffer();
     *     nbTokens = 0;
     *     block = new Block();
     *     block.setPage(currentPage);
     * }
     */
    else if (qName.equals("BLOCK")) {
        blabla.append("\n");
        tokenizations.add("\n");
        block.setText(blabla.toString());
        block.setNbTokens(nbTokens);
        block.setWidth(currentX - block.getX() + currentWidth);
        block.setHeight(currentY - block.getY() + currentHeight);
        doc.addBlock(block);
        // blabla = new StringBuffer();
        nbTokens = 0;
        block = null;
    } else if (qName.equals("xi:include")) {
        if (block != null) {
            blabla.append("\n");
            block.setText(blabla.toString());
            block.setNbTokens(nbTokens);
            doc.addBlock(block);
        }
        block = new Block();
        block.setPage(currentPage);
        blabla = new StringBuffer();
        blabla.append("@IMAGE " + images.get(images.size() - 1) + "\n");
        block.setText(blabla.toString());
        block.setNbTokens(nbTokens);
        doc.addBlock(block);
        blabla = new StringBuffer();
        nbTokens = 0;
        block = new Block();
        block.setPage(currentPage);
    }
    /*
     * else if (qName.equals("DOCUMENT")) {
     *     System.out.println(blabla.toString());
     * }
     */
}