private void buildXQueryDynamicContext( XQueryContext context, Object[] params, MutableDocumentSet docsToLock, boolean bindVariables) throws XPathException { context.setBackwardsCompatibility(false); context.setStaticallyKnownDocuments(docs); context.setBaseURI(baseUri == null ? new AnyURIValue("/db") : baseUri); if (bindVariables) { for (Map.Entry<QName, Object> entry : bindings.entrySet()) { context.declareVariable( new org.exist.dom.QName( entry.getKey().getLocalPart(), entry.getKey().getNamespaceURI(), entry.getKey().getPrefix()), convertValue(entry.getValue())); } if (params != null) for (int i = 0; i < params.length; i++) { Object convertedValue = convertValue(params[i]); if (docsToLock != null && convertedValue instanceof Sequence) { docsToLock.addAll(((Sequence) convertedValue).getDocumentSet()); } context.declareVariable("_" + (i + 1), convertedValue); } } }
private void buildXQueryStaticContext(XQueryContext context, boolean importModules) throws XPathException { context.declareNamespaces(namespaceBindings.getCombinedMap()); for (Map.Entry<String, Document> entry : moduleMap.entrySet()) { context.importModule(entry.getKey(), null, "xmldb:exist:///db" + entry.getValue().path()); } }
/** Destructor. */ public void remove() { for (Iterator<Map.Entry<Long, WImage>> it_it = this.grid_.entrySet().iterator(); it_it.hasNext(); ) { Map.Entry<Long, WImage> it = it_it.next(); if (it.getValue() != null) it.getValue().remove(); } super.remove(); }
/** * Regenerates and redraws the image pieces. * * <p>This method invalidates all current grid images, and recreates them. */ public void redrawAll() { for (Iterator<Map.Entry<Long, WImage>> it_it = this.grid_.entrySet().iterator(); it_it.hasNext(); ) { Map.Entry<Long, WImage> it = it_it.next(); if (it.getValue() != null) it.getValue().remove(); } this.grid_.clear(); this.generateGridItems(this.currentX_, this.currentY_); }
private void putMap(String name, Map m) { _put(OBJECT, name); final int sizePos = _buf.getPosition(); _buf.writeInt(0); for (Map.Entry entry : (Set<Map.Entry>) m.entrySet()) _putObjectField(entry.getKey().toString(), entry.getValue()); _buf.write(EOO); _buf.writeInt(sizePos, _buf.getPosition() - sizePos); }
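The putMap snippet above relies on a back-patched length prefix: it reserves four bytes at sizePos, writes the fields and the EOO terminator, then writes the total size back at the reserved position (the _buf object appears to be a driver-internal output buffer). Below is a minimal, self-contained sketch of the same back-patching idea using java.nio.ByteBuffer; the class and method names are illustrative, not part of the original API.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.StandardCharsets;
import java.util.Map;

class LengthPrefixedWriter {
  // Writes name/value pairs as a length-prefixed block: reserve 4 bytes,
  // emit the payload, then patch the total size (prefix included) back in.
  static ByteBuffer encode(Map<String, String> fields) {
    ByteBuffer buf = ByteBuffer.allocate(4096).order(ByteOrder.LITTLE_ENDIAN);
    int sizePos = buf.position();
    buf.putInt(0); // placeholder for the length, patched below
    for (Map.Entry<String, String> e : fields.entrySet()) {
      buf.put(e.getKey().getBytes(StandardCharsets.UTF_8)).put((byte) 0);
      buf.put(e.getValue().getBytes(StandardCharsets.UTF_8)).put((byte) 0);
    }
    buf.put((byte) 0); // terminator, analogous to EOO
    buf.putInt(sizePos, buf.position() - sizePos); // back-patch the size
    return buf;
  }
}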
public void run() { // each file is processed into a local hash table and then merged with the global results // this will cause much less contention on the global table, but still avoids a sequential // update Hashtable<String, Integer> local_results = new Hashtable<String, Integer>(WordCountJ.HASH_SIZE, WordCountJ.LF); // grab a file to work on String cf; while ((cf = files.poll()) != null) { try { BufferedReader input = new BufferedReader(new FileReader(cf)); String text; // we'll go line-by-line... maybe this is not the fastest while ((text = input.readLine()) != null) { // parse words Matcher matcher = pattern.matcher(text); while (matcher.find()) { String word = matcher.group(1); if (local_results.containsKey(word)) { local_results.put(word, 1 + local_results.get(word)); } else { local_results.put(word, 1); } } } input.close(); } catch (Exception e) { System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage()); return; } // merge the local hashmap with the shared one; could have a // separate thread do this but that might be cheating Iterator<Map.Entry<String, Integer>> updates = local_results.entrySet().iterator(); while (updates.hasNext()) { Map.Entry<String, Integer> kv = updates.next(); String k = kv.getKey(); Integer v = kv.getValue(); synchronized (results) { if (results.containsKey(k)) { results.put(k, v + results.get(k)); } else { results.put(k, v); } } } local_results.clear(); } }
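The per-thread table in run() exists to cut contention on the shared results map; the merge at the end is the only synchronized section. On Java 8+ the same merge can be written without an explicit lock by targeting a ConcurrentHashMap. A minimal sketch under that assumption (the shared map type differs from the original, and the class name is illustrative):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class LocalCountMerger {
  // Fold a thread-local count table into a shared concurrent map.
  // merge() updates each key atomically, so no synchronized block is needed.
  static void mergeInto(ConcurrentHashMap<String, Integer> shared, Map<String, Integer> local) {
    for (Map.Entry<String, Integer> e : local.entrySet()) {
      shared.merge(e.getKey(), e.getValue(), Integer::sum);
    }
    local.clear();
  }
}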
private org.exist.source.Source buildQuerySource(String query, Object[] params, String cookie) { Map<String, String> combinedMap = namespaceBindings.getCombinedMap(); for (Map.Entry<String, Document> entry : moduleMap.entrySet()) { combinedMap.put("<module> " + entry.getKey(), entry.getValue().path()); } for (Map.Entry<QName, Object> entry : bindings.entrySet()) { combinedMap.put( "<var> " + entry.getKey(), null); // don't care about values, as long as the same vars are bound } combinedMap.put("<posvars> " + params.length, null); combinedMap.put("<cookie>", cookie); // TODO: should include statically known documents and baseURI too? return new StringSourceWithMapKey(query, combinedMap); }
/** * Clone this query service, optionally overriding the clone's namespace and variable bindings. If * the namespace bindings override or variable bindings override is specified, then that object is * cloned and used for its respective purpose. If an override is not specified, the bindings are * cloned from the original query service. * * @param nsBindingsOverride the namespace bindings to clone, or <code>null</code> to clone from * the original * @param varBindingsOverride the variable bindings to clone, or <code>null</code> to clone from * the original * @return a clone of this query service with bindings optionally overridden */ public QueryService clone(NamespaceMap nsBindingsOverride, Map<QName, ?> varBindingsOverride) { try { QueryService that = (QueryService) super.clone(); that.namespaceBindings = nsBindingsOverride != null ? nsBindingsOverride.clone() : that.namespaceBindings.clone(); if (varBindingsOverride == null) { that.bindings = new HashMap<QName, Object>(that.bindings); } else { that.bindings = new HashMap<QName, Object>(); for (Map.Entry<QName, ?> entry : varBindingsOverride.entrySet()) { that.let(entry.getKey(), entry.getValue()); } } that.moduleMap = new TreeMap<String, Document>(moduleMap); return that; } catch (CloneNotSupportedException e) { throw new RuntimeException("unexpected exception", e); } }
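The javadoc above describes a clone-with-optional-overrides contract: a non-null override is used in place of the corresponding cloned state, otherwise the original's bindings are copied. A generic sketch of that pattern follows, using a stand-in class and field names rather than the actual eXist fluent API:

import java.util.HashMap;
import java.util.Map;

class OverridableState implements Cloneable {
  Map<String, Object> bindings = new HashMap<String, Object>();

  // Copy this object; a non-null override replaces the cloned bindings,
  // otherwise the current bindings are copied into the clone.
  OverridableState copyWith(Map<String, ?> bindingsOverride) {
    try {
      OverridableState that = (OverridableState) super.clone();
      Map<String, ?> source = bindingsOverride != null ? bindingsOverride : this.bindings;
      that.bindings = new HashMap<String, Object>(source);
      return that;
    } catch (CloneNotSupportedException e) {
      throw new RuntimeException("unexpected exception", e);
    }
  }
}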
private void cleanGrid() { WVirtualImage.Rect cleanNb = this.neighbourhood( this.currentX_, this.currentY_, this.viewPortWidth_ * 3, this.viewPortHeight_ * 3); long i1 = cleanNb.x1 / this.gridImageSize_; long j1 = cleanNb.y1 / this.gridImageSize_; long i2 = cleanNb.x2 / this.gridImageSize_ + 1; long j2 = cleanNb.y2 / this.gridImageSize_ + 1; for (Iterator<Map.Entry<Long, WImage>> it_it = this.grid_.entrySet().iterator(); it_it.hasNext(); ) { Map.Entry<Long, WImage> it = it_it.next(); WVirtualImage.Coordinate coordinate = new WVirtualImage.Coordinate(); this.decodeKey(it.getKey(), coordinate); if (coordinate.i < i1 || coordinate.i > i2 || coordinate.j < j1 || coordinate.j > j2) { if (it.getValue() != null) it.getValue().remove(); it_it.remove(); } else { } } }
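remove(), redrawAll() and cleanGrid() above all walk the entry-set iterator so that values can be disposed and entries removed while iterating. On Java 8+ the same pattern can be expressed with removeIf on the entry set; a minimal sketch, with the key test and the disposal step abstracted into functional parameters (all names here are illustrative):

import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Predicate;

class GridPruner {
  // Remove every entry whose key fails the keep predicate, disposing of the
  // value first so nothing is leaked; removeIf drives the iterator for us.
  static <V> void prune(Map<Long, V> grid, Predicate<Long> keep, Consumer<V> dispose) {
    grid.entrySet().removeIf(entry -> {
      if (keep.test(entry.getKey())) {
        return false;
      }
      if (entry.getValue() != null) {
        dispose.accept(entry.getValue());
      }
      return true;
    });
  }
}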
public int numberOfSubsets(int goodValue, int[] d) { HashMap<Integer, Integer> map = new HashMap<>(); // prod, sets for (int i = 0; i < d.length; i++) { HashMap<Integer, Integer> nmap = new HashMap<>(); nmap.put(d[i], 1); for (Map.Entry<Integer, Integer> entry : map.entrySet()) { // not use Integer setcount = nmap.get(entry.getKey()); if (setcount == null) setcount = 0; setcount = (entry.getValue() + setcount) % MOD; nmap.put(entry.getKey(), setcount); // use int nprod = entry.getKey(); if (nprod > goodValue / d[i]) continue; nprod *= d[i]; if (goodValue % nprod != 0) continue; setcount = nmap.get(nprod); if (setcount == null) setcount = 0; setcount = (entry.getValue() + setcount) % MOD; nmap.put(nprod, setcount); } map = nmap; } return map.get(goodValue) == null ? 0 : map.get(goodValue); }
/** Enter a set of generated class files. */ private List<ClassSymbol> enterClassFiles(Map<String, JavaFileObject> classFiles) { ClassReader reader = ClassReader.instance(context); Names names = Names.instance(context); List<ClassSymbol> list = List.nil(); for (Map.Entry<String, JavaFileObject> entry : classFiles.entrySet()) { Name name = names.fromString(entry.getKey()); JavaFileObject file = entry.getValue(); if (file.getKind() != JavaFileObject.Kind.CLASS) throw new AssertionError(file); ClassSymbol cs; if (isPkgInfo(file, JavaFileObject.Kind.CLASS)) { Name packageName = Convert.packagePart(name); PackageSymbol p = reader.enterPackage(packageName); if (p.package_info == null) p.package_info = reader.enterClass(Convert.shortName(name), p); cs = p.package_info; if (cs.classfile == null) cs.classfile = file; } else cs = reader.enterClass(name, file); list = list.prepend(cs); } return list.reverse(); }
private void doManifest( Jar jar, String[] digestNames, MessageDigest[] algorithms, OutputStream out) throws Exception { for (Map.Entry<String, Resource> entry : jar.getResources().entrySet()) { String name = entry.getKey(); if (!METAINFDIR.matcher(name).matches()) { out.write("\r\n".getBytes("UTF-8")); out.write("Name: ".getBytes("UTF-8")); out.write(name.getBytes("UTF-8")); out.write("\r\n".getBytes("UTF-8")); digest(algorithms, entry.getValue()); for (int a = 0; a < algorithms.length; a++) { if (algorithms[a] != null) { byte[] digest = algorithms[a].digest(); String header = digestNames[a] + "-Digest: " + new Base64(digest) + "\r\n"; out.write(header.getBytes("UTF-8")); } } } } }
public static void main(String[] args) { long start = System.currentTimeMillis(); Scanner input = new Scanner(System.in); int numberOfTestCases = input.nextInt(); ArrayList<Integer> order = new ArrayList<Integer>(numberOfTestCases); int previousKey = -1; int previousValue = 0; int cycleNumber = 0; Map<Integer, Integer> testCases = new TreeMap<Integer, Integer>(); for (int i = 0; i < numberOfTestCases; i++) { int numberOfCycles = input.nextInt(); testCases.put(numberOfCycles, 1); order.add(numberOfCycles); } for (Map.Entry<Integer, Integer> entry : testCases.entrySet()) { int numberOfCycles; int initialHeight; if (previousKey == -1) { numberOfCycles = entry.getKey(); initialHeight = entry.getValue(); } else { numberOfCycles = entry.getKey() - previousKey; initialHeight = previousValue; } for (int i = 0; i < numberOfCycles; i++) { if (cycleNumber % 2 == 0) { initialHeight *= 2; } else { initialHeight += 1; } cycleNumber++; } entry.setValue(initialHeight); previousKey = entry.getKey(); previousValue = initialHeight; } for (Integer element : order) { System.out.println(testCases.get(element)); } long elapsed = System.currentTimeMillis() - start; System.out.println("time: " + elapsed); }
/** * Gets the <tt>contactDetails</tt> to be set on a <tt>SourceContact</tt>. * * @param entry <tt>GoogleContactsEntry</tt> * @return the <tt>contactDetails</tt> to be set on a <tt>SourceContact</tt> */ private List<ContactDetail> getContactDetails(GoogleContactsEntry entry) { List<ContactDetail> ret = new LinkedList<ContactDetail>(); List<String> homeMails = entry.getHomeMails(); List<String> workMails = entry.getWorkMails(); List<String> mobilePhones = entry.getMobilePhones(); List<String> homePhones = entry.getHomePhones(); List<String> workPhones = entry.getWorkPhones(); Map<String, GoogleContactsEntry.IMProtocol> ims = entry.getIMAddresses(); ContactDetail detail = null; for (String mail : homeMails) { List<Class<? extends OperationSet>> supportedOpSets = new ArrayList<Class<? extends OperationSet>>(1); // can be added as contacts supportedOpSets.add(OperationSetPersistentPresence.class); detail = new ContactDetail( mail, ContactDetail.Category.Email, new ContactDetail.SubCategory[] {ContactDetail.SubCategory.Home}); detail.setSupportedOpSets(supportedOpSets); ret.add(detail); } for (String mail : workMails) { List<Class<? extends OperationSet>> supportedOpSets = new ArrayList<Class<? extends OperationSet>>(1); // can be added as contacts supportedOpSets.add(OperationSetPersistentPresence.class); detail = new ContactDetail( mail, ContactDetail.Category.Email, new ContactDetail.SubCategory[] {ContactDetail.SubCategory.Work}); detail.setSupportedOpSets(supportedOpSets); ret.add(detail); } for (String homePhone : homePhones) { List<Class<? extends OperationSet>> supportedOpSets = new ArrayList<Class<? extends OperationSet>>(2); supportedOpSets.add(OperationSetBasicTelephony.class); // can be added as contacts supportedOpSets.add(OperationSetPersistentPresence.class); homePhone = PhoneNumberI18nService.normalize(homePhone); detail = new ContactDetail( homePhone, ContactDetail.Category.Phone, new ContactDetail.SubCategory[] {ContactDetail.SubCategory.Home}); detail.setSupportedOpSets(supportedOpSets); ret.add(detail); } for (String workPhone : workPhones) { List<Class<? extends OperationSet>> supportedOpSets = new ArrayList<Class<? extends OperationSet>>(2); supportedOpSets.add(OperationSetBasicTelephony.class); // can be added as contacts supportedOpSets.add(OperationSetPersistentPresence.class); workPhone = PhoneNumberI18nService.normalize(workPhone); detail = new ContactDetail( workPhone, ContactDetail.Category.Phone, new ContactDetail.SubCategory[] {ContactDetail.SubCategory.Work}); detail.setSupportedOpSets(supportedOpSets); ret.add(detail); } for (String mobilePhone : mobilePhones) { List<Class<? extends OperationSet>> supportedOpSets = new ArrayList<Class<? extends OperationSet>>(2); supportedOpSets.add(OperationSetBasicTelephony.class); // can be added as contacts supportedOpSets.add(OperationSetPersistentPresence.class); mobilePhone = PhoneNumberI18nService.normalize(mobilePhone); detail = new ContactDetail( mobilePhone, ContactDetail.Category.Phone, new ContactDetail.SubCategory[] {ContactDetail.SubCategory.Mobile}); detail.setSupportedOpSets(supportedOpSets); ret.add(detail); } for (Map.Entry<String, GoogleContactsEntry.IMProtocol> im : ims.entrySet()) { if (im.getValue() != GoogleContactsEntry.IMProtocol.OTHER) { ContactDetail.SubCategory imSubCat; switch (im.getValue()) { case AIM: imSubCat = ContactDetail.SubCategory.AIM; break; case ICQ: imSubCat = ContactDetail.SubCategory.ICQ; break; case YAHOO: imSubCat = ContactDetail.SubCategory.Yahoo; break; case JABBER: imSubCat = ContactDetail.SubCategory.Jabber; break; case MSN: imSubCat = ContactDetail.SubCategory.MSN; break; case GOOGLETALK: imSubCat = ContactDetail.SubCategory.GoogleTalk; break; default: imSubCat = null; break; } detail = new ContactDetail( im.getKey(), ContactDetail.Category.InstantMessaging, new ContactDetail.SubCategory[] {imSubCat}); setIMCapabilities(detail, im.getValue()); // can be added as contacts detail.getSupportedOperationSets().add(OperationSetPersistentPresence.class); ret.add(detail); } } return ret; }
@Override protected int drawUnselectedText(Graphics graphics, int x, int y, int p0, int p1) throws BadLocationException { try { Document doc = getDocument(); String text = doc.getText(p0, p1 - p0); Segment segment = getLineBuffer(); int initialXpos = x; SortedMap<Integer, Integer> startMap = new TreeMap<Integer, Integer>(); SortedMap<Integer, Color> colorMap = new TreeMap<Integer, Color>(); // Match all regexes on this snippet, store positions for (Map.Entry<Pattern, Color> entry : patternColors.entrySet()) { Matcher matcher = entry.getKey().matcher(text); while (matcher.find()) { startMap.put(matcher.start(1), matcher.end(1)); colorMap.put(matcher.start(1), entry.getValue()); } } // TODO: check the map for overlapping parts int i = 0; // add tag highlighted background colors if (!TAG_HIGHLIGHTED.isEmpty()) { Matcher highlightMatcher = Pattern.compile(TAG_HIGHLIGHTED).matcher(text); while (highlightMatcher.find()) { int start = highlightMatcher.start(1); int end = highlightMatcher.end(1); if (i < start) { graphics.setColor(Color.black); doc.getText(p0 + i, start - i, segment); x = Utilities.drawTabbedText(segment, x, y, graphics, this, i); } graphics.setColor(TAG_HIGHLIGHTED_COLOR); i = end; doc.getText(p0 + start, i - start, segment); int width = Utilities.getTabbedTextWidth( segment, graphics.getFontMetrics(), x, this, p0 + start); // graphics.drawLine(x, y, width, y);graphics.getFontMetrics() graphics.fillRect( x, y - graphics.getFontMetrics().getHeight() + 2, width, graphics.getFontMetrics().getHeight()); graphics.setColor(Color.black); doc.getText(p0 + start, i - start, segment); x = Utilities.drawTabbedText(segment, x, y, graphics, this, start); } } x = initialXpos; i = 0; // add highlighted background colors based on position // String textx = doc.getText(p0, p1 - p0); if ((HIGHLIGHTED_START < p1 && HIGHLIGHTED_START >= p0) || (HIGHLIGHTED_END <= p1 && HIGHLIGHTED_END > p0) || (HIGHLIGHTED_START < p1 && HIGHLIGHTED_END > p0)) { // select whole line int start = 0; int end = text.length(); // test to see if only partial line is needed. if (HIGHLIGHTED_START > p0) start = HIGHLIGHTED_START - p0; if (HIGHLIGHTED_END < p1) end -= p1 - HIGHLIGHTED_END; if (i < start) { // fill in normal color if start highlight isn't at the beginning graphics.setColor(Color.black); doc.getText(p0 + i, start - i, segment); x = Utilities.drawTabbedText(segment, x, y, graphics, this, i); } graphics.setColor(HIGHLIGHTED_COLOR); // fill in the highlight color i = end; if (i - start > 0) { doc.getText(p0 + start, i - start, segment); int width = Utilities.getTabbedTextWidth( segment, graphics.getFontMetrics(), x, this, p0 + start); // graphics.drawLine(x, y, width, y);graphics.getFontMetrics() graphics.fillRect( x, y - graphics.getFontMetrics().getHeight() + 2, width, graphics.getFontMetrics().getHeight()); graphics.setColor(Color.black); doc.getText(p0 + start, i - start, segment); x = Utilities.drawTabbedText(segment, x, y, graphics, this, start); } // else // System.out.println("invalid highlighting " + (i - start) + " is <= 0 (" + p0 + "-" + // p1 + "=" + (p1 - p0) +") " + start + ", " + end + " len=" + text.length()); } x = initialXpos; i = 0; // Color the parts of xml foreground font for (Map.Entry<Integer, Integer> entry : startMap.entrySet()) { int start = entry.getKey(); int end = entry.getValue(); if (i < start) { graphics.setColor(Color.black); doc.getText(p0 + i, start - i, segment); x = Utilities.drawTabbedText(segment, x, y, graphics, this, i); } graphics.setColor(colorMap.get(start)); i = end; doc.getText(p0 + start, i - start, segment); x = Utilities.drawTabbedText(segment, x, y, graphics, this, start); } // Paint possible remaining text black if (i < text.length()) { graphics.setColor(Color.black); doc.getText(p0 + i, text.length() - i, segment); x = Utilities.drawTabbedText(segment, x, y, graphics, this, i); } } catch (Exception e) { e.printStackTrace(); } return x; }
public int compare(Map.Entry<String, Integer> a, Map.Entry<String, Integer> b) { return a.getKey().compareTo(b.getKey()); }
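Since Java 8, the comparator above can also be obtained from Map.Entry.comparingByKey(), which orders entries by the natural ordering of their keys exactly as the hand-written compare does. A short sketch (the surrounding class and method are illustrative, not from the original code):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

class EntrySorting {
  // Collect the entries and sort them by key; equivalent to the
  // a.getKey().compareTo(b.getKey()) comparator above.
  static List<Map.Entry<String, Integer>> sortedByKey(Map<String, Integer> counts) {
    List<Map.Entry<String, Integer>> entries = new ArrayList<Map.Entry<String, Integer>>(counts.entrySet());
    entries.sort(Map.Entry.comparingByKey());
    return entries;
  }
}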
public static void extract_consequent(PptMap ppts) { // Retrieve Ppt objects in sorted order. // Use a custom comparator for a specific ordering Comparator<PptTopLevel> comparator = new Ppt.NameComparator(); TreeSet<PptTopLevel> ppts_sorted = new TreeSet<PptTopLevel>(comparator); ppts_sorted.addAll(ppts.asCollection()); for (PptTopLevel ppt : ppts_sorted) { extract_consequent_maybe(ppt, ppts); } PrintWriter pw = new PrintWriter(System.out, true); // All conditions at a program point. A TreeSet to enable // deterministic output. TreeSet<String> allConds = new TreeSet<String>(); for (String pptname : pptname_to_conditions.keySet()) { Map<String, Map<String, HashedConsequent>> cluster_to_conditions = pptname_to_conditions.get(pptname); for (Map.Entry</*@KeyFor("cluster_to_conditions")*/ String, Map<String, HashedConsequent>> entry : cluster_to_conditions.entrySet()) { String predicate = entry.getKey(); Map<String, HashedConsequent> conditions = entry.getValue(); StringBuffer conjunctionJava = new StringBuffer(); StringBuffer conjunctionDaikon = new StringBuffer(); StringBuffer conjunctionESC = new StringBuffer(); StringBuffer conjunctionSimplify = new StringBuffer("(AND "); int count = 0; for (Map.Entry</*@KeyFor("conditions")*/ String, HashedConsequent> entry2 : conditions.entrySet()) { count++; String condIndex = entry2.getKey(); HashedConsequent cond = entry2.getValue(); if (cond.fakeFor != null) { count--; continue; } String javaStr = cond.inv.format_using(OutputFormat.JAVA); String daikonStr = cond.inv.format_using(OutputFormat.DAIKON); String escStr = cond.inv.format_using(OutputFormat.ESCJAVA); String simplifyStr = cond.inv.format_using(OutputFormat.SIMPLIFY); allConds.add(combineDummy(condIndex, "<dummy> " + daikonStr, escStr, simplifyStr)); // allConds.add(condIndex); if (count > 0) { conjunctionJava.append(" && "); conjunctionDaikon.append(" and "); conjunctionESC.append(" && "); conjunctionSimplify.append(" "); } conjunctionJava.append(javaStr); conjunctionDaikon.append(daikonStr); conjunctionESC.append(escStr); conjunctionSimplify.append(simplifyStr); } conjunctionSimplify.append(")"); String conj = conjunctionJava.toString(); // Avoid inserting self-contradictory conditions such as "x == 1 && // x == 2", or conjunctions of only a single condition. if (count < 2 || contradict_inv_pattern.matcher(conj).find() || useless_inv_pattern_1.matcher(conj).find() || useless_inv_pattern_2.matcher(conj).find()) { // System.out.println("Suppressing: " + conj); } else { allConds.add( combineDummy( conjunctionJava.toString(), conjunctionDaikon.toString(), conjunctionESC.toString(), conjunctionSimplify.toString())); } } if (allConds.size() > 0) { pw.println(); pw.println("PPT_NAME " + pptname); for (String s : allConds) { pw.println(s); } } allConds.clear(); } pw.flush(); }
public CommandData parseCommandData(ArtifactData artifact) throws Exception { File source = new File(artifact.file); if (!source.isFile()) throw new FileNotFoundException(); CommandData data = new CommandData(); data.sha = artifact.sha; data.jpmRepoDir = repoDir.getCanonicalPath(); JarFile jar = new JarFile(source); try { reporter.trace("Parsing %s", source); Manifest m = jar.getManifest(); Attributes main = m.getMainAttributes(); data.name = data.bsn = main.getValue("Bundle-SymbolicName"); String version = main.getValue("Bundle-Version"); if (version == null) data.version = Version.LOWEST; else data.version = new Version(version); data.main = main.getValue("Main-Class"); data.description = main.getValue("Bundle-Description"); data.title = main.getValue("JPM-Name"); reporter.trace("name " + data.name + " " + data.main + " " + data.title); DependencyCollector path = new DependencyCollector(this); path.add(artifact); DependencyCollector bundles = new DependencyCollector(this); if (main.getValue("JPM-Classpath") != null) { Parameters requires = OSGiHeader.parseHeader(main.getValue("JPM-Classpath")); for (Map.Entry<String, Attrs> e : requires.entrySet()) { path.add(e.getKey(), e.getValue().get("name")); // coordinate } } else if (!artifact.local) { // No JPM-Classpath, falling back to // server's revision // Iterable<RevisionRef> closure = // library.getClosure(artifact.sha, // false); // System.out.println("getting closure " + artifact.url + " " + // Strings.join("\n",closure)); // if (closure != null) { // for (RevisionRef ref : closure) { // path.add(Hex.toHexString(ref.revision)); // } // } } if (main.getValue("JPM-Runbundles") != null) { Parameters jpmrunbundles = OSGiHeader.parseHeader(main.getValue("JPM-Runbundles")); for (Map.Entry<String, Attrs> e : jpmrunbundles.entrySet()) { bundles.add(e.getKey(), e.getValue().get("name")); } } reporter.trace("collect digests runpath"); data.dependencies.addAll(path.getDigests()); reporter.trace("collect digests bundles"); data.runbundles.addAll(bundles.getDigests()); Parameters command = OSGiHeader.parseHeader(main.getValue("JPM-Command")); if (command.size() > 1) reporter.error("Only one command can be specified"); for (Map.Entry<String, Attrs> e : command.entrySet()) { data.name = e.getKey(); Attrs attrs = e.getValue(); if (attrs.containsKey("jvmargs")) data.jvmArgs = attrs.get("jvmargs"); if (attrs.containsKey("title")) data.title = attrs.get("title"); if (data.title != null) data.title = data.name; } return data; } finally { jar.close(); } }
public void nextEntry() { // Check if we are in read mode if (this.controlStruct != null) { String[] nextRecord = new String[1]; // System.out.println( "Reading Next Record: " + currIndex ); returnCode = ErrorCodes.ERR_RECPHYSDEL; while (returnCode != ErrorCodes.ZERO) { returnCode = ISISAPI.IsisRecRead(spaceHandler, 0, currIndex); if (returnCode != ErrorCodes.ZERO) System.out.println("\nSkipping Record " + currIndex); currIndex++; } returnCode = ISISAPI.IsisRecDump(spaceHandler, 0, nextRecord, Constants.MAXMFRL); // Looks complicated, but we have to do it this way, because in Java we have no direct way to // convert a String object from a given encoding // That's because the ISISAPI fills the string, and in Java we can only convert encodings // while filling the string object byte[] rawData = new byte[nextRecord[0].length()]; for (int i = 0; i < nextRecord[0].length(); i++) { rawData[i] = (byte) nextRecord[0].charAt(i); } String recordContent = ""; try { recordContent = new String(rawData, this.fileEncoding); } catch (Exception e) { e.printStackTrace(); } // Add the MFN-Number to the stack // TODO Replace IDRecord once custom field adding is supported DataRecord nRecord = new DataRecord(); nRecord.setIDRecord("001"); nRecord.setRecordContent(String.valueOf(currIndex - 1)); this.recordsStack.add(nRecord); // this.charDecoder.decode( ByteBuffer.wrap( nextRecord[0] ) ); // String recordContent = new String( nextRecord[0].getBytes( this.fileEncoding ), // this.fileEncoding ); Pattern p = Pattern.compile("<(\\d+)>(.*)</\\1>"); Matcher m = p.matcher(recordContent); // temporary stack used for sorting TreeMap<String, ArrayList<ArrayList<DataRecord>>> currRecordsStack = new TreeMap<String, ArrayList<ArrayList<DataRecord>>>(); // parse & add all fields while (m.find()) { ArrayList<DataRecord> currFieldRecords = new ArrayList<DataRecord>(); // System.out.println( "Found: " + m.group(1) + " / Content: " + m.group(2) + " / " + (int) // m.group(2).charAt(3) ); /*if( m.group(1).equals( "260$a" ) ) { System.out.println( "DEBUG: " + m.group(1) + " Char: '" + m.group(1).charAt(3) + "'" ); }*/ // Convert to 3 digit code (required) String fieldCode = String.format("%03d", Integer.valueOf(m.group(1))); // String fieldCode = m.group(1); // Add an entry for the whole record nRecord = new DataRecord(); nRecord.setIDRecord(fieldCode); nRecord.setRecordContent(m.group(2)); // this.recordsStack.add(nRecord); currFieldRecords.add(nRecord); // Find the sub-fields and add them to the stack String[] subFields = m.group(2).split("\\^"); if (subFields.length > 1) { for (int i = 1; i < subFields.length; i++) { if (subFields[i].length() <= 0) continue; nRecord = new DataRecord(); nRecord.setIDRecord(fieldCode + DataRecord.getIDSeperator() + subFields[i].charAt(0)); nRecord.setRecordContent(subFields[i].substring(1)); // this.recordsStack.add(nRecord); currFieldRecords.add(nRecord); } } Collections.sort(currFieldRecords); // fetch fitting entry from temporary stack ArrayList<ArrayList<DataRecord>> currRecordsStackEntry = currRecordsStack.get(fieldCode); if (currRecordsStackEntry == null) { currRecordsStackEntry = new ArrayList<ArrayList<DataRecord>>(); } // add the new (sorted) record to the stack currRecordsStackEntry.add(currFieldRecords); currRecordsStack.put(fieldCode, currRecordsStackEntry); } // add the temporary stack to the internal recordsStack // we use this as the entries are now correctly sorted Iterator<Map.Entry<String, ArrayList<ArrayList<DataRecord>>>> crs_It = currRecordsStack.entrySet().iterator(); while (crs_It.hasNext()) { Map.Entry<String, ArrayList<ArrayList<DataRecord>>> currEntry = crs_It.next(); Iterator<ArrayList<DataRecord>> ce_It = currEntry.getValue().iterator(); while (ce_It.hasNext()) { Iterator<DataRecord> dr_it = ce_It.next().iterator(); while (dr_it.hasNext()) { this.recordsStack.add(dr_it.next()); } } } // Sort collection by fieldID (refer to the compareTo implementation in DataRecord class) // Collections.sort( this.recordsStack ); } // Check if we are in write mode else if (this.writeBuffer != null) { String recordString = ""; Iterator<Map.Entry<String, HashMap<Character, String>>> wbIt = this.writeBuffer.entrySet().iterator(); while (wbIt.hasNext()) { Map.Entry<String, HashMap<Character, String>> currEntry = wbIt.next(); String fieldCode = currEntry.getKey(); HashMap<Character, String> subfields = currEntry.getValue(); String fieldString = "<" + fieldCode + ">"; Iterator<Map.Entry<Character, String>> sfIt = subfields.entrySet().iterator(); while (sfIt.hasNext()) { Map.Entry<Character, String> currSfEntry = sfIt.next(); Character subfieldCode = currSfEntry.getKey(); String content = currSfEntry.getValue(); if (subfieldCode.equals(' ')) { fieldString += content; break; } else { fieldString += "^" + subfieldCode + content; } } fieldString += "</" + fieldCode + ">\n"; recordString += fieldString; } returnCode = ISISAPI.IsisRecNew(spaceHandler, 0); ISISAPI.IsisRecUpdate(spaceHandler, 0, recordString); ISISAPI.IsisRecWrite(spaceHandler, 0); this.writeBuffer = new HashMap<String, HashMap<Character, String>>(); } }
private void discoverAndRunProcs( Context context, Set<TypeElement> annotationsPresent, List<ClassSymbol> topLevelClasses, List<PackageSymbol> packageInfoFiles) { Map<String, TypeElement> unmatchedAnnotations = new HashMap<String, TypeElement>(annotationsPresent.size()); for (TypeElement a : annotationsPresent) { unmatchedAnnotations.put(a.getQualifiedName().toString(), a); } // Give "*" processors a chance to match if (unmatchedAnnotations.size() == 0) unmatchedAnnotations.put("", null); DiscoveredProcessors.ProcessorStateIterator psi = discoveredProcs.iterator(); // TODO: Create proper argument values; need past round // information to fill in this constructor. Note that the 1st // round of processing could be the last round if there // were parse errors on the initial source files; however, we // are not doing processing in that case. Set<Element> rootElements = new LinkedHashSet<Element>(); rootElements.addAll(topLevelClasses); rootElements.addAll(packageInfoFiles); rootElements = Collections.unmodifiableSet(rootElements); RoundEnvironment renv = new JavacRoundEnvironment(false, false, rootElements, JavacProcessingEnvironment.this); while (unmatchedAnnotations.size() > 0 && psi.hasNext()) { ProcessorState ps = psi.next(); Set<String> matchedNames = new HashSet<String>(); Set<TypeElement> typeElements = new LinkedHashSet<TypeElement>(); for (Map.Entry<String, TypeElement> entry : unmatchedAnnotations.entrySet()) { String unmatchedAnnotationName = entry.getKey(); if (ps.annotationSupported(unmatchedAnnotationName)) { matchedNames.add(unmatchedAnnotationName); TypeElement te = entry.getValue(); if (te != null) typeElements.add(te); } } if (matchedNames.size() > 0 || ps.contributed) { boolean processingResult = callProcessor(ps.processor, typeElements, renv); ps.contributed = true; ps.removeSupportedOptions(unmatchedProcessorOptions); if (printProcessorInfo || verbose) { log.printNoteLines( "x.print.processor.info", ps.processor.getClass().getName(), matchedNames.toString(), processingResult); } if (processingResult) { unmatchedAnnotations.keySet().removeAll(matchedNames); } } } unmatchedAnnotations.remove(""); if (lint && unmatchedAnnotations.size() > 0) { // Remove annotations processed by javac unmatchedAnnotations.keySet().removeAll(platformAnnotations); if (unmatchedAnnotations.size() > 0) { log = Log.instance(context); log.warning("proc.annotations.without.processors", unmatchedAnnotations.keySet()); } } // Run contributing processors that haven't run yet psi.runContributingProcs(renv); // Debugging if (options.isSet("displayFilerState")) filer.displayState(); }