/**
 * Build the mapping of sub-classes for every class in this run. The tree is rooted at
 * java.lang.Object, whose entry lists its direct sub-classes, and each of those sub-classes
 * in turn has its own sub-class list.
 *
 * @param classes all the classes in this run.
 * @param configuration the current configuration of the doclet.
 */
private void buildTree(ClassDoc[] classes, Configuration configuration) {
    log.info("ClassTree buildTree");
    for (int i = 0; i < classes.length; i++) {
        // Skip deprecated classes when -nodeprecated is in effect.
        if (configuration.nodeprecated && classes[i].tags("deprecated").length > 0) {
            continue;
        }
        if (classes[i].isEnum()) {
            processType(classes[i], configuration, baseEnums, subEnums);
        } else if (classes[i].isClass()) {
            processType(classes[i], configuration, baseclasses, subclasses);
        } else if (classes[i].isInterface()) {
            processInterface(classes[i]);
            List list = (List) implementingclasses.get(classes[i]);
            if (list != null) {
                Collections.sort(list);
            }
        } else if (classes[i].isAnnotationType()) {
            processType(classes[i], configuration, baseAnnotationTypes, subAnnotationTypes);
        }
    }

    Collections.sort(baseinterfaces);
    for (Iterator it = subinterfaces.values().iterator(); it.hasNext(); ) {
        Collections.sort((List) it.next());
    }
    for (Iterator it = subclasses.values().iterator(); it.hasNext(); ) {
        Collections.sort((List) it.next());
    }
}
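// The processType and processInterface helpers are not part of this excerpt. Below is a
// minimal sketch of what processType is assumed to do, using the same raw List/Map fields
// as buildTree; getFirstVisibleSuperClass is a hypothetical helper standing in for whatever
// visibility filtering the real doclet applies.
private void processTypeSketch(ClassDoc cd, Configuration configuration, List bases, Map subs) {
    ClassDoc superclass = getFirstVisibleSuperClass(cd, configuration); // hypothetical helper
    if (superclass == null) {
        // No visible superclass: this type is a root of the tree (e.g. java.lang.Object).
        if (!bases.contains(cd)) {
            bases.add(cd);
        }
        return;
    }
    List subsOfSuper = (List) subs.get(superclass);
    if (subsOfSuper == null) {
        subsOfSuper = new ArrayList();
        subs.put(superclass, subsOfSuper);
    }
    if (!subsOfSuper.contains(cd)) {
        subsOfSuper.add(cd);
        // Recurse so every ancestor up the chain also gets its sub-class list populated.
        processTypeSketch(superclass, configuration, bases, subs);
    }
}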
/**
 * For a class, return all implemented interfaces, including the superinterfaces of those
 * interfaces, repeating the walk for every superclass. For an interface, return all extended
 * interfaces as well as their superinterfaces.
 *
 * @param type type whose implemented or super interfaces are sought.
 * @param configuration the current configuration of the doclet.
 * @param sort if true, return the list of interfaces sorted alphabetically.
 * @return list of all the required interfaces.
 */
public static List<Type> getAllInterfaces(Type type, Configuration configuration, boolean sort) {
    Map<ClassDoc, Type> results =
        sort ? new TreeMap<ClassDoc, Type>() : new LinkedHashMap<ClassDoc, Type>();
    Type[] interfaceTypes = null;
    Type superType = null;
    if (type instanceof ParameterizedType) {
        interfaceTypes = ((ParameterizedType) type).interfaceTypes();
        superType = ((ParameterizedType) type).superclassType();
    } else if (type instanceof ClassDoc) {
        interfaceTypes = ((ClassDoc) type).interfaceTypes();
        superType = ((ClassDoc) type).superclassType();
    } else {
        interfaceTypes = type.asClassDoc().interfaceTypes();
        superType = type.asClassDoc().superclassType();
    }
    for (int i = 0; i < interfaceTypes.length; i++) {
        Type interfaceType = interfaceTypes[i];
        ClassDoc interfaceClassDoc = interfaceType.asClassDoc();
        if (!(interfaceClassDoc.isPublic()
            || (configuration == null || isLinkable(interfaceClassDoc, configuration)))) {
            continue;
        }
        results.put(interfaceClassDoc, interfaceType);
        // Recurse so the superinterfaces of this interface are collected as well.
        List<Type> superInterfaces = getAllInterfaces(interfaceType, configuration, sort);
        for (Iterator<Type> iter = superInterfaces.iterator(); iter.hasNext(); ) {
            Type t = iter.next();
            results.put(t.asClassDoc(), t);
        }
    }
    if (superType == null) return new ArrayList<Type>(results.values());
    // Try walking the tree.
    addAllInterfaceTypes(
        results,
        superType,
        superType instanceof ClassDoc
            ? ((ClassDoc) superType).interfaceTypes()
            : ((ParameterizedType) superType).interfaceTypes(),
        false,
        configuration);
    List<Type> resultsList = new ArrayList<Type>(results.values());
    if (sort) {
        Collections.sort(resultsList, new TypeComparator());
    }
    return resultsList;
}
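// TypeComparator is used above but not defined in this excerpt. A minimal sketch, assuming
// the only requirement is a stable alphabetical order over the returned interface types:
private static class TypeComparator implements Comparator<Type> {
    public int compare(Type type1, Type type2) {
        // Order by fully qualified type name, ignoring case.
        return type1.qualifiedTypeName().compareToIgnoreCase(type2.qualifiedTypeName());
    }
}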
/**
 * The entry point into the Parser class.
 *
 * @param root a RootDoc instance obtained via the doclet API
 * @return an XML (XStream) serializable element containing everything parsed from the javadoc
 *     doclet
 */
public static Root ParseRoot(RootDoc root) {
    processingStorage = new HashMap<PackageDoc, ParserMediary>();
    try {
        md5 = MessageDigest.getInstance("MD5");
    } catch (NoSuchAlgorithmException e) {
        log.error("unable to acquire MD5 algorithm", e);
        return null;
    }
    rootXml = new Root();

    ClassDoc[] allClasses = root.classes();
    for (ClassDoc classDoc : allClasses) {
        PackageDoc doc = classDoc.containingPackage();
        // Reuse the mediary for this package if one already exists, otherwise create it.
        ParserMediary mediary = null;
        if (processingStorage.containsKey(doc)) {
            mediary = processingStorage.get(doc);
        } else {
            mediary =
                new ParserMediary(
                    doc.name(),
                    doc.commentText(),
                    ParseAnnotationInstances(doc.annotations(), doc.name()));
            processingStorage.put(doc, mediary);
        }
        if (classDoc.isIncluded()) {
            // dev comment: why do enums show up as ordinary classes?
            if (classDoc.isOrdinaryClass() || classDoc.isException() || classDoc.isError()) {
                mediary.addClass(ParseClass(classDoc));
            } else if (classDoc.isEnum()) {
                mediary.addEnum(ParseEnum(classDoc));
            } else if (isAnnotation(classDoc)) {
                mediary.addAnnotation(ParseAnnotation(classDoc));
            } else if (classDoc.isInterface()) {
                mediary.addInterface(ParseInterface(classDoc));
            }
        } else {
            log.debug("Skipping not-included class " + classDoc.qualifiedName());
        }
    }

    if (processingStorage.size() > 0) {
        List<Package> list = new ArrayList<Package>();
        for (ParserMediary mediary : processingStorage.values()) {
            list.add(mediary.wrapup());
        }
        rootXml.packages = list.toArray(new Package[list.size()]);
    } else {
        log.warn("No packages found!");
    }
    return rootXml;
}
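// Illustrative only: one way the returned Root might be consumed from a standard doclet entry
// point. XStream is the serializer the doc comment above refers to; the "javadoc.xml" output
// path and the default XStream configuration are assumptions, not part of the parser itself.
// Assumes com.thoughtworks.xstream.XStream, java.io.FileWriter and java.io.IOException are
// imported.
public static boolean start(RootDoc root) {
    Root xmlRoot = ParseRoot(root);
    if (xmlRoot == null) {
        return false;
    }
    XStream xstream = new XStream();
    try (FileWriter out = new FileWriter("javadoc.xml")) { // hypothetical output location
        xstream.toXML(xmlRoot, out);
    } catch (IOException e) {
        log.error("unable to write XML output", e);
        return false;
    }
    return true;
}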
/**
 * Process each package and the classes/interfaces within it.
 *
 * @param root the RootDoc holding the specified packages and classes
 */
public void processPackages(RootDoc root) {
    PackageDoc[] specified_pd = root.specifiedPackages();
    Map pdl = new TreeMap();
    for (int i = 0; specified_pd != null && i < specified_pd.length; i++) {
        pdl.put(specified_pd[i].name(), specified_pd[i]);
    }

    // Classes may be specified separately, so merge their packages into the
    // list of specified packages.
    ClassDoc[] cd = root.specifiedClasses(); // the specific classes to document
    Map classesToUse = new HashMap();
    for (int i = 0; cd != null && i < cd.length; i++) {
        PackageDoc cpd = cd[i].containingPackage();
        if (cpd == null && !packagesOnly) {
            // If the RootDoc object has been created from a jar file
            // this duplicates classes, so we have to be able to disable it.
            // TODO this is still null?
            cpd = root.packageNamed("anonymous");
        }
        String pkgName = cpd.name();
        String className = cd[i].name();
        if (trace) System.out.println("Found package " + pkgName + " for class " + className);
        if (!pdl.containsKey(pkgName)) {
            if (trace) System.out.println("Adding new package " + pkgName);
            pdl.put(pkgName, cpd);
        }

        // Keep track of the specific classes to be used for this package.
        List classes;
        if (classesToUse.containsKey(pkgName)) {
            classes = (ArrayList) classesToUse.get(pkgName);
        } else {
            classes = new ArrayList();
        }
        classes.add(cd[i]);
        classesToUse.put(pkgName, classes);
    }

    PackageDoc[] pd = (PackageDoc[]) pdl.values().toArray(new PackageDoc[0]);
    for (int i = 0; pd != null && i < pd.length; i++) {
        String pkgName = pd[i].name();
        // Check for an exclude tag in the package doc block, but not
        // in the package.htm[l] file.
        if (!shownElement(pd[i], null)) continue;
        if (trace) System.out.println("PROCESSING PACKAGE: " + pkgName);
        outputFile.println("<package name=\"" + pkgName + "\">");

        int tagCount = pd[i].tags().length;
        if (trace) System.out.println("#tags: " + tagCount);

        List classList;
        if (classesToUse.containsKey(pkgName)) {
            // Use only the specified classes in the package.
            System.out.println("Using the specified classes");
            classList = (ArrayList) classesToUse.get(pkgName);
        } else {
            // Use all classes in the package.
            classList = new LinkedList(Arrays.asList(pd[i].allClasses()));
        }
        Collections.sort(classList);
        ClassDoc[] classes = new ClassDoc[classList.size()];
        classes = (ClassDoc[]) classList.toArray(classes);
        processClasses(classes, pkgName);
        addPkgDocumentation(root, pd[i], 2);

        outputFile.println("</package>");
    }
} // processPackages
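// shownElement is used above but not shown in this excerpt. A minimal sketch of the assumed
// behaviour: an element is hidden when its doc block carries a configurable exclude tag, and
// members are additionally filtered by a minimum visibility level. The excludeTag field and
// the exact level names are assumptions; the package loop above passes null because packages
// have no member visibility to check.
public boolean shownElement(Doc doc, String memberVisibilityLevel) {
    if (doc != null && excludeTag != null && doc.tags(excludeTag).length > 0) {
        return false; // explicitly excluded via the exclude tag
    }
    if (memberVisibilityLevel == null || !(doc instanceof ProgramElementDoc)) {
        return true; // packages and non-member elements: only the exclude tag applies
    }
    ProgramElementDoc ped = (ProgramElementDoc) doc;
    if ("public".equals(memberVisibilityLevel)) return ped.isPublic();
    if ("protected".equals(memberVisibilityLevel)) return ped.isPublic() || ped.isProtected();
    if ("package".equals(memberVisibilityLevel)) return !ped.isPrivate();
    return true; // "private" or unrecognized level: show everything
}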