/**
 * Given a master list and the new sub list, replace the items in the master list with the
 * matching items from the new sub list. This process works even if the length of the new sublist
 * is different.
 *
 * <p>For example, given:
 *
 * <pre>
 * replace A by A':
 * M=[A,B,C], S=[A'] =&gt; [A',B,C]
 * M=[A,B,A,B,C], S=[A',A'] =&gt; [A',B,A',B,C]
 *
 * when list length is different:
 * M=[A,A,B,C], S=[] =&gt; [B,C]
 * M=[A,B,C], S=[A',A'] =&gt; [A',A',B,C]
 * M=[B,C], S=[A',A'] =&gt; [B,C,A',A']
 * </pre>
 *
 * @param list the master list; mutated in place
 * @param name children whose {@code name} equals this are the ones replaced/removed
 * @param newSubList replacement children, consumed in encounter order
 * @return the old children that were replaced or removed, in encounter order
 */
private static List<Child> stitchList(
    List<Child> list, String name, List<? extends Child> newSubList) {
  List<Child> removed = new LinkedList<Child>();
  // to preserve order, try to put new items where old items are found.
  // if the new list is longer than the current list, we put all the extra
  // after the last item in the sequence. That is,
  // given [A,A,B,C] and [A',A',A'], we'll update the list to [A',A',A',B,C]
  // The 'last' variable remembers the insertion position.
  int last = list.size();
  ListIterator<Child> itr = list.listIterator();
  ListIterator<? extends Child> jtr = newSubList.listIterator();
  while (itr.hasNext()) {
    Child child = itr.next();
    if (child.name.equals(name)) {
      if (jtr.hasNext()) {
        itr.set(jtr.next()); // replace in place, keeping position
        last = itr.nextIndex(); // remember where extras would be spliced in
        removed.add(child);
      } else {
        itr.remove(); // replacements exhausted: drop the remaining matches
        removed.add(child);
      }
    }
  }
  // new list is longer than the current one: splice leftovers at the last match position
  if (jtr.hasNext()) list.addAll(last, newSubList.subList(jtr.nextIndex(), newSubList.size()));
  return removed;
}
/**
 * Builds a platform path string from the macro arguments: every argument after
 * args[0] is split into fragments by {@code Processor.split} (presumably on
 * commas — confirm against Processor) and joined with {@link File#pathSeparator}.
 */
public String _path(String args[]) {
  List<String> fragments = new ArrayList<String>();
  int index = 1; // args[0] is the macro name itself, not a path element
  while (index < args.length) {
    fragments.addAll(Processor.split(args[index]));
    index++;
  }
  return Processor.join(fragments, File.pathSeparator);
}
/**
 * Method that will collect all member (non-static) fields that are either public, or have at
 * least a single annotation associated with them.
 *
 * <p>Populates {@code _fields} (and optionally {@code _ignoredFields}) as a side effect.
 *
 * @param collectIgnored Whether to collect list of ignored methods for later retrieval
 */
public void resolveFields(boolean collectIgnored) {
  LinkedHashMap<String, AnnotatedField> foundFields =
      new LinkedHashMap<String, AnnotatedField>();
  _addFields(foundFields, _class);
  /* And last but not least: let's remove all fields that are
   * deemed to be ignorable after all annotations have been
   * properly collapsed.
   */
  Iterator<Map.Entry<String, AnnotatedField>> it = foundFields.entrySet().iterator();
  while (it.hasNext()) {
    AnnotatedField f = it.next().getValue();
    if (_annotationIntrospector.isIgnorableField(f)) {
      // Iterator.remove() is the only safe way to drop entries mid-iteration.
      it.remove();
      if (collectIgnored) {
        _ignoredFields = ArrayBuilders.addToList(_ignoredFields, f);
      }
    }
    // FIX: removed a dead, empty `else { }` branch that was pure noise.
  }
  if (foundFields.isEmpty()) {
    _fields = Collections.emptyList();
  } else {
    // Pre-size to avoid re-allocation; insertion order of the map is preserved.
    _fields = new ArrayList<AnnotatedField>(foundFields.size());
    _fields.addAll(foundFields.values());
  }
}
/**
 * Flattens this union into the list of all union-free types it can take,
 * by recursively materializing each branch and concatenating the results.
 */
public List<InferredType> materializeWithoutUnions() {
  List<InferredType> flattened = new ArrayList<InferredType>();
  for (InferredType alternative : unionTypes) {
    List<InferredType> expanded = alternative.materializeWithoutUnions();
    flattened.addAll(expanded);
  }
  return flattened;
}
/** Collects the base names contributed by every element of this struct, in order. */
List<String> getBases() {
  List<String> bases = new ArrayList<String>();
  for (InferredType element : structTypes) {
    List<String> elementBases = element.getBases();
    bases.addAll(elementBases);
  }
  return bases;
}
/**
 * Expands this struct into union-free variants by materializing each element type
 * and combining the per-element alternatives into whole structs (a cross-product-style
 * expansion: the struct list is duplicated once per extra alternative of each element).
 */
public List<InferredType> materializeWithoutUnions() {
  List<InferredType> newStructs = new ArrayList<InferredType>();
  for (int i = 0; i < structTypes.size(); i++) {
    // All union-free variants of element i.
    List<InferredType> curTrees = structTypes.get(i).materializeWithoutUnions();
    if (i == 0) {
      // Seed: one single-element struct per variant of the first element.
      for (int j = 0; j < curTrees.size(); j++) {
        List<InferredType> curTypeList = new ArrayList<InferredType>();
        curTypeList.add(curTrees.get(j));
        newStructs.add(new StructType(curTypeList));
      }
    } else {
      // Grow the working set: keep the existing structs and add one duplicated
      // copy of the whole set for each extra variant of element i.
      List<InferredType> evenNewerStructs = new ArrayList<InferredType>();
      evenNewerStructs.addAll(newStructs);
      for (int j = 1; j < curTrees.size(); j++) {
        for (int k = 0; k < newStructs.size(); k++) {
          evenNewerStructs.add(newStructs.get(k).duplicate());
        }
      }
      // NOTE(review): this appends EVERY variant of element i to EVERY struct in
      // evenNewerStructs, so each struct ends up containing all curTrees variants
      // rather than exactly one per struct. For a true cross product one would
      // expect variant j to be added only to the j-th duplicated slice — confirm
      // whether this is intentional before relying on the output shape.
      for (int j = 0; j < curTrees.size(); j++) {
        for (int k = 0; k < evenNewerStructs.size(); k++) {
          ((StructType) evenNewerStructs.get(k)).addElt(curTrees.get(j));
        }
      }
      newStructs = evenNewerStructs;
    }
  }
  return newStructs;
}
/**
 * Registers the given content readers, lazily creating the backing list.
 * New readers are inserted at the front so they take priority over any
 * previously registered ones. Returns {@code this} for chaining.
 */
public TagReader<T> with(ContentReader... aMappers) {
  if (mappers == null) {
    mappers = new ArrayList<XmlReader>();
  }
  List<ContentReader> incoming = Arrays.asList(aMappers);
  mappers.addAll(0, incoming);
  return this;
}
/**
 * Collects the property descriptors of the given interface AND of all its super-interfaces,
 * recursing up the hierarchy manually because BeanUtils.getPropertyDescriptors() does not
 * walk interface inheritance. Descriptors inherited from super-interfaces are merged by
 * name so that a getter and a setter declared on different interfaces end up in a single
 * descriptor rather than two duplicates.
 *
 * @param interfaceClass the interface to introspect
 * @return the merged descriptors, most-derived interface first
 */
static PropertyDescriptor[] getInterfacePropertyDescriptors(Class<?> interfaceClass) {
  List<PropertyDescriptor> propDescriptors = new ArrayList<PropertyDescriptor>();
  // Add prop descriptors for interface passed in
  propDescriptors.addAll(Arrays.asList(BeanUtils.getPropertyDescriptors(interfaceClass)));
  // Look for interface inheritance. If super interfaces are found, recurse up the hierarchy tree
  // and add prop
  // descriptors for each interface found.
  // PropertyUtils.getPropertyDescriptors() does not correctly walk the inheritance hierarchy for
  // interfaces.
  Class<?>[] interfaces = interfaceClass.getInterfaces();
  if (interfaces != null) {
    for (Class<?> superInterfaceClass : interfaces) {
      List<PropertyDescriptor> superInterfacePropertyDescriptors =
          Arrays.asList(getInterfacePropertyDescriptors(superInterfaceClass));
      /*
       * #1814758
       * Check for existing descriptor with the same name to prevent 2 property descriptors with the same name being added
       * to the result list. This caused issues when getter and setter of an attribute on different interfaces in
       * an inheritance hierarchy
       */
      for (PropertyDescriptor superPropDescriptor : superInterfacePropertyDescriptors) {
        PropertyDescriptor existingPropDescriptor =
            findPropDescriptorByName(propDescriptors, superPropDescriptor.getName());
        if (existingPropDescriptor == null) {
          propDescriptors.add(superPropDescriptor);
        } else {
          // Same name already present: fill in whichever accessor the existing
          // descriptor is missing from the super-interface's descriptor.
          try {
            if (existingPropDescriptor.getReadMethod() == null) {
              existingPropDescriptor.setReadMethod(superPropDescriptor.getReadMethod());
            }
            if (existingPropDescriptor.getWriteMethod() == null) {
              existingPropDescriptor.setWriteMethod(superPropDescriptor.getWriteMethod());
            }
          } catch (IntrospectionException e) {
            throw new MappingException(e);
          }
        }
      }
    }
  }
  return propDescriptors.toArray(new PropertyDescriptor[propDescriptors.size()]);
}
/**
 * If a {@link Model} is part of a composition, and is the weak part (is part of other models),
 * this method returns all the fields containing connection of {@link ConnectionType.BelongsTo}
 * type. It's useful in the <code>INSERT</code> of the record, for preventing foreign key
 * constraint inconsistency. Results are memoized per model class.
 *
 * @param model The given {@link Model}.
 * @return A list of fields that represent the Models that owns <code>model</code>.
 */
protected List<String> belongsTo(Class model) {
  // Serve the memoized answer when this model has already been analyzed.
  if (belongsToFields.containsKey(model)) {
    return belongsToFields.get(model);
  }
  List<String> owners = new ArrayList<String>();
  // Collect every declared getter carrying a BelongsTo connection annotation.
  for (Method getter : CommonStatic.getDeclaredGetters(model)) {
    Connection connection = getter.getAnnotation(Connection.class);
    if (connection != null && connection.type().equals(ConnectionType.BelongsTo)) {
      owners.add(fieldName(getter));
    }
  }
  // BelongsTo fields declared on superclasses apply to this model too.
  for (Class ancestor : getSupers(model)) {
    owners.addAll(belongsTo(ancestor));
  }
  belongsToFields.put(model, owners);
  return owners;
}
/**
 * Serializes the given CI jobs to the application's job JSON file. Unless the status is
 * {@code CI_CREATE_NEW_JOBS} (or no file exists yet), the new jobs are appended to the
 * jobs already on disk.
 *
 * @param appInfo application whose job file is written
 * @param jobs jobs to persist; a null list is a no-op
 * @param status when equal to CI_CREATE_NEW_JOBS, existing jobs are discarded first
 * @throws PhrescoException wrapping any I/O or serialization failure
 */
private void writeJsonJobs(ApplicationInfo appInfo, List<CIJob> jobs, String status)
    throws PhrescoException {
  try {
    if (jobs == null) {
      return;
    }
    Gson gson = new Gson();
    List<CIJob> existingJobs = getJobs(appInfo);
    if (CI_CREATE_NEW_JOBS.equals(status) || existingJobs == null) {
      existingJobs = new ArrayList<CIJob>();
    }
    existingJobs.addAll(jobs);
    File ciJobFile = new File(getCIJobPath(appInfo));
    String jobJson = gson.toJson(existingJobs);
    FileWriter writer = new FileWriter(ciJobFile);
    try {
      writer.write(jobJson);
      writer.flush();
    } finally {
      // FIX: the writer was never closed, leaking a file handle on every call.
      writer.close();
    }
  } catch (Exception e) {
    throw new PhrescoException(e);
  }
}
/**
 * Migrates a legacy single-job CI configuration to the new list-based job file format:
 * reads the old job, deletes the old file, and rewrites the job wrapped in a list.
 *
 * @param appInfo application whose CI configuration is adapted
 * @return true when adaptation succeeded (or there was nothing to adapt);
 *     false when any step failed (treated as "already adapted")
 */
private boolean adaptExistingJobs(ApplicationInfo appInfo) {
  try {
    CIJob existJob = getJob(appInfo);
    S_LOGGER.debug("Going to get existing jobs to relocate!!!!!");
    if (existJob != null) {
      S_LOGGER.debug("Existing job found " + existJob.getName());
      // FIX: result of the delete was stored in an unused local; the side effect
      // is what matters here.
      deleteCIJobFile(appInfo);
      Gson gson = new Gson();
      List<CIJob> existingJobs = new ArrayList<CIJob>();
      existingJobs.addAll(Arrays.asList(existJob));
      File ciJobFile = new File(getCIJobPath(appInfo));
      String jobJson = gson.toJson(existingJobs);
      FileWriter writer = new FileWriter(ciJobFile);
      try {
        writer.write(jobJson);
        writer.flush();
      } finally {
        // FIX: the writer was never closed, leaking a file handle on every call.
        writer.close();
      }
      S_LOGGER.debug("Existing job moved to new type of project!!");
    }
    return true;
  } catch (Exception e) {
    // Best-effort migration: failure means the project is assumed already adapted.
    S_LOGGER.debug("It is already adapted !!!!! ");
  }
  return false;
}
/**
 * Returns a new mutable list containing all elements of {@code list}
 * followed by {@code element}. The input list is not modified.
 */
private static <T> List<T> join(List<? extends T> list, T element) {
  List<T> combined = new ArrayList<T>(list);
  combined.add(element);
  return combined;
}