/**
 * Initializes the bean's selectable items and a variety of pre-populated collection types.
 *
 * <p>Each field deliberately uses a different concrete collection implementation
 * (TreeSet, LinkedHashSet, CopyOnWriteArraySet, Vector) — presumably to exercise
 * collection-type handling in the select-many component; do not "normalize" them.
 */
public SelectMany05Bean() {
  HobbitBean[] hobbits = {
    new HobbitBean("Bilbo", "Ring Finder"),
    new HobbitBean("Frodo", "Ring Bearer"),
    new HobbitBean("Merry", "Trouble Maker"),
    new HobbitBean("Pippin", "Trouble Maker")
  };
  // LinkedHashSet keeps the items in declaration order for display.
  Set<SelectItem> items = new LinkedHashSet<SelectItem>();
  for (HobbitBean hobbit : hobbits) {
    items.add(new SelectItem(hobbit.getName()));
  }
  hobbitCollection = new TreeSet<HobbitBean>();
  hobbitCollection.addAll(Arrays.asList(hobbits));
  possibleValues = Collections.unmodifiableSet(items);
  // Reverse natural order — presumably to exercise comparator-aware SortedSet handling; TODO confirm.
  initialSortedSetValues = new TreeSet<String>(Collections.reverseOrder());
  initialSortedSetValues.add("Pippin");
  initialSortedSetValues.add("Frodo");
  initialCollectionValues = new LinkedHashSet<String>(2);
  initialCollectionValues.add("Bilbo");
  initialCollectionValues.add("Merry");
  initialSetValues = new CopyOnWriteArraySet<String>(); // not Cloneable
  initialSetValues.add("Frodo");
  initialListValues = new Vector<String>();
  initialListValues.add("Bilbo");
  initialListValues.add("Pippin");
  initialListValues.add("Merry");
  hobbitDataModel =
      new ListDataModel<HobbitBean>(new ArrayList<HobbitBean>(Arrays.asList(hobbits)));
}
/**
 * Verifies that editing document 1 produces the "edit.doc" view with exactly the
 * document and its class's attributes in the model, and that the service is queried
 * in the expected order (reference first, then attributes).
 */
@Test
@SuppressWarnings("unchecked")
public void edit() throws Exception {
  // Stub the service: document 1 resolves to a reference whose class has two attributes.
  final DocumentClass documentClass = new DocumentClass("hello");
  final DocumentReference documentReference =
      newDocumentReference("hello.txt").withDocumentClass(documentClass).build();
  when(documentService.findDocumentReference(1L)).thenReturn(documentReference);
  SortedSet<Attribute> attributes = new TreeSet<>();
  attributes.add(new Attribute("a", false, AttributeDataType.CURRENCY));
  attributes.add(new Attribute("b", false, AttributeDataType.STRING));
  when(documentService.findAttributes(documentClass)).thenReturn(attributes);
  // Exercise the controller.
  final ModelAndView modelAndView = controller.editDocument(1L);
  // Model must contain exactly the document and the attribute set.
  assertThat(modelAndView.getViewName(), is("edit.doc"));
  assertThat(modelAndView.getModel().size(), is(2));
  assertThat(modelAndView.getModel().containsKey("doc"), is(true));
  assertThat((DocumentReference) modelAndView.getModel().get("doc"), is(documentReference));
  assertThat(modelAndView.getModel().containsKey("attributes"), is(true));
  assertThat(((SortedSet<Attribute>) modelAndView.getModel().get("attributes")).size(), is(2));
  // The lookup order matters: reference before attributes, and nothing else.
  InOrder order = inOrder(documentService);
  order.verify(documentService).findDocumentReference(1L);
  order.verify(documentService).findAttributes(documentReference.getDocumentClass());
  order.verifyNoMoreInteractions();
}
/** * Takes a potentially non-sequential alignment and guesses a sequential version of it. Residues * from each structure are sorted sequentially and then compared directly. * * <p>The results of this method are consistent with what one might expect from an identity * function, and are therefore useful with {@link #getSymmetryOrder(Map, Map identity, int, * float)}. * * <ul> * <li>Perfect self-alignments will have the same pre-image and image, so will map X->X * <li>Gaps and alignment errors will cause errors in the resulting map, but only locally. * Errors do not propagate through the whole alignment. * </ul> * * <h4>Example:</h4> * * A non sequential alignment, represented schematically as * * <pre> * 12456789 * 78912345</pre> * * would result in a map * * <pre> * 12456789 * 12345789</pre> * * @param alignment The non-sequential input alignment * @param inverseAlignment If false, map from structure1 to structure2. If true, generate the * inverse of that map. * @return A mapping from sequential residues of one protein to those of the other * @throws IllegalArgumentException if the input alignment is not one-to-one. */ public static Map<Integer, Integer> guessSequentialAlignment( Map<Integer, Integer> alignment, boolean inverseAlignment) { Map<Integer, Integer> identity = new HashMap<Integer, Integer>(); SortedSet<Integer> aligned1 = new TreeSet<Integer>(); SortedSet<Integer> aligned2 = new TreeSet<Integer>(); for (Entry<Integer, Integer> pair : alignment.entrySet()) { aligned1.add(pair.getKey()); if (!aligned2.add(pair.getValue())) throw new IllegalArgumentException( "Alignment is not one-to-one for residue " + pair.getValue() + " of the second structure."); } Iterator<Integer> it1 = aligned1.iterator(); Iterator<Integer> it2 = aligned2.iterator(); while (it1.hasNext()) { if (inverseAlignment) { // 2->1 identity.put(it2.next(), it1.next()); } else { // 1->2 identity.put(it1.next(), it2.next()); } } return identity; }
/**
 * Checks that requesting auto-completion right after "$key." returns all five hints
 * for the mocked class, and that the hints come back in sorted order.
 */
@Test
public void getAutoCompletionHintsForMethodWhenJustAfterTheDot() throws Exception {
  setupMocks("$key.", createTestVelocityContext("key", new TestClass()));
  // The method finder is stubbed to return these hints (deliberately unsorted).
  Hints expectedMethods =
      new Hints()
          .withHints(
              new HintData("doWork", "doWork(...) AncillaryTestClass"),
              new HintData("something", "something String"),
              new HintData("getSomething", "getSomething(...) String"),
              new HintData("method1", "method1(...)"),
              new HintData("method2", "method2(...) String"));
  setupMethodFinderMock(expectedMethods, "", TestClass.class);
  String velocity = "{{velocity}}$key.";
  // Cursor is placed at the very end of the content, just after the dot.
  Hints hints =
      mocker
          .getComponentUnderTest()
          .getAutoCompletionHints(velocity.length(), "xwiki/2.0", velocity);
  assertEquals(5, hints.getHints().size());
  // Verify methods are returned sorted: compare against a TreeSet built in natural order.
  SortedSet<HintData> expected = new TreeSet<HintData>();
  expected.add(new HintData("doWork", "doWork(...) AncillaryTestClass"));
  expected.add(new HintData("getSomething", "getSomething(...) String"));
  expected.add(new HintData("method1", "method1(...)"));
  expected.add(new HintData("method2", "method2(...) String"));
  expected.add(new HintData("something", "something String"));
  assertEquals(expected, hints.getHints());
}
/**
 * Verifies that set(SortedSet) replaces the list's contents with the set's elements
 * in ascending order.
 *
 * <p>Fix: the first element was written as the octal literal {@code 00}; it evaluates
 * to decimal 0, but the notation is misleading next to the decimal 10..40 — use plain 0.
 */
public void testSetSortedSet() {
  Iterator<Integer> i;
  SortedSet<Integer> set = new TreeSet<Integer>();
  set.add(Integer.valueOf(0));
  set.add(Integer.valueOf(10));
  set.add(Integer.valueOf(20));
  set.add(Integer.valueOf(30));
  set.add(Integer.valueOf(40));
  this.list.set(set);
  // The iterator must yield exactly the five values in sorted order.
  i = this.list.iterator();
  assertTrue(i.hasNext());
  assertEquals(Integer.valueOf(0), i.next());
  assertTrue(i.hasNext());
  assertEquals(Integer.valueOf(10), i.next());
  assertTrue(i.hasNext());
  assertEquals(Integer.valueOf(20), i.next());
  assertTrue(i.hasNext());
  assertEquals(Integer.valueOf(30), i.next());
  assertTrue(i.hasNext());
  assertEquals(Integer.valueOf(40), i.next());
  assertFalse(i.hasNext());
}
/**
 * Greedy minimum-stabbing: returns the smallest set of points such that every
 * interval contains at least one point. Repeatedly picks the smallest right
 * endpoint remaining, then discards every interval it stabs.
 *
 * <p>NOTE(review): both TreeSets rely on LeftComp/RightComp; if either comparator
 * returns 0 for two distinct intervals, TreeSet silently drops one of them —
 * presumably the comparators break ties, TODO confirm.
 */
public static List<Integer> findMinimumVisits(Interval[] intervals) {
  SortedSet<Interval> left = new TreeSet<>(new LeftComp());
  SortedSet<Interval> right = new TreeSet<>(new RightComp());
  for (Interval interval : intervals) {
    left.add(interval);
    right.add(interval);
  }
  List<Integer> s = new ArrayList<>();
  while (!left.isEmpty() && !right.isEmpty()) {
    // b = the earliest-finishing interval's right endpoint; it must be visited.
    int b = right.first().right;
    s.add(b);
    // Removes the intervals which intersect with R.cbegin():
    // every interval whose left endpoint is <= b is stabbed by b.
    Iterator<Interval> it = left.iterator();
    while (it.hasNext()) {
      Interval interval = it.next();
      if (interval.left > b) {
        break; // left is sorted by left endpoint, so the rest cannot intersect
      }
      right.remove(interval);
      it.remove();
    }
  }
  return s;
}
Collection<Point2DInt> cutPoints(LineSegmentInt original) { // Log.println("original=" + original); // Log.println("inflateX=" + inflateX); // Log.println("inflateY=" + inflateY); final SortedSet<Point2DInt> result = new TreeSet<Point2DInt>(new Point2DIntComparatorDistance(original.getP1())); if (original.isHorizontal() == false) { for (InflateData x : inflateX) { final LineSegmentInt vertical = new LineSegmentInt(x.getPos(), original.getMinY(), x.getPos(), original.getMaxY()); final Point2DInt inter = original.getSegIntersection(vertical); if (inter != null) { result.add(inter); } } } if (original.isVertical() == false) { for (InflateData y : inflateY) { final LineSegmentInt horizontal = new LineSegmentInt(original.getMinX(), y.getPos(), original.getMaxX(), y.getPos()); final Point2DInt inter = original.getSegIntersection(horizontal); if (inter != null) { result.add(inter); } } } return result; }
/**
 * Visits a class declaration: applies the pre-visitor, then visits every member
 * (type parameters, fields, methods, nested types, constructors) in the order
 * imposed by {@link SourceOrderDeclScanner#comparator}, and finally applies the
 * post-visitor.
 *
 * @param d the declaration to visit
 */
public void visitClassDeclaration(ClassDeclaration d) {
  d.accept(pre);
  SortedSet<Declaration> orderedMembers =
      new TreeSet<Declaration>(SourceOrderDeclScanner.comparator);
  orderedMembers.addAll(d.getFormalTypeParameters());
  orderedMembers.addAll(d.getFields());
  orderedMembers.addAll(d.getMethods());
  orderedMembers.addAll(d.getNestedTypes());
  orderedMembers.addAll(d.getConstructors());
  for (Declaration member : orderedMembers) {
    member.accept(this);
  }
  d.accept(post);
}
/**
 * Builds the sorted set of all strings of length {@code prefixLength} over the
 * search space's alphabet, by extending every current prefix with every letter,
 * one position per round. Starts from the single empty string.
 */
private SortedSet<String> generateAllPrefixes() {
  SortedSet<String> prefixes = new TreeSet<String>();
  prefixes.add("");
  Alphabet alph = motifSearchSpace.getAlphabet();
  for (int round = 0; round < prefixLength; round++) {
    // Each round replaces the working set with all one-letter extensions.
    SortedSet<String> extended = new TreeSet<String>();
    for (String prefix : prefixes) {
      for (Character letter : alph) {
        extended.add(prefix + letter);
      }
    }
    prefixes = extended;
  }
  return prefixes;
}
/**
 * Queues the schedule's next run time and (re)starts the handler when necessary.
 *
 * <p>Holds the {@code timingQueue} monitor for the whole operation. If the new entry
 * becomes the head of a non-empty queue, the currently waiting handler is cancelled
 * and re-dispatched so it wakes up for the earlier event.
 *
 * <p>NOTE(review): the non-empty branch dereferences {@code scheduleHandler.future}
 * without a null check — presumably a non-empty queue implies a live handler; TODO confirm.
 */
private void dispatchForExecution(Schedule schedule) {
  long now = System.currentTimeMillis();
  synchronized (timingQueue) {
    if (timingQueue.size() == 0) {
      long nextRun = schedule.nextRun(now);
      if (nextRun < 0) {
        return; // schedule has no further runs
      }
      System.out.println("Next run at: " + new DateTime(nextRun));
      timingQueue.add(new ScheduleTime(schedule.identity().get(), nextRun));
      // First entry: start the handler unless one is already running.
      if (scheduleHandler == null) {
        dispatchHandler();
      }
    } else {
      // Remember the current head so we can tell whether the new entry preempts it.
      ScheduleTime first = timingQueue.first();
      long nextRun = schedule.nextRun(now);
      if (nextRun < 0) {
        return;
      }
      System.out.println("Next run at: " + new DateTime(nextRun));
      timingQueue.add(new ScheduleTime(schedule.identity().get(), nextRun));
      ScheduleTime newFirst = timingQueue.first();
      if (!first.equals(newFirst)) {
        // We need to restart the managementThread, which is currently waiting for a 'later' event
        // to occur than the one that was just scheduled.
        scheduleHandler.future.cancel(true);
        dispatchHandler();
      }
    }
  }
}
/**
 * Converts the raw datatype-property values of each individual into booleans.
 * Literals matching "true"/"false" (case-insensitive) are converted; anything
 * else is skipped with a warning. Every individual from the raw mapping appears
 * in the result, possibly with an empty value set.
 *
 * @param datatypeProperty the boolean-valued property to resolve
 * @return individuals mapped to their parsed boolean values
 * @throws ReasoningMethodUnsupportedException propagated from the raw lookup
 */
protected Map<Individual, SortedSet<Boolean>> getBooleanDatatypeMembersImpl(
    DatatypeProperty datatypeProperty) throws ReasoningMethodUnsupportedException {
  Map<Individual, SortedSet<Constant>> rawValues = getDatatypeMembersImpl(datatypeProperty);
  Map<Individual, SortedSet<Boolean>> result = new TreeMap<Individual, SortedSet<Boolean>>();
  for (Entry<Individual, SortedSet<Constant>> entry : rawValues.entrySet()) {
    SortedSet<Boolean> parsed = new TreeSet<Boolean>();
    for (Constant constant : entry.getValue()) {
      String literal = constant.getLiteral();
      if (literal.equalsIgnoreCase("true")) {
        parsed.add(Boolean.TRUE);
      } else if (literal.equalsIgnoreCase("false")) {
        parsed.add(Boolean.FALSE);
      } else {
        // Unparseable literal: warn and drop, keeping the rest of the values.
        logger.warn(
            "Requested to parse boolean value of property "
                + datatypeProperty
                + ", but "
                + constant
                + " could not be parsed successfully.");
      }
    }
    result.put(entry.getKey(), parsed);
  }
  return result;
}
@VisibleForTesting static StripCapabilitiesResult stripCapabilities( XmlElement configElement, Set<String> allCapabilitiesFromHello) { // collect all namespaces Set<String> foundNamespacesInXML = getNamespaces(configElement); LOG.trace( "All capabilities {}\nFound namespaces in XML {}", allCapabilitiesFromHello, foundNamespacesInXML); // required are referenced both in xml and hello SortedSet<String> requiredCapabilities = new TreeSet<>(); // can be removed SortedSet<String> obsoleteCapabilities = new TreeSet<>(); for (String capability : allCapabilitiesFromHello) { String namespace = capability.replaceAll("\\?.*", ""); if (foundNamespacesInXML.contains(namespace)) { requiredCapabilities.add(capability); } else { obsoleteCapabilities.add(capability); } } LOG.trace( "Required capabilities {}, \nObsolete capabilities {}", requiredCapabilities, obsoleteCapabilities); return new StripCapabilitiesResult(requiredCapabilities, obsoleteCapabilities); }
/**
 * Shell completion for bundle identifiers: completes numeric buffers against bundle
 * IDs and non-numeric buffers against symbolic names of bundles accepted by the matcher.
 * Candidates are the suffixes remaining after {@code buffer}.
 *
 * @return -1 when nothing completes, otherwise the buffer length (the completion anchor)
 */
@Override
public int complete(String buffer, int cursor, List<CharSequence> candidates) {
  Bundle[] bundles = bundleContext.getBundles();
  List<Long> bundleIds = new ArrayList<Long>();
  for (Bundle b : bundles) {
    if (matcher.bundleMatches(b)) {
      bundleIds.add(b.getBundleId());
    }
  }
  SortedSet<String> variants = new TreeSet<String>();
  // Digits-only input completes IDs; anything else completes symbolic names.
  if (buffer.matches("^[0-9]+$")) {
    for (Long l : bundleIds) {
      variants.add(String.valueOf(l));
    }
  } else {
    for (Long l : bundleIds) {
      variants.add(bundleContext.getBundle(l).getSymbolicName());
    }
  }
  if (buffer.isEmpty()) {
    candidates.addAll(variants);
  } else {
    // tailSet starts at the first variant >= buffer; since the set is sorted,
    // the prefix matches are contiguous and we can stop at the first mismatch.
    for (String match : variants.tailSet(buffer)) {
      if (!match.startsWith(buffer)) {
        break;
      }
      candidates.add(match.substring(buffer.length()));
    }
  }
  return candidates.isEmpty() ? -1 : buffer.length();
}
/** @tests java.util.TreeSet#add(java.lang.Object) */ public void test_addLjava_lang_Object() { // Test for method boolean java.util.TreeSet.add(java.lang.Object) ts.add(new Integer(-8)); assertTrue("Failed to add Object", ts.contains(new Integer(-8))); ts.add(objArray[0]); assertTrue("Added existing element", ts.size() == objArray.length + 1); }
/**
 * Populates {@code props} with the editable request properties for the current
 * operation, with special handling for "add" (synthetic resource-name prop) and
 * "write-attribute" on a leaf node (fixed name prop plus a value prop seeded from
 * the resource description).
 *
 * @param requestProperties the operation's request-properties model node
 * @throws Exception propagated from the CLI executor call
 */
private void setProps(ModelNode requestProperties) throws Exception {
  props = new TreeSet<RequestProp>();
  if (opName.equals("add")) {
    // "add" needs a synthetic, required property for the new resource's name.
    UserObject usrObj = (UserObject) node.getUserObject();
    props.add(
        new RequestProp(
            "/" + usrObj.getName() + "=<name>/",
            "Resource name for the new " + usrObj.getName(),
            true,
            ModelType.STRING));
  }
  if (opName.equals("write-attribute") && node.isLeaf()) {
    ModelNode nameNode = requestProperties.get("name");
    nameNode
        .get("type")
        .set(ModelType.UNDEFINED); // undefined type will display as uneditable String
    UserObject usrObj = (UserObject) node.getUserObject();
    // Pre-fill the "name" property with the attribute being written.
    ModelNode nameNodeValue = new ModelNode();
    nameNodeValue.set(usrObj.getName());
    props.add(new RequestProp("name", requestProperties.get("name"), nameNodeValue));
    // Fetch the attribute's metadata so the "value" editor knows its type.
    ModelNode rscDesc =
        cliGuiCtx.getExecutor().doCommand(node.addressPath() + ":read-resource-description");
    ModelNode valueNode = rscDesc.get("result", "attributes", usrObj.getName());
    valueNode.get("required").set(false); // value is never required for write-attribute
    ModelNode valueNodeValue = usrObj.getBackingNode().get(usrObj.getName());
    props.add(new RequestProp("value", valueNode, valueNodeValue));
    // write-attribute uses only the two props above; skip the generic listing.
    return;
  }
  // Generic case: one editable prop per declared request property.
  for (Property prop : requestProperties.asPropertyList()) {
    props.add(new RequestProp(prop.getName(), prop.getValue(), null));
  }
}
/** Registers every token recognized as an affirmative ("yes") value. */
public StringToBoolean() {
  for (String affirmative : new String[] {"yes", "y", "true", "ja", "on"}) {
    $yes.add(affirmative);
  }
}
/**
 * Saves a given graph to a dot file, creating the file or overwriting an old one.
 *
 * <p>Fix: the writer is now closed in a {@code finally} block, so the file handle is
 * no longer leaked when any write fails.
 *
 * @param g the jung graph to save
 * @param filename the destination path of the dot file
 * @param labeler node object -> node name converter
 * @param graphName the name of the exported graph (usually the project's name)
 * @throws IOException on I/O error
 */
public void save(Graph<V, E> g, String filename, Transformer<V, String> labeler, String graphName)
    throws IOException {
  // Sort nodes and each node's successors so the output is deterministic.
  SortedSet<V> nodes = new TreeSet<V>();
  Map<V, SortedSet<V>> successors = new HashMap<V, SortedSet<V>>();
  for (V source : g.getVertices()) {
    SortedSet<V> successorTree = new TreeSet<V>(g.getSuccessors(source));
    nodes.add(source);
    successors.put(source, successorTree);
  }
  BufferedWriter writer =
      new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filename), "UTF-8"));
  try {
    writer.write("digraph \"" + graphName + "\" {\n");
    for (V from : nodes) {
      Collection<V> actSuccessors = successors.get(from);
      for (V to : actSuccessors) {
        writer.write(
            "\t\"" + labeler.transform(from) + "\" -> \"" + labeler.transform(to) + "\";\n");
      }
      // Isolated vertices still need a line of their own to appear in the graph.
      if (g.getPredecessorCount(from) == 0 && actSuccessors.isEmpty()) {
        writer.write("\t\"" + labeler.transform(from) + "\";\n");
      }
    }
    writer.write("}");
  } finally {
    writer.close(); // previously skipped when a write threw, leaking the handle
  }
}
/**
 * Enumerates all reachable placements of the current piece: for each of the four
 * rotations, slides a working copy right then left, dropping a snapshot of every
 * horizontal position, and records rotation/translation/drop counts on each child.
 *
 * <p>NOTE(review): {@code t} carries over between the right and left sweeps and
 * across rotations — presumably intentional so translations are relative to the
 * previous sweep's end position; TODO confirm.
 */
public SortedSet<Grid> children() {
  SortedSet<Grid> children = new TreeSet<Grid>();
  Grid gridA, gridB;
  int t = 0; // running horizontal translation counter
  gridA = new Grid(this); // working copy slid around; `this` is never mutated
  for (int i = 0; i < 4; i++) {
    // Sweep right, snapshotting each position after a full drop.
    while (gridA.moveRight()) {
      t++;
      gridB = new Grid(gridA);
      while (gridB.moveDown()) gridB.down++;
      gridB.rotations = i;
      gridB.translations = t;
      children.add(gridB);
    }
    // Sweep back left over the remaining positions.
    while (gridA.moveLeft()) {
      t--;
      gridB = new Grid(gridA);
      while (gridB.moveDown()) gridB.down++;
      gridB.rotations = i;
      gridB.translations = t;
      children.add(gridB);
    }
    gridA.turnClockwise();
  }
  return children;
}
/**
 * @see de.interactive_instruments.ShapeChange.Model.ClassInfo#supertypes()
 */
public SortedSet<String> supertypes() {
  // Convert base class object set to base class id set.
  SortedSet<String> baseids = new TreeSet<String>();
  // A single base class takes precedence; otherwise fall back to the set
  // (the two fields are presumably mutually exclusive — TODO confirm).
  if (baseclassInfo != null) baseids.add(baseclassInfo.id());
  else if (baseclassInfoSet != null)
    for (ClassInfoEA bci : baseclassInfoSet) baseids.add(bci.id());
  return baseids;
} // supertypes()
/**
 * Adds a version to this asset, tracking its id alongside it.
 *
 * <p>The version itself is always added to {@code versionSet}, even when it is
 * {@code null} or has no id; only a non-null id is recorded in {@code versionIdSet}.
 *
 * @param version the version to add (may be {@code null})
 * @see edu.utah.further.mdr.api.domain.asset.Asset#addVersion(edu.utah.further.mdr.api.domain.asset.Version)
 */
@Override
public void addVersion(final Version version) {
  if (version != null) {
    final Long versionId = version.getId();
    if (versionId != null) {
      versionIdSet.add(versionId);
    }
  }
  versionSet.add(version);
}
/**
 * Runs all the internal checks to verify that the job has been initialized.
 *
 * <p>A job counts as initialized when every control unit has a machine address,
 * a total duration has been captured from some unit, and at least one start
 * timestamp is known. The job start time is taken as the latest of the collected
 * start timestamps.
 *
 * @return true if the job was successfully initialized
 */
private boolean runJobNotInitializedChecks() {
  LOGGER.trace("runJobNotInitializedChecks [in]");
  // Capture all the start timestamps from the agents.
  final SortedSet<Timestamp> lStartTimeStamps = new TreeSet<Timestamp>();
  // Check all the control units.
  for (ProcessControlUnitWithMonitoring lUnit : mProcessUnits) {
    // Check the machine address first: if it has been set, the unit
    // was initialized successfully.
    final String lAgentID = lUnit.getAgentId();
    LOGGER.trace("Checking machine address for unit [{}]", lAgentID);
    if (lUnit.getMachineAddress() == null) {
      // One single machine missing means the job hasn't been initialized.
      LOGGER.trace("Machine address not found found");
      return false;
    }
    LOGGER.trace("[{}] Machine address found [{}]", lAgentID, lUnit.getMachineAddress());
    // Capture the test duration from the first unit that reports one.
    final Long lUnitTestDuration = lUnit.getDuration();
    if (mTotalJobDuration == null && lUnitTestDuration != null) {
      LOGGER.trace("[{}] Setting test duration to [{}]", lAgentID, lUnitTestDuration);
      mTotalJobDuration = lUnitTestDuration;
    }
    final Timestamp lUnitStartTimeStamp = lUnit.getStartTimestamp();
    // TreeSet doesn't handle nulls very well, so only add known timestamps.
    if (lUnitStartTimeStamp != null) {
      LOGGER.trace("[{}] Setting initial timestamp [{}]", lAgentID, lUnitStartTimeStamp);
      lStartTimeStamps.add(lUnitStartTimeStamp);
    } else if (lUnit.getCurrentUserCount() != null && lUnit.getCurrentUserCount() > 0) {
      // Users already exist, so the unit has effectively started:
      // default its start timestamp to "now".
      final Timestamp lInitialTimestamp = new Timestamp(System.currentTimeMillis());
      LOGGER.trace("[{}] Default initial timestamp to [{}]", lAgentID, lInitialTimestamp);
      lStartTimeStamps.add(lInitialTimestamp);
    }
  }
  // The biggest timestamp, or null if we don't have any start time yet.
  mJobStartTime = lStartTimeStamps.isEmpty() ? null : lStartTimeStamps.last();
  mJobInitialized = mJobStartTime != null && mTotalJobDuration != null;
  LOGGER.trace("Job has been initialized [{}]", mJobInitialized);
  return mJobInitialized;
}
/**
 * SAX end-element callback for a blob-listing response: routes the accumulated
 * element text into the matching field, assembles a blob entry on {@code </Blob>},
 * and always resets the text buffer before returning.
 *
 * <p>Empty strings are normalized to {@code null} for the paging/filter fields.
 */
public void endElement(String uri, String name, String qName) {
  if (qName.equals("MaxResults")) {
    maxResults = Integer.parseInt(currentText.toString().trim());
  } else if (qName.equals("Marker")) {
    marker = currentText.toString().trim();
    marker = (marker.equals("")) ? null : marker;
  } else if (qName.equals("Prefix")) {
    prefix = currentText.toString().trim();
    prefix = (prefix.equals("")) ? null : prefix;
  } else if (qName.equals("Delimiter")) {
    delimiter = currentText.toString().trim();
    delimiter = (delimiter.equals("")) ? null : delimiter;
  } else if (qName.equals("NextMarker")) {
    nextMarker = currentText.toString().trim();
    nextMarker = (nextMarker.equals("")) ? null : nextMarker;
  } else if (qName.equals("Blob")) {
    // End of one blob entry: flush the accumulated per-blob fields into a
    // properties object, then reset them for the next entry.
    ListableBlobProperties md =
        new ListableBlobPropertiesImpl(
            currentName,
            currentUrl,
            currentLastModified,
            currentETag,
            currentSize,
            currentContentType,
            currentContentEncoding,
            currentContentLanguage);
    blobMetadata.add(md);
    currentName = null;
    currentUrl = null;
    currentLastModified = null;
    currentETag = null;
    currentSize = -1;
    currentContentType = null;
    currentContentEncoding = null;
    currentContentLanguage = null;
  } else if (qName.equals("Url")) {
    currentUrl = URI.create(currentText.toString().trim());
  } else if (qName.equals("LastModified")) {
    currentLastModified = dateParser.rfc822DateParse(currentText.toString().trim());
  } else if (qName.equals("Etag")) {
    currentETag = currentText.toString().trim();
  } else if (qName.equals("Name")) {
    // <Name> appears under both <Blob> and <BlobPrefix>; the inBlob/inBlobPrefix
    // flags (set elsewhere) disambiguate which one we are inside.
    if (inBlob) currentName = currentText.toString().trim();
    else if (inBlobPrefix) blobPrefixes.add(currentText.toString().trim());
  } else if (qName.equals("Size")) {
    currentSize = Long.parseLong(currentText.toString().trim());
  } else if (qName.equals("ContentType")) {
    currentContentType = currentText.toString().trim();
  } else if (qName.equals("ContentEncoding")) {
    currentContentEncoding = currentText.toString().trim();
    if (currentContentEncoding.equals("")) currentContentEncoding = null;
  } else if (qName.equals("ContentLanguage")) {
    currentContentLanguage = currentText.toString().trim();
    if (currentContentLanguage.equals("")) currentContentLanguage = null;
  }
  // Reset the character buffer for the next element regardless of which tag ended.
  currentText = new StringBuilder();
}
/**
 * Splits a whitespace-separated hide specification into exact attribute names and
 * prefix patterns. A token ending in {@code *} is added to {@code hiddenPrefixes}
 * with the star stripped; every other token goes to {@code hiddenStrings} as-is.
 *
 * @param hide whitespace-separated hide spec (e.g. {@code "name age* id"})
 * @param hiddenStrings receives the exact-match names
 * @param hiddenPrefixes receives the prefixes (star removed)
 */
private static void parseHiddenAttributes(
    String hide, SortedSet<String> hiddenStrings, SortedSet<String> hiddenPrefixes) {
  final StringTokenizer tokens = new StringTokenizer(hide);
  while (tokens.hasMoreTokens()) {
    final String token = tokens.nextToken();
    if (token.endsWith("*")) {
      hiddenPrefixes.add(token.substring(0, token.length() - 1));
    } else {
      hiddenStrings.add(token);
    }
  }
}
/** Demonstrates that a TreeSet keeps its String elements in natural (alphabetical) order. */
public static void main(String[] args) {
  SortedSet<String> sortedNames = new TreeSet<>();
  for (String name : new String[] {"John", "Adam", "Eve", "Donna"}) {
    sortedNames.add(name);
  }
  System.out.println(sortedNames);
}
/**
 * Round-trip test for the network meta-analysis converter: builds the expected
 * in-memory analysis from the example data, then checks that loading the JAXB
 * form produces an equal entity and that saving the entity reproduces the
 * JAXB form.
 */
@Test
public void testConvertNetworkMetaAnalysis()
    throws Exception, InstantiationException, InvocationTargetException, NoSuchMethodException {
  Domain domain = new DomainImpl();
  ExampleData.initDefaultData(domain);
  String name = "CGI network meta-analysis";
  MetaAnalysisWithStudies ma = d_jaxbConverterTest.buildNetworkMetaAnalysis(name);
  // Convert and register every study referenced by the analysis fixture.
  List<Study> studies = new ArrayList<Study>();
  for (org.drugis.addis.entities.data.Study study : ma.d_studies) {
    Study studyEnt = JAXBConvertor.convertStudy(study, domain);
    domain.getStudies().add(studyEnt);
    studies.add(studyEnt);
  }
  // The three treatment alternatives compared by the network.
  TreatmentDefinition combi =
      TreatmentDefinition.createTrivial(
          Arrays.asList(ExampleData.buildDrugFluoxetine(), ExampleData.buildDrugSertraline()));
  TreatmentDefinition parox =
      TreatmentDefinition.createTrivial(ExampleData.buildDrugParoxetine());
  TreatmentDefinition sertr =
      TreatmentDefinition.createTrivial(ExampleData.buildDrugSertraline());
  SortedSet<TreatmentDefinition> alternatives = new TreeSet<TreatmentDefinition>();
  alternatives.add(combi);
  alternatives.add(parox);
  alternatives.add(sertr);
  // Map each study's arms to the treatment definition they implement.
  Map<Study, Map<TreatmentDefinition, Arm>> armMap =
      new HashMap<Study, Map<TreatmentDefinition, Arm>>();
  Map<TreatmentDefinition, Arm> study1map = new HashMap<TreatmentDefinition, Arm>();
  study1map.put(combi, studies.get(0).getArms().get(0));
  study1map.put(sertr, studies.get(0).getArms().get(1));
  armMap.put(studies.get(0), study1map);
  Map<TreatmentDefinition, Arm> study2map = new HashMap<TreatmentDefinition, Arm>();
  study2map.put(parox, studies.get(1).getArms().get(0));
  study2map.put(sertr, studies.get(1).getArms().get(1));
  armMap.put(studies.get(1), study2map);
  Map<TreatmentDefinition, Arm> study3map = new HashMap<TreatmentDefinition, Arm>();
  study3map.put(sertr, studies.get(2).getArms().get(0));
  study3map.put(parox, studies.get(2).getArms().get(1));
  study3map.put(combi, studies.get(2).getArms().get(2));
  armMap.put(studies.get(2), study3map);
  // So the reading *by definition* puts the studies in their natural order
  Collections.sort(studies);
  NetworkMetaAnalysis expected =
      new NetworkMetaAnalysis(
          name,
          ExampleData.buildIndicationDepression(),
          ExampleData.buildEndpointCgi(),
          studies,
          alternatives,
          armMap);
  // Round-trip: JAXB -> entity and entity -> JAXB must both match.
  assertEntityEquals(expected, NetworkMetaAnalysisConverter.load(ma.d_nwma, domain));
  assertEquals(ma.d_nwma, NetworkMetaAnalysisConverter.save(expected));
}
/** Builds the fixture set of installed packages: "Package 1" .. "Package 5" with matching IDs. */
private SortedSet<Package> getInstalledPackages() {
  SortedSet<Package> installed = new TreeSet<Package>();
  for (int id = 1; id <= 5; id++) {
    installed.add(
        PackageBuilder.builder()
            .setName("Package " + id)
            .setID(String.valueOf(id))
            .build());
  }
  return installed;
}
/**
 * Reconstructs a regular lat/lon tie-point grid from GeoTIFF model tie points
 * (groups of 6 doubles: i, j, k, lon, lat, alt) and attaches a tie-point
 * geo-coding to the product.
 *
 * <p>NOTE(review): the xDiff/yDiff computations divide by (size - 1), so a tie-point
 * set with a single distinct x or y coordinate divides by zero — presumably the
 * caller guarantees at least a 2x2 grid; TODO confirm. The code also assumes the
 * distinct x/y coordinates are evenly spaced.
 */
private static void applyTiePointGeoCoding(
    TiffFileInfo info, double[] tiePoints, Product product) {
  // Collect the distinct raster x (i) and y (j) coordinates of the tie points.
  final SortedSet<Double> xSet = new TreeSet<>();
  final SortedSet<Double> ySet = new TreeSet<>();
  for (int i = 0; i < tiePoints.length; i += 6) {
    xSet.add(tiePoints[i]);
    ySet.add(tiePoints[i + 1]);
  }
  // Grid geometry: origin, extent, and (assumed uniform) step per axis.
  final double xMin = xSet.first();
  final double xMax = xSet.last();
  final double xDiff = (xMax - xMin) / (xSet.size() - 1);
  final double yMin = ySet.first();
  final double yMax = ySet.last();
  final double yDiff = (yMax - yMin) / (ySet.size() - 1);
  final int width = xSet.size();
  final int height = ySet.size();
  // Index each distinct coordinate by its rank so tie points can be placed in the grid.
  int idx = 0;
  final Map<Double, Integer> xIdx = new HashMap<>();
  for (Double val : xSet) {
    xIdx.put(val, idx);
    idx++;
  }
  idx = 0;
  final Map<Double, Integer> yIdx = new HashMap<>();
  for (Double val : ySet) {
    yIdx.put(val, idx);
    idx++;
  }
  // Scatter each tie point's lon (offset 3) and lat (offset 4) into row-major arrays.
  final float[] lats = new float[width * height];
  final float[] lons = new float[width * height];
  for (int i = 0; i < tiePoints.length; i += 6) {
    final int idxX = xIdx.get(tiePoints[i + 0]);
    final int idxY = yIdx.get(tiePoints[i + 1]);
    final int arrayIdx = idxY * width + idxX;
    lons[arrayIdx] = (float) tiePoints[i + 3];
    lats[arrayIdx] = (float) tiePoints[i + 4];
  }
  String[] names = Utils.findSuitableLatLonNames(product);
  final TiePointGrid latGrid =
      new TiePointGrid(names[0], width, height, xMin, yMin, xDiff, yDiff, lats);
  final TiePointGrid lonGrid =
      new TiePointGrid(names[1], width, height, xMin, yMin, xDiff, yDiff, lons);
  product.addTiePointGrid(latGrid);
  product.addTiePointGrid(lonGrid);
  final SortedMap<Integer, GeoKeyEntry> geoKeyEntries = info.getGeoKeyEntries();
  final Datum datum = getDatum(geoKeyEntries);
  product.setGeoCoding(new TiePointGeoCoding(latGrid, lonGrid, datum));
}
/** Builds the fixture set of available packages: "Package 3" .. "Package 8" with matching IDs. */
private SortedSet<Package> getAvailablePackages() {
  SortedSet<Package> available = new TreeSet<Package>();
  for (int id = 3; id <= 8; id++) {
    available.add(
        PackageBuilder.builder()
            .setName("Package " + id)
            .setID(String.valueOf(id))
            .build());
  }
  return available;
}
/**
 * Segments the document text, honoring zone annotations in one of two modes:
 * strict zoning processes exactly one zone type (or the whole text when none is
 * configured), while non-strict zoning collects all zone boundaries and processes
 * each span between consecutive boundaries.
 */
@Override
public void process(JCas jcas) throws AnalysisEngineProcessException {
  // sentenceCount = 0;
  tokenCount = 0;
  String text = jcas.getDocumentText();
  String[] zones = getZoneTypes();
  if (isStrictZoning()) {
    if (zones == null || zones.length == 0) {
      // No zones configured: the whole document is one zone.
      process(jcas, text.substring(0, text.length()), 0);
    } else if (zones.length != 1) {
      throw new AnalysisEngineProcessException(
          new IllegalStateException("Strict zoning cannot use multiple zone types"));
    } else {
      // Exactly one zone type: process each of its annotations independently.
      CAS cas = jcas.getCas();
      for (AnnotationFS zone : select(cas, getType(cas, zones[0]))) {
        int[] adjusted = limit(text, zone.getBegin(), zone.getEnd());
        process(jcas, text.substring(adjusted[0], adjusted[1]), adjusted[0]);
      }
    }
  } else {
    // This set collects all zone boundaries.
    SortedSet<Integer> boundarySet = new TreeSet<Integer>();
    boundarySet.add(0); // Add start boundary
    boundarySet.add(text.length()); // Add end boundary
    // If zoneTypes have been defined then get the boundaries, otherwise we will
    // simply have one big zone covering the whole document.
    if (zones != null) {
      // Iterate over all the zone indices and create sentences respecting
      // the zone boundaries. If the zoneTypes overlap... well... bad luck!
      for (String zoneName : zones) {
        CAS cas = jcas.getCas();
        for (AnnotationFS zone : select(cas, getType(cas, zoneName))) {
          int[] adjusted = limit(text, zone.getBegin(), zone.getEnd());
          boundarySet.add(adjusted[0]);
          boundarySet.add(adjusted[1]);
        }
      }
    }
    // Now process all zoneTypes. There will be at least two entries in the
    // boundary set (see above), so bi.next() below is always safe once.
    Iterator<Integer> bi = boundarySet.iterator();
    int begin = bi.next();
    while (bi.hasNext()) {
      int end = bi.next();
      process(jcas, text.substring(begin, end), begin);
      begin = end;
    }
  }
}
/**
 * Lists the contents of {@code dir}: directories first, then regular files, each
 * group in natural {@link File} order (lexicographic pathname).
 *
 * <p>Fix: {@code File.listFiles()} returns {@code null} when {@code dir} is not a
 * directory or an I/O error occurs; the previous code then threw a
 * NullPointerException. An empty array is returned instead.
 *
 * @param dir the directory to list
 * @return directories followed by files; empty when the directory cannot be read
 */
public static File[] getDirectoryListing(File dir) {
  File[] entries = dir.listFiles();
  if (entries == null) {
    // Not a directory, or the listing failed — fail soft with an empty result.
    return new File[0];
  }
  SortedSet<File> dirSet = new TreeSet<File>();
  SortedSet<File> fileSet = new TreeSet<File>();
  for (File entry : entries) {
    if (entry.isDirectory()) {
      dirSet.add(entry);
    } else {
      fileSet.add(entry);
    }
  }
  List<File> ordered = new ArrayList<File>(dirSet.size() + fileSet.size());
  ordered.addAll(dirSet);
  ordered.addAll(fileSet);
  return ordered.toArray(new File[0]);
}