public void add(final OIdentifiable identifiable) {
  if (identifiable == null)
    throw new NullPointerException("Impossible to add a null identifiable in a ridbag");

  if (identifiable.getIdentity().isValid()) {
    Change counter = changes.get(identifiable);
    if (counter == null) changes.put(identifiable, new DiffChange(1));
    else {
      if (counter.isUndefined()) {
        counter = getAbsoluteValue(identifiable);
        changes.put(identifiable, counter);
      }
      counter.increment();
    }
  } else {
    final OModifiableInteger counter = newEntries.get(identifiable);
    if (counter == null) newEntries.put(identifiable, new OModifiableInteger(1));
    else counter.increment();
  }

  if (size >= 0) size++;

  if (this.owner != null) ORecordInternal.track(this.owner, identifiable);

  if (updateOwner)
    fireCollectionChangedEvent(
        new OMultiValueChangeEvent<OIdentifiable, OIdentifiable>(
            OMultiValueChangeEvent.OChangeType.ADD, identifiable, identifiable, null, false));
}
public void remove(OIdentifiable identifiable) {
  if (removeFromNewEntries(identifiable)) {
    if (size >= 0) size--;
  } else {
    final Change counter = changes.get(identifiable);
    if (counter == null) {
      // Non-persistent keys can only be in changes or newEntries
      if (identifiable.getIdentity().isPersistent()) {
        changes.put(identifiable, new DiffChange(-1));
        size = -1;
      } else
        // Return immediately to prevent firing of event
        return;
    } else {
      counter.decrement();

      if (size >= 0)
        if (counter.isUndefined()) size = -1;
        else size--;
    }
  }

  if (this.owner != null) ORecordInternal.unTrack(this.owner, identifiable);

  if (updateOwner)
    fireCollectionChangedEvent(
        new OMultiValueChangeEvent<OIdentifiable, OIdentifiable>(
            OMultiValueChangeEvent.OChangeType.REMOVE, identifiable, null, identifiable, false));
}
public void testBackwardIteration() throws IOException {
  final int records = 10000;

  // Seed the RNG with the value we print, so a failing run can be reproduced
  long seed = System.currentTimeMillis();
  MersenneTwisterFast mersenneTwisterFast = new MersenneTwisterFast(seed);
  System.out.println("testBackwardIteration seed : " + seed);

  NavigableMap<OClusterPosition, byte[]> positionRecordMap =
      new TreeMap<OClusterPosition, byte[]>();

  ORecordVersion recordVersion = OVersionFactory.instance().createVersion();
  recordVersion.increment();
  recordVersion.increment();

  for (int i = 0; i < records; i++) {
    int recordSize = mersenneTwisterFast.nextInt(2 * OClusterPage.MAX_RECORD_SIZE) + 1;
    byte[] record = new byte[recordSize];
    mersenneTwisterFast.nextBytes(record);

    final OPhysicalPosition physicalPosition =
        paginatedCluster.createRecord(record, recordVersion, (byte) 2);
    positionRecordMap.put(physicalPosition.clusterPosition, record);
  }

  Iterator<OClusterPosition> positionIterator = positionRecordMap.keySet().iterator();
  while (positionIterator.hasNext()) {
    OClusterPosition clusterPosition = positionIterator.next();
    if (mersenneTwisterFast.nextBoolean()) {
      Assert.assertTrue(paginatedCluster.deleteRecord(clusterPosition));
      positionIterator.remove();
    }
  }

  OPhysicalPosition physicalPosition = new OPhysicalPosition();
  physicalPosition.clusterPosition = OClusterPositionFactory.INSTANCE.valueOf(Long.MAX_VALUE);

  OPhysicalPosition[] positions = paginatedCluster.floorPositions(physicalPosition);
  Assert.assertTrue(positions.length > 0);

  positionIterator = positionRecordMap.descendingKeySet().iterator();
  int counter = 0;
  while (positionIterator.hasNext()) {
    Assert.assertTrue(positions.length > 0);

    OClusterPosition testedPosition = positionIterator.next();
    Assert.assertEquals(positions[positions.length - 1].clusterPosition, testedPosition);

    OPhysicalPosition positionToFind = positions[positions.length - 1];
    positions = paginatedCluster.lowerPositions(positionToFind);

    counter++;
  }

  Assert.assertEquals(paginatedCluster.getEntries(), counter);

  Assert.assertEquals(paginatedCluster.getFirstPosition(), positionRecordMap.firstKey());
  Assert.assertEquals(paginatedCluster.getLastPosition(), positionRecordMap.lastKey());
}
public IndexRateSource update(Collection<IndexRate> indexRates) {
  for (IndexRate indexRate : indexRates) {
    // computeIfAbsent avoids the check-then-put race on the concurrent outer map
    NavigableMap<LocalDate, IndexRate> dateMap =
        data.computeIfAbsent(indexRate.indexId, id -> new ConcurrentSkipListMap<>());
    dateMap.put(indexRate.date, indexRate);
  }
  return this;
}
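// A NavigableMap keyed by date makes "as-of" lookups cheap. A minimal usage
// sketch (hypothetical helper, not part of the original source; the key type
// of the outer map and the java.time/java.util imports are assumed):
public Optional<IndexRate> latestRateOnOrBefore(String indexId, LocalDate date) {
  NavigableMap<LocalDate, IndexRate> dateMap = data.get(indexId);
  if (dateMap == null) {
    return Optional.empty();
  }
  // floorEntry returns the entry with the greatest key <= date, or null
  Map.Entry<LocalDate, IndexRate> entry = dateMap.floorEntry(date);
  return entry == null ? Optional.empty() : Optional.of(entry.getValue());
}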
@Override
public void remove() {
  if (currentRemoved)
    throw new IllegalStateException("Current element has already been removed");

  if (currentValue == null)
    throw new IllegalStateException("Next method was not called for given iterator");

  if (removeFromNewEntries(currentValue)) {
    if (size >= 0) size--;
  } else {
    Change counter = changedValues.get(currentValue);
    if (counter != null) {
      counter.decrement();
      if (size >= 0)
        if (counter.isUndefined()) size = -1;
        else size--;
    } else {
      if (nextChange != null) {
        changedValues.put(currentValue, new DiffChange(-1));
        changedValuesIterator =
            changedValues.tailMap(nextChange.getKey(), false).entrySet().iterator();
      } else {
        changedValues.put(currentValue, new DiffChange(-1));
      }

      size = -1;
    }
  }

  if (OSBTreeRidBag.this.owner != null)
    ORecordInternal.unTrack(OSBTreeRidBag.this.owner, currentValue);

  if (updateOwner)
    fireCollectionChangedEvent(
        new OMultiValueChangeEvent<OIdentifiable, OIdentifiable>(
            OMultiValueChangeEvent.OChangeType.REMOVE, currentValue, null, currentValue, false));

  currentRemoved = true;
}
private void addRequestEndPointUsingPath(
    ContextMeta context,
    ServiceMeta service,
    ServiceMethodMeta method,
    RequestMeta requestMeta,
    String path) {
  RequestMetaData metaData = new RequestMetaData(path, context, requestMeta, method, service);

  if (requestMeta.getCallType() == CallType.ADDRESS) {
    metaDataMap.put(path, metaData);
  } else {
    treeMap.put(path, metaData);
  }
}
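// The TreeMap branch suggests non-ADDRESS endpoints are resolved by path
// prefix at request time. A hedged sketch of longest-prefix matching over a
// NavigableMap<String, RequestMetaData> (hypothetical method, not from the
// original source; a production version would prune the downward walk):
private RequestMetaData findByLongestPrefix(String requestPath) {
  // floorEntry gives the greatest registered path <= requestPath; every
  // prefix of requestPath sorts at or below requestPath itself
  Map.Entry<String, RequestMetaData> candidate = treeMap.floorEntry(requestPath);
  while (candidate != null) {
    if (requestPath.startsWith(candidate.getKey())) {
      return candidate.getValue(); // longest registered prefix wins
    }
    candidate = treeMap.lowerEntry(candidate.getKey()); // try next-shorter key
  }
  return null;
}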
@Test
public void NoSuchElem_After_Clear() {
  // bug reported by: Lazaros Tsochatzidis
  //
  // But after clearing the tree using:
  //
  //   public void Delete() {
  //     db.getTreeMap("Names").clear();
  //     db.compact();
  //   }
  //
  // every next call of getLastKey() leads to the exception "NoSuchElement". Not
  // only the first one...
  DB db = DBMaker.memoryDB().transactionDisable().make();
  NavigableMap m = db.treeMap("name");
  try {
    m.lastKey();
    fail();
  } catch (NoSuchElementException e) {
  }
  m.put("aa", "aa");
  assertEquals("aa", m.lastKey());
  m.put("bb", "bb");
  assertEquals("bb", m.lastKey());
  db.treeMap("name").clear();
  db.compact();
  try {
    Object key = m.lastKey();
    fail(key.toString());
  } catch (NoSuchElementException e) {
  }
  m.put("aa", "aa");
  assertEquals("aa", m.lastKey());
  m.put("bb", "bb");
  assertEquals("bb", m.lastKey());
}
public static void main(String[] args) {
  LinkedHashMap<Integer, String> hashmap = new LinkedHashMap<Integer, String>();
  hashmap.put(1, "apple");
  hashmap.put(2, "lemon");
  hashmap.put(3, "orange");
  hashmap.put(4, "banana");
  hashmap.put(5, "litchi");
  hashmap.put(6, "mango");
  hashmap.put(7, "papaya");
  System.out.println(hashmap);
  System.out.println(valuesBetween(2, 5, hashmap));

  // // note insertion order lost
  // Map map1 = new HashMap<>();
  // // for (int i = 1; i < 15; i++) {
  // for (int i = 14; i > 0; i--) {
  //   map1.put(String.format("%04d", i), "");
  //   System.out.println(String.format("%04d:", i) + map1);
  // }
  //
  // map1 = new LinkedHashMap<>();
  // // for (int i = 1; i < 15; i++) {
  // for (int i = 14; i > 0; i--) {
  //   map1.put(String.format("%04d", i), "");
  //   System.out.println(String.format("%04d:", i) + map1);
  // }

  Map<String, String> myMap = new HashMap<>();
  myMap.put("A", "1");
  myMap.put("B", "1");
  myMap.put("C", "1");
  System.out.println(myMap); // iteration order not guaranteed

  myMap = new LinkedHashMap<>();
  myMap.put("A", "1");
  myMap.put("C", "1");
  myMap.put("B", "1");
  System.out.println(myMap); // insertion order preserved: {A=1, C=1, B=1}

  NavigableMap<Integer, String> map = new TreeMap<Integer, String>();
  map.put(0, "Kid");
  map.put(11, "Teens");
  map.put(20, "Twenties");
  map.put(30, "Thirties");
  map.put(40, "Forties");
  map.put(50, "Senior");
  map.put(100, "OMG OMG OMG!");
  System.out.println(map.get(map.floorKey(13)));  // Teens (greatest key <= 13)
  System.out.println(map.get(map.higherKey(20))); // Thirties (least key > 20)
}
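// valuesBetween is not defined in this snippet; a plausible sketch (assumed
// signature and semantics: values whose keys fall in the inclusive range, in
// the LinkedHashMap's insertion order — LinkedHashMap offers no subMap, so
// the range filter has to iterate):
static List<String> valuesBetween(int from, int to, Map<Integer, String> map) {
  List<String> result = new ArrayList<>();
  for (Map.Entry<Integer, String> entry : map.entrySet()) {
    if (entry.getKey() >= from && entry.getKey() <= to) {
      result.add(entry.getValue());
    }
  }
  return result;
}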
@Override
public <T> T getProperty(String key, Class<T> targetType, T defaultValue) {
  Object originalValue = configMap.get(key);
  if (originalValue == null && key.contains(".")) {
    originalValue = configMap.navigate(key.split("\\."));
    if (originalValue != null) {
      try {
        configMap.put(key, originalValue);
      } catch (Exception e) {
        // ignore
      }
    }
  }
  if (originalValue != null) {
    T value = conversionService.convert(originalValue, targetType);
    return DefaultGroovyMethods.asBoolean(value) ? value : defaultValue;
  }
  return defaultValue;
}
/**
 * Collects the OptionSetIdNumber entries whose page number is equal to or higher than the
 * endTemplateStartPage parameter.
 *
 * @param inputDto the input DTO holding the map of OptionSetIdNumber entries
 * @param endTemplateStartPage the page number at which the end template starts
 * @return the matching entries, keyed as in the input map
 */
private static NavigableMap<String, OptionSetIdNumber> getGroupEndOptionSetNumbers(
    LoadEndTemplatePatternInputDto inputDto, String endTemplateStartPage) {
  NavigableMap<String, OptionSetIdNumber> groupEndOptionSetNumbers =
      new TreeMap<String, OptionSetIdNumber>();

  for (Map.Entry<String, OptionSetIdNumber> entry :
      inputDto.getMapOptionSetIdNumber().entrySet()) {
    int comparatorResult =
        entry.getValue().getPageNumber().compareTo(Integer.valueOf(endTemplateStartPage));
    if (comparatorResult >= 0) {
      groupEndOptionSetNumbers.put(entry.getKey(), entry.getValue());
    }
  }

  if (LOG.isDebugEnabled()) {
    LOG.debug(
        "UtilityNavigationPointerEnd loadEndTemplatePattern() groupEndOptionSetNumbers : "
            + groupEndOptionSetNumbers);
  }

  return groupEndOptionSetNumbers;
}
@Override
public boolean contains(OIdentifiable identifiable) {
  if (newEntries.containsKey(identifiable)) return true;

  Change counter = changes.get(identifiable);

  if (counter != null) {
    AbsoluteChange absoluteValue = getAbsoluteValue(identifiable);

    if (counter.isUndefined()) {
      changes.put(identifiable, absoluteValue);
    }

    counter = absoluteValue;
  } else {
    counter = getAbsoluteValue(identifiable);
  }

  return counter.applyTo(0) > 0;
}
@Override
public Object put(String key, Object value) {
  return configMap.put(key, value);
}
@Override
public void setAt(Object key, Object value) {
  configMap.put(key.toString(), value);
}
@Override
public int serialize(byte[] stream, int offset, UUID ownerUuid) {
  for (Map.Entry<OIdentifiable, OModifiableInteger> entry : newEntries.entrySet()) {
    OIdentifiable identifiable = entry.getKey();
    assert identifiable instanceof ORecord;
    Change c = changes.get(identifiable);

    final int delta = entry.getValue().intValue();
    if (c == null) changes.put(identifiable, new DiffChange(delta));
    else c.applyDiff(delta);
  }
  newEntries.clear();

  final ORecordSerializationContext context;
  boolean remoteMode =
      ODatabaseRecordThreadLocal.INSTANCE.get().getStorage() instanceof OStorageProxy;
  if (remoteMode) {
    context = null;
  } else context = ORecordSerializationContext.getContext();

  // Make sure that we really save the underlying record.
  if (collectionPointer == null) {
    if (context != null) {
      final int clusterId = getHighLevelDocClusterId();
      assert clusterId > -1;
      collectionPointer =
          ODatabaseRecordThreadLocal.INSTANCE
              .get()
              .getSbTreeCollectionManager()
              .createSBTree(clusterId, ownerUuid);
    }
  }

  OBonsaiCollectionPointer collectionPointer;
  if (this.collectionPointer != null) collectionPointer = this.collectionPointer;
  else {
    collectionPointer = OBonsaiCollectionPointer.INVALID;
  }

  // Wire layout: fileId (long), root pageIndex (long), root pageOffset (int),
  // size (int), then the serialized change list
  OLongSerializer.INSTANCE.serializeLiteral(collectionPointer.getFileId(), stream, offset);
  offset += OLongSerializer.LONG_SIZE;

  OBonsaiBucketPointer rootPointer = collectionPointer.getRootPointer();
  OLongSerializer.INSTANCE.serializeLiteral(rootPointer.getPageIndex(), stream, offset);
  offset += OLongSerializer.LONG_SIZE;

  OIntegerSerializer.INSTANCE.serializeLiteral(rootPointer.getPageOffset(), stream, offset);
  offset += OIntegerSerializer.INT_SIZE;

  // Keep this section for binary compatibility with versions older than 1.7.5
  OIntegerSerializer.INSTANCE.serializeLiteral(size, stream, offset);
  offset += OIntegerSerializer.INT_SIZE;

  if (context == null) {
    ChangeSerializationHelper.INSTANCE.serializeChanges(
        changes, OLinkSerializer.INSTANCE, stream, offset);
  } else {
    context.push(new ORidBagUpdateSerializationOperation(changes, collectionPointer));

    // 0-length serialized list of changes
    OIntegerSerializer.INSTANCE.serializeLiteral(0, stream, offset);
    offset += OIntegerSerializer.INT_SIZE;
  }

  return offset;
}
/**
 * Adds the specified element to this set if it is not already present. More formally, adds the
 * specified element {@code e} to this set if the set contains no element {@code e2} such that
 * {@code (e==null ? e2==null : e.equals(e2))}. If this set already contains the element, the
 * call leaves the set unchanged and returns {@code false}.
 *
 * @param e element to be added to this set
 * @return {@code true} if this set did not already contain the specified element
 * @throws ClassCastException if the specified object cannot be compared with the elements
 *     currently in this set
 * @throws NullPointerException if the specified element is null and this set uses natural
 *     ordering, or its comparator does not permit null elements
 */
public boolean add(E e) {
  return m.put(e, PRESENT) == null;
}
public void addValue(long weight, String value) {
  totalWeight += weight;
  model.put(totalWeight, value);
}
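// The keys of `model` are running cumulative weights, which points at
// weighted random selection via a NavigableMap. A minimal sketch of the
// matching draw (hypothetical method, not shown in the original; assumes
// model is a NavigableMap<Long, String> and totalWeight > 0):
public String pickWeighted(java.util.Random random) {
  // draw r uniformly from [0, totalWeight); Random#nextLong(bound) needs Java 17+
  long r = random.nextLong(totalWeight);
  // the first cumulative weight strictly above r identifies the chosen bucket
  return model.higherEntry(r).getValue();
}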
/**
 * Adds the specified object to this {@code TreeSet}.
 *
 * @param object the object to add.
 * @return {@code true} when this {@code TreeSet} did not already contain the object, {@code
 *     false} otherwise.
 * @throws ClassCastException when the object cannot be compared with the elements in this
 *     {@code TreeSet}.
 * @throws NullPointerException when the object is null and the comparator cannot handle null.
 */
@Override
public boolean add(E object) {
  return backingMap.put(object, Boolean.TRUE) == null;
}
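// Usage sketch: both add implementations above rely on the same idiom — a
// map put returns null only when the key was absent, so the set's boolean
// result falls out of the sentinel value:
TreeSet<String> set = new TreeSet<>();
System.out.println(set.add("a")); // true: put returned null, key was new
System.out.println(set.add("a")); // false: put returned the previous sentinel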
@Test
public void testRandom() throws Exception {
  Directory directory = newDirectory();
  final Random r = random();
  final IndexWriterConfig iwc =
      LuceneTestCase.newIndexWriterConfig(r, new MockAnalyzer(r))
          .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
          .setRAMBufferSizeMB(
              scaledRandomIntBetween(16, 64)); // we might index a lot - don't go crazy here
  RandomIndexWriter indexWriter = new RandomIndexWriter(r, directory, iwc);
  int numUniqueChildValues = scaledRandomIntBetween(100, 2000);
  String[] childValues = new String[numUniqueChildValues];
  for (int i = 0; i < numUniqueChildValues; i++) {
    childValues[i] = Integer.toString(i);
  }

  IntOpenHashSet filteredOrDeletedDocs = new IntOpenHashSet();

  int childDocId = 0;
  int numParentDocs = scaledRandomIntBetween(1, numUniqueChildValues);
  ObjectObjectOpenHashMap<String, NavigableMap<String, FloatArrayList>> childValueToParentIds =
      new ObjectObjectOpenHashMap<>();
  for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
    boolean markParentAsDeleted = rarely();
    boolean filterMe = rarely();
    String parent = Integer.toString(parentDocId);
    Document document = new Document();
    document.add(
        new StringField(UidFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.YES));
    document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
    if (markParentAsDeleted) {
      filteredOrDeletedDocs.add(parentDocId);
      document.add(new StringField("delete", "me", Field.Store.NO));
    }
    if (filterMe) {
      filteredOrDeletedDocs.add(parentDocId);
      document.add(new StringField("filter", "me", Field.Store.NO));
    }
    indexWriter.addDocument(document);

    int numChildDocs = scaledRandomIntBetween(0, 100);
    for (int i = 0; i < numChildDocs; i++) {
      boolean markChildAsDeleted = rarely();
      String childValue = childValues[random().nextInt(childValues.length)];

      document = new Document();
      document.add(
          new StringField(
              UidFieldMapper.NAME,
              Uid.createUid("child", Integer.toString(childDocId++)),
              Field.Store.NO));
      document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
      document.add(
          new StringField(
              ParentFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.NO));
      document.add(new StringField("field1", childValue, Field.Store.NO));
      if (markChildAsDeleted) {
        document.add(new StringField("delete", "me", Field.Store.NO));
      }
      indexWriter.addDocument(document);

      if (!markChildAsDeleted) {
        NavigableMap<String, FloatArrayList> parentIdToChildScores;
        if (childValueToParentIds.containsKey(childValue)) {
          parentIdToChildScores = childValueToParentIds.lget();
        } else {
          childValueToParentIds.put(childValue, parentIdToChildScores = new TreeMap<>());
        }
        if (!markParentAsDeleted && !filterMe) {
          FloatArrayList childScores = parentIdToChildScores.get(parent);
          if (childScores == null) {
            parentIdToChildScores.put(parent, childScores = new FloatArrayList());
          }
          childScores.add(1f);
        }
      }
    }
  }

  // Delete docs that are marked to be deleted.
  indexWriter.deleteDocuments(new Term("delete", "me"));
  indexWriter.commit();

  IndexReader indexReader = DirectoryReader.open(directory);
  IndexSearcher searcher = new IndexSearcher(indexReader);
  Engine.Searcher engineSearcher =
      new Engine.Searcher(ChildrenQueryTests.class.getSimpleName(), searcher);
  ((TestSearchContext) SearchContext.current())
      .setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));

  int max = numUniqueChildValues / 4;
  for (int i = 0; i < max; i++) {
    // Simulate a parent update
    if (random().nextBoolean()) {
      final int numberOfUpdatableParents = numParentDocs - filteredOrDeletedDocs.size();
      int numberOfUpdates =
          RandomInts.randomIntBetween(
              random(), 0, Math.min(numberOfUpdatableParents, TEST_NIGHTLY ? 25 : 5));
      for (int j = 0; j < numberOfUpdates; j++) {
        int parentId;
        do {
          parentId = random().nextInt(numParentDocs);
        } while (filteredOrDeletedDocs.contains(parentId));

        String parentUid = Uid.createUid("parent", Integer.toString(parentId));
        indexWriter.deleteDocuments(new Term(UidFieldMapper.NAME, parentUid));

        Document document = new Document();
        document.add(new StringField(UidFieldMapper.NAME, parentUid, Field.Store.YES));
        document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
        indexWriter.addDocument(document);
      }

      indexReader.close();
      indexReader = DirectoryReader.open(indexWriter.w, true);
      searcher = new IndexSearcher(indexReader);
      engineSearcher =
          new Engine.Searcher(ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher);
      ((TestSearchContext) SearchContext.current())
          .setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
    }

    String childValue = childValues[random().nextInt(numUniqueChildValues)];
    int shortCircuitParentDocSet = random().nextInt(numParentDocs);
    ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
    // leave min/max set to 0 half the time
    int minChildren = random().nextInt(2) * scaledRandomIntBetween(0, 110);
    int maxChildren = random().nextInt(2) * scaledRandomIntBetween(minChildren, 110);

    QueryBuilder queryBuilder =
        hasChildQuery("child", constantScoreQuery(termQuery("field1", childValue)))
            .scoreType(scoreType.name().toLowerCase(Locale.ENGLISH))
            .minChildren(minChildren)
            .maxChildren(maxChildren)
            .setShortCircuitCutoff(shortCircuitParentDocSet);
    // Using a FQ, will invoke / test the Scorer#advance(..) and also let the
    // Weight#scorer not get live docs as acceptedDocs
    queryBuilder = filteredQuery(queryBuilder, notFilter(termFilter("filter", "me")));
    Query query = parseQuery(queryBuilder);

    BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
    int numHits = 1 + random().nextInt(25);
    TopScoreDocCollector actualTopDocsCollector = TopScoreDocCollector.create(numHits);
    searcher.search(query, MultiCollector.wrap(collector, actualTopDocsCollector));
    FixedBitSet actualResult = collector.getResult();

    FixedBitSet expectedResult = new FixedBitSet(indexReader.maxDoc());
    TopScoreDocCollector expectedTopDocsCollector = TopScoreDocCollector.create(numHits);
    if (childValueToParentIds.containsKey(childValue)) {
      LeafReader slowLeafReader = SlowCompositeReaderWrapper.wrap(indexReader);
      final FloatArrayList[] scores = new FloatArrayList[slowLeafReader.maxDoc()];
      Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
      if (terms != null) {
        NavigableMap<String, FloatArrayList> parentIdToChildScores =
            childValueToParentIds.lget();
        TermsEnum termsEnum = terms.iterator(null);
        DocsEnum docsEnum = null;
        for (Map.Entry<String, FloatArrayList> entry : parentIdToChildScores.entrySet()) {
          int count = entry.getValue().elementsCount;
          if (count >= minChildren && (maxChildren == 0 || count <= maxChildren)) {
            TermsEnum.SeekStatus seekStatus =
                termsEnum.seekCeil(Uid.createUidAsBytes("parent", entry.getKey()));
            if (seekStatus == TermsEnum.SeekStatus.FOUND) {
              docsEnum =
                  termsEnum.docs(slowLeafReader.getLiveDocs(), docsEnum, DocsEnum.FLAG_NONE);
              expectedResult.set(docsEnum.nextDoc());
              scores[docsEnum.docID()] = new FloatArrayList(entry.getValue());
            } else if (seekStatus == TermsEnum.SeekStatus.END) {
              break;
            }
          }
        }
      }
      MockScorer mockScorer = new MockScorer(scoreType);
      final LeafCollector leafCollector =
          expectedTopDocsCollector.getLeafCollector(slowLeafReader.getContext());
      leafCollector.setScorer(mockScorer);
      for (int doc = expectedResult.nextSetBit(0);
          doc < slowLeafReader.maxDoc();
          doc =
              doc + 1 >= expectedResult.length()
                  ? DocIdSetIterator.NO_MORE_DOCS
                  : expectedResult.nextSetBit(doc + 1)) {
        mockScorer.scores = scores[doc];
        leafCollector.collect(doc);
      }
    }

    assertBitSet(actualResult, expectedResult, searcher);
    assertTopDocs(actualTopDocsCollector.topDocs(), expectedTopDocsCollector.topDocs());
  }

  indexWriter.close();
  indexReader.close();
  directory.close();
}