/**
 * Date histogram over the multi-valued "dates" field with a doc-count
 * derivative sub-aggregation: expects 4 monthly buckets (Jan-Apr 2012)
 * with doc counts 1, 3, 5, 3 and derivative values null, 2, 2, -2.
 */
public void testMultiValuedField() throws Exception {
  SearchResponse response =
      client()
          .prepareSearch("idx")
          .addAggregation(
              dateHistogram("histo")
                  .field("dates")
                  .dateHistogramInterval(DateHistogramInterval.MONTH)
                  .minDocCount(0)
                  .subAggregation(derivative("deriv", "_count")))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  Histogram deriv = response.getAggregations().get("histo");
  assertThat(deriv, notNullValue());
  assertThat(deriv.getName(), equalTo("histo"));
  List<? extends Bucket> buckets = deriv.getBuckets();
  assertThat(buckets.size(), equalTo(4));

  // First bucket (Jan 2012): no preceding bucket, so no derivative value
  // and therefore no sub-aggregations at all.
  DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
  Histogram.Bucket bucket = buckets.get(0);
  assertThat(bucket, notNullValue());
  assertThat((DateTime) bucket.getKey(), equalTo(key));
  assertThat(bucket.getDocCount(), equalTo(1L));
  assertThat(bucket.getAggregations().asList().isEmpty(), is(true));
  SimpleValue docCountDeriv = bucket.getAggregations().get("deriv");
  assertThat(docCountDeriv, nullValue());

  // Feb 2012: doc count 3, derivative = 3 - 1 = 2.
  key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
  bucket = buckets.get(1);
  assertThat(bucket, notNullValue());
  assertThat((DateTime) bucket.getKey(), equalTo(key));
  assertThat(bucket.getDocCount(), equalTo(3L));
  assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
  docCountDeriv = bucket.getAggregations().get("deriv");
  assertThat(docCountDeriv, notNullValue());
  assertThat(docCountDeriv.value(), equalTo(2.0));

  // Mar 2012: doc count 5, derivative = 5 - 3 = 2.
  key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
  bucket = buckets.get(2);
  assertThat(bucket, notNullValue());
  assertThat((DateTime) bucket.getKey(), equalTo(key));
  assertThat(bucket.getDocCount(), equalTo(5L));
  assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
  docCountDeriv = bucket.getAggregations().get("deriv");
  assertThat(docCountDeriv, notNullValue());
  assertThat(docCountDeriv.value(), equalTo(2.0));

  // Apr 2012: doc count 3, derivative = 3 - 5 = -2 (derivatives may be
  // negative when the count drops).
  key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
  bucket = buckets.get(3);
  assertThat(bucket, notNullValue());
  assertThat((DateTime) bucket.getKey(), equalTo(key));
  assertThat(bucket.getDocCount(), equalTo(3L));
  assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
  docCountDeriv = bucket.getAggregations().get("deriv");
  assertThat(docCountDeriv, notNullValue());
  assertThat(docCountDeriv.value(), equalTo(-2.0));
}
/**
 * Doc-count derivative over a histogram built from an index containing
 * empty buckets: where the expected derivative ({@code
 * firstDerivValueCounts_empty[i]}) is null the "deriv" sub-aggregation
 * must be absent, otherwise it must be present and match.
 */
@Test
public void docCountDerivativeWithGaps() throws Exception {
  SearchResponse searchResponse =
      client()
          .prepareSearch("empty_bucket_idx")
          .setQuery(matchAllQuery())
          .addAggregation(
              histogram("histo")
                  .field(SINGLE_VALUED_FIELD_NAME)
                  .interval(1)
                  .subAggregation(derivative("deriv").setBucketsPaths("_count")))
          .execute()
          .actionGet();

  assertThat(searchResponse.getHits().getTotalHits(), equalTo(numDocsEmptyIdx));

  InternalHistogram<Bucket> deriv = searchResponse.getAggregations().get("histo");
  assertThat(deriv, Matchers.notNullValue());
  assertThat(deriv.getName(), equalTo("histo"));
  List<Bucket> buckets = deriv.getBuckets();
  assertThat(buckets.size(), equalTo(valueCounts_empty.length));

  for (int i = 0; i < valueCounts_empty.length; i++) {
    Histogram.Bucket bucket = buckets.get(i);
    checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty[i]);
    SimpleValue docCountDeriv = bucket.getAggregations().get("deriv");
    if (firstDerivValueCounts_empty[i] == null) {
      assertThat(docCountDeriv, nullValue());
    } else {
      // Assert presence before dereferencing so a missing sub-agg fails
      // with a matcher message instead of an NPE (consistent with the
      // other derivative tests in this file).
      assertThat(docCountDeriv, notNullValue());
      assertThat(docCountDeriv.value(), equalTo(firstDerivValueCounts_empty[i]));
    }
  }
}
/**
 * Derivative aggregation across one mapped and one unmapped index: the
 * unmapped index contributes no documents, so bucket counts and
 * derivatives must match the mapped-only expectations. The first bucket
 * has no predecessor and therefore no derivative.
 */
@Test
public void partiallyUnmapped() throws Exception {
  SearchResponse response =
      client()
          .prepareSearch("idx", "idx_unmapped")
          .addAggregation(
              histogram("histo")
                  .field(SINGLE_VALUED_FIELD_NAME)
                  .interval(interval)
                  .subAggregation(derivative("deriv").setBucketsPaths("_count")))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  InternalHistogram<Bucket> deriv = response.getAggregations().get("histo");
  assertThat(deriv, notNullValue());
  assertThat(deriv.getName(), equalTo("histo"));
  List<? extends Bucket> buckets = deriv.getBuckets();
  // Use the local snapshot rather than re-fetching the bucket list.
  assertThat(buckets.size(), equalTo(numValueBuckets));

  for (int i = 0; i < numValueBuckets; ++i) {
    Histogram.Bucket bucket = buckets.get(i);
    checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]);
    SimpleValue docCountDeriv = bucket.getAggregations().get("deriv");
    if (i > 0) {
      assertThat(docCountDeriv, notNullValue());
      assertThat(docCountDeriv.value(), equalTo((double) firstDerivValueCounts[i - 1]));
    } else {
      assertThat(docCountDeriv, nullValue());
    }
  }
}
/**
 * Per-frame update of a single live particle: applies the emitter's
 * pending positional adjustment, wind/gravity velocity changes,
 * colour/alpha interpolation across the {@code colors} gradient, and
 * size/velocity/scale-Y interpolation where those value curves are active.
 *
 * @see
 *     org.newdawn.slick.particles.ParticleEmitter#updateParticle(org.newdawn.slick.particles.Particle,
 *     int)
 */
@SuppressWarnings("null")
@Override
public void updateParticle(Particle particle, int delta) {
  particleCount++;

  // adjust the particles if required
  particle.x += adjustx;
  particle.y += adjusty;

  particle.adjustVelocity(
      windFactor.getValue(0) * 0.00005f * delta, gravityFactor.getValue(0) * 0.00005f * delta);

  // offset is the remaining-life fraction; inv is the elapsed-life
  // fraction (presumably getLife() counts down — TODO confirm).
  float offset = particle.getLife() / particle.getOriginalLife();
  float inv = 1 - offset;
  float colOffset = 0;
  float colInv = 1;

  Color startColor = null;
  Color endColor = null;

  // Locate the gradient segment [rec1.pos, rec2.pos] containing inv and
  // derive the two blend weights. Note colOffset is inverted after the
  // division, so colOffset weights the segment's START colour below.
  for (int i = 0; i < colors.size() - 1; i++) {
    ColorRecord rec1 = colors.get(i);
    ColorRecord rec2 = colors.get(i + 1);

    if ((inv >= rec1.pos) && (inv <= rec2.pos)) {
      startColor = rec1.col;
      endColor = rec2.col;

      float step = rec2.pos - rec1.pos;
      colOffset = inv - rec1.pos;
      colOffset /= step;
      colOffset = 1 - colOffset;
      colInv = 1 - colOffset;
    }
  }

  // startColor stays null when the gradient has fewer than two records or
  // inv fell outside every segment; the particle colour is then untouched.
  // (@SuppressWarnings("null") above covers the endColor dereference — it
  // is always set together with startColor.)
  if (startColor != null) {
    float r = (startColor.r * colOffset) + (endColor.r * colInv);
    float g = (startColor.g * colOffset) + (endColor.g * colInv);
    float b = (startColor.b * colOffset) + (endColor.b * colInv);

    float a;
    // Alpha comes from the dedicated alpha curve when active; otherwise it
    // fades linearly from startAlpha toward endAlpha over the lifetime.
    if (alpha.isActive()) a = alpha.getValue(inv) / 255.0f;
    else
      a =
          ((startAlpha.getValue(0) / 255.0f) * offset)
              + ((endAlpha.getValue(0) / 255.0f) * inv);
    particle.setColor(r, g, b, a);
  }

  if (size.isActive()) {
    float s = size.getValue(inv);
    particle.setSize(s);
  } else particle.adjustSize(delta * growthFactor.getValue(0) * 0.001f);

  if (velocity.isActive()) particle.setSpeed(velocity.getValue(inv));

  if (scaleY.isActive()) particle.setScaleY(scaleY.getValue(inv));
}
/**
 * {@code replace(key, value)} must return the previous mapping and
 * install the new value under the same key.
 */
@Test
public void testReplaceValue() {
  ConcurrentMap<SimpleKey, SimpleValue> map = redisson.getMap("simple");
  map.put(new SimpleKey("1"), new SimpleValue("2"));

  SimpleValue previous = map.replace(new SimpleKey("1"), new SimpleValue("3"));
  Assert.assertEquals("2", previous.getValue());

  SimpleValue current = map.get(new SimpleKey("1"));
  Assert.assertEquals("3", current.getValue());
}
/**
 * {@code replace(key, oldValue, newValue)} must be a no-op and return
 * {@code false} when the current mapping does not equal {@code oldValue}.
 */
@Test
public void testReplaceOldValueFail() {
  ConcurrentMap<SimpleKey, SimpleValue> map = redisson.getMap("simple");
  map.put(new SimpleKey("1"), new SimpleValue("2"));

  boolean replaced = map.replace(new SimpleKey("1"), new SimpleValue("43"), new SimpleValue("31"));
  Assert.assertFalse(replaced);

  // The original value must still be in place after the failed CAS.
  SimpleValue current = map.get(new SimpleKey("1"));
  Assert.assertEquals("2", current.getValue());
}
/**
 * Derivative of a multi-valued metric path ("stats.sum"): each bucket's
 * "deriv" value must equal this bucket's sum minus the previous bucket's
 * sum, the first bucket must have no derivative, and the flattened
 * "_key"/"_count"/"stats.sum" property arrays must mirror the buckets.
 */
@Test
public void multiValueAggDerivative() throws Exception {
  SearchResponse response =
      client()
          .prepareSearch("idx")
          .addAggregation(
              histogram("histo")
                  .field(SINGLE_VALUED_FIELD_NAME)
                  .interval(interval)
                  .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
                  .subAggregation(derivative("deriv").setBucketsPaths("stats.sum")))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  InternalHistogram<Bucket> deriv = response.getAggregations().get("histo");
  assertThat(deriv, notNullValue());
  assertThat(deriv.getName(), equalTo("histo"));
  assertThat(deriv.getBuckets().size(), equalTo(numValueBuckets));

  // Flattened per-bucket property views of the histogram.
  Object[] propertiesKeys = (Object[]) deriv.getProperty("_key");
  Object[] propertiesDocCounts = (Object[]) deriv.getProperty("_count");
  Object[] propertiesSumCounts = (Object[]) deriv.getProperty("stats.sum");

  List<Bucket> buckets = new ArrayList<Bucket>(deriv.getBuckets());
  Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets
  // overwritten
  for (int i = 0; i < numValueBuckets; ++i) {
    Histogram.Bucket bucket = buckets.get(i);
    checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]);

    // Each bucket's sum is count * key since every doc's value equals the
    // bucket key (i * interval).
    Stats stats = bucket.getAggregations().get("stats");
    assertThat(stats, notNullValue());
    long expectedSum = valueCounts[i] * (i * interval);
    assertThat(stats.getSum(), equalTo((double) expectedSum));

    SimpleValue sumDeriv = bucket.getAggregations().get("deriv");
    if (i > 0) {
      assertThat(sumDeriv, notNullValue());
      long sumDerivValue = expectedSum - expectedSumPreviousBucket;
      assertThat(sumDeriv.value(), equalTo((double) sumDerivValue));
      // The same value must also be reachable via the aggregation-path
      // property lookup from the bucket.
      assertThat(
          (double)
              bucket.getProperty(
                  "histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()),
          equalTo((double) sumDerivValue));
    } else {
      assertThat(sumDeriv, nullValue());
    }
    expectedSumPreviousBucket = expectedSum;

    assertThat((long) propertiesKeys[i], equalTo((long) i * interval));
    assertThat((long) propertiesDocCounts[i], equalTo(valueCounts[i]));
    assertThat((double) propertiesSumCounts[i], equalTo((double) expectedSum));
  }
}
/** Values written with {@code put} must be readable back by their keys. */
@Test
public void testPutGet() {
  Map<SimpleKey, SimpleValue> map = redisson.getMap("simple");
  map.put(new SimpleKey("1"), new SimpleValue("2"));
  map.put(new SimpleKey("33"), new SimpleValue("44"));
  map.put(new SimpleKey("5"), new SimpleValue("6"));

  SimpleValue first = map.get(new SimpleKey("33"));
  Assert.assertEquals("44", first.getValue());

  SimpleValue second = map.get(new SimpleKey("5"));
  Assert.assertEquals("6", second.getValue());
}
/**
 * Derivative of a sum over a randomized sparse histogram under a random
 * gap policy: empty buckets contribute 0 under INSERT_ZEROS and NaN
 * otherwise, and each derivative must equal this bucket's (effective) sum
 * minus the previous one's. The first bucket never has a derivative.
 */
@Test
public void singleValueAggDerivativeWithGaps_random() throws Exception {
  GapPolicy gapPolicy = randomFrom(GapPolicy.values());
  SearchResponse searchResponse =
      client()
          .prepareSearch("empty_bucket_idx_rnd")
          .setQuery(matchAllQuery())
          .addAggregation(
              histogram("histo")
                  .field(SINGLE_VALUED_FIELD_NAME)
                  .interval(1)
                  .extendedBounds(0l, (long) numBuckets_empty_rnd - 1)
                  .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))
                  .subAggregation(
                      derivative("deriv").setBucketsPaths("sum").gapPolicy(gapPolicy)))
          .execute()
          .actionGet();

  assertThat(searchResponse.getHits().getTotalHits(), equalTo(numDocsEmptyIdx_rnd));

  InternalHistogram<Bucket> deriv = searchResponse.getAggregations().get("histo");
  assertThat(deriv, Matchers.notNullValue());
  assertThat(deriv.getName(), equalTo("histo"));
  List<Bucket> buckets = deriv.getBuckets();
  assertThat(buckets.size(), equalTo(numBuckets_empty_rnd));

  double lastSumValue = Double.NaN;
  for (int i = 0; i < valueCounts_empty_rnd.length; i++) {
    Histogram.Bucket bucket = buckets.get(i);
    checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty_rnd[i]);

    // Effective sum for an empty bucket depends on the gap policy.
    Sum sum = bucket.getAggregations().get("sum");
    double thisSumValue = sum.value();
    if (bucket.getDocCount() == 0) {
      thisSumValue = gapPolicy == GapPolicy.INSERT_ZEROS ? 0 : Double.NaN;
    }

    SimpleValue sumDeriv = bucket.getAggregations().get("deriv");
    if (i == 0) {
      assertThat(sumDeriv, nullValue());
    } else {
      // Assert presence before dereferencing so a missing sub-agg fails
      // with a matcher message instead of an NPE (consistent with the
      // other derivative tests in this file).
      assertThat(sumDeriv, notNullValue());
      double expectedDerivative = thisSumValue - lastSumValue;
      if (Double.isNaN(expectedDerivative)) {
        // closeTo never matches NaN, so compare NaN with equalTo.
        assertThat(sumDeriv.value(), equalTo(expectedDerivative));
      } else {
        assertThat(sumDeriv.value(), closeTo(expectedDerivative, 0.00001));
      }
    }
    lastSumValue = thisSumValue;
  }
}
/**
 * Date histogram over "date" with a sum sub-aggregation and a derivative
 * of that sum: 3 monthly buckets (Jan-Mar 2012) with doc counts 1, 2, 3,
 * sums 1, 5, 15 and sum-derivatives null, 4, 10. Also verifies the
 * flattened "_key"/"_count"/"sum.value" property arrays and per-bucket
 * property lookup via AggregationPath.
 */
public void testSingleValuedFieldWithSubAggregation() throws Exception {
  SearchResponse response =
      client()
          .prepareSearch("idx")
          .addAggregation(
              dateHistogram("histo")
                  .field("date")
                  .dateHistogramInterval(DateHistogramInterval.MONTH)
                  .minDocCount(0)
                  .subAggregation(sum("sum").field("value"))
                  .subAggregation(derivative("deriv", "sum")))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  Histogram histo = response.getAggregations().get("histo");
  assertThat(histo, notNullValue());
  assertThat(histo.getName(), equalTo("histo"));
  List<? extends Bucket> buckets = histo.getBuckets();
  assertThat(buckets.size(), equalTo(3));

  // Flattened per-bucket property views of the histogram.
  Object[] propertiesKeys = (Object[]) histo.getProperty("_key");
  Object[] propertiesDocCounts = (Object[]) histo.getProperty("_count");
  Object[] propertiesCounts = (Object[]) histo.getProperty("sum.value");

  // Jan 2012: sum 1, no derivative (first bucket).
  DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
  Histogram.Bucket bucket = buckets.get(0);
  assertThat(bucket, notNullValue());
  assertThat((DateTime) bucket.getKey(), equalTo(key));
  assertThat(bucket.getDocCount(), equalTo(1L));
  assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
  Sum sum = bucket.getAggregations().get("sum");
  assertThat(sum, notNullValue());
  assertThat(sum.getValue(), equalTo(1.0));
  SimpleValue deriv = bucket.getAggregations().get("deriv");
  assertThat(deriv, nullValue());
  assertThat((DateTime) propertiesKeys[0], equalTo(key));
  assertThat((long) propertiesDocCounts[0], equalTo(1L));
  assertThat((double) propertiesCounts[0], equalTo(1.0));

  // Feb 2012: sum 5, derivative = 5 - 1 = 4.
  key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
  bucket = buckets.get(1);
  assertThat(bucket, notNullValue());
  assertThat((DateTime) bucket.getKey(), equalTo(key));
  assertThat(bucket.getDocCount(), equalTo(2L));
  assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
  sum = bucket.getAggregations().get("sum");
  assertThat(sum, notNullValue());
  assertThat(sum.getValue(), equalTo(5.0));
  deriv = bucket.getAggregations().get("deriv");
  assertThat(deriv, notNullValue());
  assertThat(deriv.value(), equalTo(4.0));
  // Same value must be reachable via aggregation-path property lookup.
  assertThat(
      (double)
          bucket.getProperty(
              "histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()),
      equalTo(4.0));
  assertThat((DateTime) propertiesKeys[1], equalTo(key));
  assertThat((long) propertiesDocCounts[1], equalTo(2L));
  assertThat((double) propertiesCounts[1], equalTo(5.0));

  // Mar 2012: sum 15, derivative = 15 - 5 = 10.
  key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
  bucket = buckets.get(2);
  assertThat(bucket, notNullValue());
  assertThat((DateTime) bucket.getKey(), equalTo(key));
  assertThat(bucket.getDocCount(), equalTo(3L));
  assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
  sum = bucket.getAggregations().get("sum");
  assertThat(sum, notNullValue());
  assertThat(sum.getValue(), equalTo(15.0));
  deriv = bucket.getAggregations().get("deriv");
  assertThat(deriv, notNullValue());
  assertThat(deriv.value(), equalTo(10.0));
  assertThat(
      (double)
          bucket.getProperty(
              "histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()),
      equalTo(10.0));
  assertThat((DateTime) propertiesKeys[2], equalTo(key));
  assertThat((long) propertiesDocCounts[2], equalTo(3L));
  assertThat((double) propertiesCounts[2], equalTo(15.0));
}
/**
 * Per-frame update of the emitter itself: consumes pending positional
 * adjustments, reloads the particle image when flagged, tracks
 * wrap-up/length/emit-count completion, and spawns new particles when the
 * spawn timer expires.
 *
 * @see
 *     org.newdawn.slick.particles.ParticleEmitter#update(org.newdawn.slick.particles.ParticleSystem,
 *     int)
 */
@Override
public void update(ParticleSystem system, int delta) {
  this.engine = system;

  // Consume the one-shot positional adjustment set since the last frame.
  if (!adjust) {
    adjustx = 0;
    adjusty = 0;
  } else adjust = false;

  // Lazily (re)load the particle image on the update thread; a failed
  // load clears the image and is only logged.
  if (updateImage) {
    updateImage = false;
    try {
      image = new Image(relativePath + imageName);
    } catch (SlickException e) {
      image = null;
      Log.error(e);
    }
  }

  // The emitter is complete once it can no longer emit (wrapping up,
  // timed out, or out of emissions) AND no particles were alive last
  // frame (particleCount is incremented by updateParticle).
  if (((wrapUp) || ((length.isEnabled()) && (timeout < 0)) || ((emitCount.isEnabled() && (leftToEmit <= 0))))
      && (particleCount == 0)) {
    completed = true;
  }
  particleCount = 0;

  if (wrapUp) return;

  if (length.isEnabled()) {
    if (timeout < 0) return;
    timeout -= delta;
  }
  if (emitCount.isEnabled() && leftToEmit <= 0) return;

  // Spawn a random-sized batch of particles when the timer expires.
  nextSpawn -= delta;
  if (nextSpawn < 0) {
    nextSpawn = (int) spawnInterval.random();
    int count = (int) spawnCount.random();

    for (int i = 0; i < count; i++) {
      Particle p = system.getNewParticle(this, initialLife.random());
      p.setSize(initialSize.random());
      p.setPosition(x + xOffset.random(), y + yOffset.random());
      p.setVelocity(0, 0, 0);

      float dist = initialDistance.random();
      float power = speed.random();
      if ((dist != 0) || (power != 0)) {
        // NOTE(review): ang mixes spread.getValue(0) and spread.getValue();
        // upstream Slick randomises within the spread here — confirm the
        // two accessors agree before relying on the emission angle.
        float s = spread.getValue(0);
        float ang = (s + angularOffset.getValue(0) - (spread.getValue() / 2)) - 90;
        float xa = (float) FastTrig.cos(Math.toRadians(ang)) * dist;
        float ya = (float) FastTrig.sin(Math.toRadians(ang)) * dist;
        p.adjustPosition(xa, ya);

        float xv = (float) FastTrig.cos(Math.toRadians(ang));
        float yv = (float) FastTrig.sin(Math.toRadians(ang));
        p.setVelocity(xv, yv, power * 0.001f);
      }

      if (image != null) p.setImage(image);

      // New particles start at the first gradient colour and startAlpha.
      ColorRecord start = colors.get(0);
      p.setColor(start.col.r, start.col.g, start.col.b, startAlpha.getValue(0) / 255.0f);
      p.setUsePoint(usePoints);
      p.setOriented(useOriented);

      if (emitCount.isEnabled()) {
        leftToEmit--;
        if (leftToEmit <= 0) break;
      }
    }
  }
}
/**
 * Records a single-valued (name, value) annotation member by wrapping it
 * in a {@link SimpleValue} and appending it to this visitor's list.
 *
 * @see org.objectweb.asm.AnnotationVisitor#visit(java.lang.String, java.lang.Object)
 */
@Override
public void visit(String aname, Object avalue) {
  SimpleValue entry = new SimpleValue(aname);
  entry.setValue(avalue);
  _annotationValues.add(entry);
}