Example #1
 public void testMurmur3_128() {
   HashTestUtils.check2BitAvalanche(Hashing.murmur3_128(), 250, 0.20);
   HashTestUtils.checkAvalanche(Hashing.murmur3_128(), 250, 0.17);
   HashTestUtils.checkNo2BitCharacteristics(Hashing.murmur3_128());
   HashTestUtils.checkNoFunnels(Hashing.murmur3_128());
   HashTestUtils.assertInvariants(Hashing.murmur3_128());
 }
Example #2
  /**
   * Constructor. Initializes the RLBS Bloom Filter.
   *
   * @param k number of simple Bloom Filters composing this instance
   * @param m total memory size in bits, split evenly across the k filters
   * @param thresFPR target false-positive rate, used to compute the suggested number of hash functions
   */
  public RLBSBloomFilter(int k, int m, double thresFPR) {
    // Initialize parameters and calculate derived ones
    this.thresFPR = thresFPR;
    this.m = m;
    this.k = k;
    this.bitSetSize = m / k;

    // Prepare the hash functions to map items to positions in the bit array
    this.arrHashFunctions = new HashFunction[k];
    this.arrBitSets = new BitSet[k];

    for (int i = 0; i < k; i++) {
      // Murmur3 hash functions with different seeds are independent of each other
      this.arrHashFunctions[i] = Hashing.murmur3_128(i);
      // Each bit array implements a memory of m/k bit positions
      this.arrBitSets[i] = new BitSet(this.bitSetSize);
    }

    // Compute the suggested k, according to Bera et al. (p. 24)
    double computedK = (Math.log(this.thresFPR) / Math.log(1 - (1 / Math.E)));

    logger.info(
        "RLBSBF initialized. Memory size (m): {}, Hash-functions: {}, Suggested Hash-functions: {}",
        this.m,
        this.k,
        computedK);
  }
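A minimal sketch of how the state built by this constructor might be used to insert an item (the add method and its byte[]-based signature are illustrative assumptions, not part of the original class): each of the k seeded Murmur3 functions selects one bit position in its own m/k-bit partition.

  public void add(byte[] item) {
    for (int i = 0; i < this.k; i++) {
      // Mask the sign bit rather than using Math.abs, which overflows for Integer.MIN_VALUE
      int pos = (this.arrHashFunctions[i].hashBytes(item).asInt() & Integer.MAX_VALUE)
          % this.bitSetSize;
      this.arrBitSets[i].set(pos);
    }
  }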
Example #3
 static {
   HASH_FUNCTIONS.put("md5", Hashing.md5());
   HASH_FUNCTIONS.put("sha1", Hashing.sha1());
   HASH_FUNCTIONS.put("sha256", Hashing.sha256());
   HASH_FUNCTIONS.put("sha512", Hashing.sha512());
   HASH_FUNCTIONS.put("murmur3_32", Hashing.murmur3_32());
   HASH_FUNCTIONS.put("murmur3_128", Hashing.murmur3_128());
 }
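A hypothetical lookup against this registry, assuming HASH_FUNCTIONS is a Map<String, HashFunction>; the key string matches the registration above, and the variable names are illustrative only:

  // Requires java.nio.charset.StandardCharsets
  HashFunction fn = HASH_FUNCTIONS.get("murmur3_128");
  String hex = fn.hashString("some-key", StandardCharsets.UTF_8).toString(); // lowercase hex digest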
Example #4
 /*
  * (non-Javadoc)
  *
  * @see org.opendedup.sdfs.io.CacheBufferInterface2#hashCode()
  */
 @Override
 public int hashCode() {
   this.lock.lock();
   try {
      // Seeded Murmur3_128; asInt() truncates the 128-bit hash to its lowest 32 bits
      HashFunction hf = Hashing.murmur3_128(6442);
      return hf.hashBytes(buf.array()).asInt();
   } finally {
     this.lock.unlock();
   }
 }
Example #5
 @Override
 public <T> void put(T object, Funnel<? super T> funnel, int numHashFunctions, BitArray bits) {
   // TODO(user): when the murmur's shortcuts are implemented, update this code
   long hash64 = Hashing.murmur3_128().newHasher().putObject(object, funnel).hash().asLong();
   int hash1 = (int) hash64;
   int hash2 = (int) (hash64 >>> 32);
   for (int i = 1; i <= numHashFunctions; i++) {
     int nextHash = hash1 + i * hash2;
     if (nextHash < 0) {
       nextHash = ~nextHash;
     }
      // up to here, the code is identical to that of the next method (mightContain)
     bits.set(nextHash % bits.size());
   }
 }
Example #6
  public static void main(String[] args) {
    Stopwatch timer = Stopwatch.createStarted();
    OptionsParser optionsParser = OptionsParser.newOptionsParser(Options.class);
    optionsParser.parseAndExitUponError(args);
    Options options = optionsParser.getOptions(Options.class);

    checkFlags(options);

    FileSystem fileSystem = FileSystems.getDefault();
    Path working = fileSystem.getPath("").toAbsolutePath();

    AndroidResourceProcessor resourceProcessor =
        new AndroidResourceProcessor(new StdLogger(com.android.utils.StdLogger.Level.VERBOSE));

    try {
      Path resourcesOut = Files.createTempDirectory("tmp-resources");
      resourcesOut.toFile().deleteOnExit();
      Path assetsOut = Files.createTempDirectory("tmp-assets");
      assetsOut.toFile().deleteOnExit();
      logger.fine(String.format("Setup finished at %dms", timer.elapsed(TimeUnit.MILLISECONDS)));

      ImmutableList<DirectoryModifier> modifiers =
          ImmutableList.of(
              new PackedResourceTarExpander(working.resolve("expanded"), working),
              new FileDeDuplicator(
                  Hashing.murmur3_128(), working.resolve("deduplicated"), working));
      MergedAndroidData mergedData =
          resourceProcessor.mergeData(
              options.mainData,
              options.dependencyData,
              resourcesOut,
              assetsOut,
              modifiers,
              null,
              options.strictMerge);
      logger.info(String.format("Merging finished at %dms", timer.elapsed(TimeUnit.MILLISECONDS)));

      writeAar(options.aarOutput, mergedData, options.manifest, options.rtxt, options.classes);
      logger.info(
          String.format("Packaging finished at %dms", timer.elapsed(TimeUnit.MILLISECONDS)));

    } catch (IOException | MergingException e) {
      logger.log(Level.SEVERE, "Error during merging resources", e);
      System.exit(1);
    }
    System.exit(0);
  }
Example #7
 @Override
 public <T> boolean mightContain(
     T object, Funnel<? super T> funnel, int numHashFunctions, BitArray bits) {
   long hash64 = Hashing.murmur3_128().newHasher().putObject(object, funnel).hash().asLong();
   int hash1 = (int) hash64;
   int hash2 = (int) (hash64 >>> 32);
   for (int i = 1; i <= numHashFunctions; i++) {
     int nextHash = hash1 + i * hash2;
     if (nextHash < 0) {
       nextHash = ~nextHash;
     }
      // up to here, the code is identical to that of the previous method (put)
     if (!bits.get(nextHash % bits.size())) {
       return false;
     }
   }
   return true;
 }
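Examples #5 and #7 share the same index computation: the Kirsch-Mitzenmacher double-hashing scheme g_i(x) = h1(x) + i * h2(x), where h1 and h2 are the lower and upper 32-bit halves of a single 128-bit Murmur3 hash. A sketch of that shared step as a standalone helper (the helper itself is an illustration, not part of the original class):

  static int nextIndex(int hash1, int hash2, int i, int size) {
    int nextHash = hash1 + i * hash2;
    if (nextHash < 0) {
      nextHash = ~nextHash; // flip negative values into the non-negative range
    }
    return nextHash % size;
  }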
Example #8
/** Created by eickovic on 4/15/16. */
public class ParserTest {
  String foo =
      "{\"foo\":{\"bar\":\"bbq\"}, \"body\": \"\\r\\nenrique garcia mierbanorteclabe 072580006215798578monto 10400 pesos \\t\\t \\t   \\t\\t  \", \"inreplyto\": [], \"bccs_line\": [], \"originating_ips\": [\"[187.192.117.124]\"], \"tos\": [\"[email protected]\"], \"tos_line\": [\"<*****@*****.**>\"], \"ccs\": [], \"datetime\": \"2012-02-03T20:27:38\", \"attachments\": [], \"bccs\": [], \"senders\": [\"[email protected]\"], \"ccs_line\": [], \"references\": [], \"messageid\": [\"<*****@*****.**>\"], \"forensic-bcc\": [], \"subject\": \"dlls\", \"id\": \"9dd46c42-0123-11e6-bb05-08002705cb99\", \"categories\": [\"[email protected]\", \"14\", \"5fe6ab1e-6ee0-4fef-98b1-4625d386f702\"], \"senders_line\": [\"enrique garcia <*****@*****.**>\"]}";

  private static final HashFunction murmur3Hash = Hashing.murmur3_128();

  @Test
  public void hashTest() throws Exception {
    String s1 = "abcdef";
    String s2 = "12345";
    // Use an explicit charset (java.nio.charset.StandardCharsets) so the hashes
    // do not depend on the platform-default encoding
    HashCode hash1 = murmur3Hash.newHasher().putBytes(s1.getBytes(StandardCharsets.UTF_8)).hash();
    System.out.println(hash1);
    HashCode hash2 = murmur3Hash.newHasher().putBytes(s2.getBytes(StandardCharsets.UTF_8)).hash();
    System.out.println(hash2);
    HashCode hash3 = murmur3Hash.newHasher().putBytes(s1.getBytes(StandardCharsets.UTF_8)).hash();
    System.out.println(hash3);
    System.out.println(hash1.equals(hash2)); // false: different inputs
    System.out.println(hash1.equals(hash3)); // true: Murmur3 is deterministic
  }

  @Test
  public void parseTest() throws Exception {
    List<Object> l = Lists.newArrayList();

    Type type = new TypeToken<Map<String, Object>>() {}.getType();
    Gson gson = new GsonBuilder().create();
    l.add(gson.fromJson(foo, type));
    String r = "";
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    try (Writer writer = new OutputStreamWriter(buf, "UTF-8")) {
      gson.toJson(l, writer);
      writer.flush();
      r = buf.toString("utf-8");
    }
    System.out.println(r);
  }
}
Example #9
/** For comparing the load differences between consistent hash and HRW */
public class Compare {
  private static final HashFunction hfunc = Hashing.murmur3_128();
  private static final Funnel<CharSequence> strFunnel =
      Funnels.stringFunnel(Charset.defaultCharset());

  public static void main(String[] args) {
    Map<String, AtomicInteger> distribution = Maps.newHashMap();

    System.out.println("======: ConsistentHash :========");
    ConsistentHash<String, String> c =
        new ConsistentHash<>(hfunc, strFunnel, strFunnel, getNodes(distribution));
    for (int i = 0; i < 10000; i++) {
      distribution.get(c.get("" + i)).incrementAndGet();
    }
    for (Entry<String, AtomicInteger> e : distribution.entrySet()) {
      System.out.println(e.getKey() + "," + e.getValue().get());
      e.getValue().set(0);
    }
    System.out.println("====== remove 2 ========");
    for (int i = 0; i < 2; i++) {
      c.remove("Node" + i);
      distribution.remove("Node" + i);
    }
    for (int i = 0; i < 10000; i++) {
      distribution.get(c.get("" + i)).incrementAndGet();
    }
    for (Entry<String, AtomicInteger> e : distribution.entrySet()) {
      System.out.println(e.getKey() + "," + e.getValue().get());
    }

    System.out.println("======: RendezvousHash :========");
    distribution = Maps.newHashMap();
    RendezvousHash<String, String> r =
        new RendezvousHash<>(hfunc, strFunnel, strFunnel, getNodes(distribution));

    for (int i = 0; i < 10000; i++) {
      distribution.get(r.get("" + i)).incrementAndGet();
    }
    for (Entry<String, AtomicInteger> e : distribution.entrySet()) {
      System.out.println(e.getKey() + "," + e.getValue().get());
      e.getValue().set(0);
    }
    System.out.println("====== remove 2 ========");
    for (int i = 0; i < 2; i++) {
      r.remove("Node" + i);
      distribution.remove("Node" + i);
    }
    for (int i = 0; i < 10000; i++) {
      distribution.get(r.get("" + i)).incrementAndGet();
    }
    for (Entry<String, AtomicInteger> e : distribution.entrySet()) {
      System.out.println(e.getKey() + "," + e.getValue().get());
    }
  }

  private static List<String> getNodes(Map<String, AtomicInteger> distribution) {
    List<String> nodes = Lists.newArrayList();
    for (int i = 0; i < 5; i++) {
      nodes.add("Node" + i);
      distribution.put("Node" + i, new AtomicInteger());
    }
    return nodes;
  }
}
Example #10
 @Benchmark
 public long guava(BenchmarkData data, ByteCounter counter) {
   counter.add(data.getSlice().length());
   return Hashing.murmur3_128().hashBytes(data.getBytes()).asLong();
 }
Example #11
public class MemcachedCache implements Cache {
  private static final Logger log = new Logger(MemcachedCache.class);

  static final HashAlgorithm MURMUR3_128 =
      new HashAlgorithm() {
        final HashFunction fn = Hashing.murmur3_128();

        @Override
        public long hash(String k) {
          return fn.hashString(k, Charsets.UTF_8).asLong();
        }

        @Override
        public String toString() {
          return fn.toString();
        }
      };

  public static MemcachedCache create(final MemcachedCacheConfig config) {
    final ConcurrentMap<String, AtomicLong> counters = new ConcurrentHashMap<>();
    final ConcurrentMap<String, AtomicLong> meters = new ConcurrentHashMap<>();
    final AbstractMonitor monitor =
        new AbstractMonitor() {
          final AtomicReference<Map<String, Long>> priorValues =
              new AtomicReference<Map<String, Long>>(new HashMap<String, Long>());

          @Override
          public boolean doMonitor(ServiceEmitter emitter) {
            final Map<String, Long> priorValues = this.priorValues.get();
            final Map<String, Long> currentValues = getCurrentValues();
            final ServiceMetricEvent.Builder builder = ServiceMetricEvent.builder();
            for (Map.Entry<String, Long> entry : currentValues.entrySet()) {
              emitter.emit(
                  builder
                      .setDimension("memcached metric", entry.getKey())
                      .build("query/cache/memcached/total", entry.getValue()));
              final Long prior = priorValues.get(entry.getKey());
              if (prior != null) {
                emitter.emit(
                    builder
                        .setDimension("memcached metric", entry.getKey())
                        .build("query/cache/memcached/delta", entry.getValue() - prior));
              }
            }

            if (!this.priorValues.compareAndSet(priorValues, currentValues)) {
              log.error("Prior value changed while I was reporting! updating anyways");
              this.priorValues.set(currentValues);
            }
            return true;
          }

          private Map<String, Long> getCurrentValues() {
            final ImmutableMap.Builder<String, Long> builder = ImmutableMap.builder();
            for (Map.Entry<String, AtomicLong> entry : counters.entrySet()) {
              builder.put(entry.getKey(), entry.getValue().get());
            }
            for (Map.Entry<String, AtomicLong> entry : meters.entrySet()) {
              builder.put(entry.getKey(), entry.getValue().get());
            }
            return builder.build();
          }
        };
    try {
      LZ4Transcoder transcoder = new LZ4Transcoder(config.getMaxObjectSize());

      // always use compression
      transcoder.setCompressionThreshold(0);

      OperationQueueFactory opQueueFactory;
      long maxQueueBytes = config.getMaxOperationQueueSize();
      if (maxQueueBytes > 0) {
        opQueueFactory = new MemcachedOperationQueueFactory(maxQueueBytes);
      } else {
        opQueueFactory = new LinkedOperationQueueFactory();
      }

      final Predicate<String> interesting =
          new Predicate<String>() {
            // See net.spy.memcached.MemcachedConnection.registerMetrics()
            private final Set<String> interestingMetrics =
                ImmutableSet.of(
                    "[MEM] Reconnecting Nodes (ReconnectQueue)",
                    // "[MEM] Shutting Down Nodes (NodesToShutdown)", // Busted
                    "[MEM] Request Rate: All",
                    "[MEM] Average Bytes written to OS per write",
                    "[MEM] Average Bytes read from OS per read",
                    "[MEM] Average Time on wire for operations (µs)",
                    "[MEM] Response Rate: All (Failure + Success + Retry)",
                    "[MEM] Response Rate: Retry",
                    "[MEM] Response Rate: Failure",
                    "[MEM] Response Rate: Success");

            @Override
            public boolean apply(@Nullable String input) {
              return input != null && interestingMetrics.contains(input);
            }
          };

      final MetricCollector metricCollector =
          new MetricCollector() {
            @Override
            public void addCounter(String name) {
              if (!interesting.apply(name)) {
                return;
              }
              counters.putIfAbsent(name, new AtomicLong(0L));

              if (log.isDebugEnabled()) {
                log.debug("Add Counter [%s]", name);
              }
            }

            @Override
            public void removeCounter(String name) {
              if (log.isDebugEnabled()) {
                log.debug("Ignoring request to remove [%s]", name);
              }
            }

            @Override
            public void incrementCounter(String name) {
              if (!interesting.apply(name)) {
                return;
              }
              AtomicLong counter = counters.get(name);
              if (counter == null) {
                counters.putIfAbsent(name, new AtomicLong(0));
                counter = counters.get(name);
              }
              counter.incrementAndGet();

              if (log.isDebugEnabled()) {
                log.debug("Increment [%s]", name);
              }
            }

            @Override
            public void incrementCounter(String name, int amount) {
              if (!interesting.apply(name)) {
                return;
              }
              AtomicLong counter = counters.get(name);
              if (counter == null) {
                counters.putIfAbsent(name, new AtomicLong(0));
                counter = counters.get(name);
              }
              counter.addAndGet(amount);

              if (log.isDebugEnabled()) {
                log.debug("Increment [%s] %d", name, amount);
              }
            }

            @Override
            public void decrementCounter(String name) {
              if (!interesting.apply(name)) {
                return;
              }
              AtomicLong counter = counters.get(name);
              if (counter == null) {
                counters.putIfAbsent(name, new AtomicLong(0));
                counter = counters.get(name);
              }
              counter.decrementAndGet();

              if (log.isDebugEnabled()) {
                log.debug("Decrement [%s]", name);
              }
            }

            @Override
            public void decrementCounter(String name, int amount) {
              if (!interesting.apply(name)) {
                return;
              }
              AtomicLong counter = counters.get(name);
              if (counter == null) {
                counters.putIfAbsent(name, new AtomicLong(0L));
                counter = counters.get(name);
              }
              counter.addAndGet(-amount);

              if (log.isDebugEnabled()) {
                log.debug("Decrement [%s] %d", name, amount);
              }
            }

            @Override
            public void addMeter(String name) {
              if (!interesting.apply(name)) {
                return;
              }
              meters.putIfAbsent(name, new AtomicLong(0L));
              if (log.isDebugEnabled()) {
                log.debug("Adding meter [%s]", name);
              }
            }

            @Override
            public void removeMeter(String name) {
              if (!interesting.apply(name)) {
                return;
              }
              if (log.isDebugEnabled()) {
                log.debug("Ignoring request to remove meter [%s]", name);
              }
            }

            @Override
            public void markMeter(String name) {
              if (!interesting.apply(name)) {
                return;
              }
              AtomicLong meter = meters.get(name);
              if (meter == null) {
                meters.putIfAbsent(name, new AtomicLong(0L));
                meter = meters.get(name);
              }
              meter.incrementAndGet();

              if (log.isDebugEnabled()) {
                log.debug("Increment counter [%s]", name);
              }
            }

            @Override
            public void addHistogram(String name) {
              log.debug("Ignoring add histogram [%s]", name);
            }

            @Override
            public void removeHistogram(String name) {
              log.debug("Ignoring remove histogram [%s]", name);
            }

            @Override
            public void updateHistogram(String name, int amount) {
              log.debug("Ignoring update histogram [%s]: %d", name, amount);
            }
          };

      final ConnectionFactory connectionFactory =
          new MemcachedCustomConnectionFactoryBuilder()
              // 1000 repetitions gives us good distribution with murmur3_128
              // (approx < 5% difference in counts across nodes, with 5 cache nodes)
              .setKetamaNodeRepetitions(1000)
              .setHashAlg(MURMUR3_128)
              .setProtocol(ConnectionFactoryBuilder.Protocol.BINARY)
              .setLocatorType(ConnectionFactoryBuilder.Locator.CONSISTENT)
              .setDaemon(true)
              .setFailureMode(FailureMode.Cancel)
              .setTranscoder(transcoder)
              .setShouldOptimize(true)
              .setOpQueueMaxBlockTime(config.getTimeout())
              .setOpTimeout(config.getTimeout())
              .setReadBufferSize(config.getReadBufferSize())
              .setOpQueueFactory(opQueueFactory)
              .setMetricCollector(metricCollector)
              .setEnableMetrics(MetricType.DEBUG) // Not as scary as it sounds
              .build();

      final List<InetSocketAddress> hosts = AddrUtil.getAddresses(config.getHosts());

      final Supplier<ResourceHolder<MemcachedClientIF>> clientSupplier;

      if (config.getNumConnections() > 1) {
        clientSupplier =
            new LoadBalancingPool<MemcachedClientIF>(
                config.getNumConnections(),
                new Supplier<MemcachedClientIF>() {
                  @Override
                  public MemcachedClientIF get() {
                    try {
                      return new MemcachedClient(connectionFactory, hosts);
                    } catch (IOException e) {
                      log.error(e, "Unable to create memcached client");
                      throw Throwables.propagate(e);
                    }
                  }
                });
      } else {
        clientSupplier =
            Suppliers.<ResourceHolder<MemcachedClientIF>>ofInstance(
                StupidResourceHolder.<MemcachedClientIF>create(
                    new MemcachedClient(connectionFactory, hosts)));
      }

      return new MemcachedCache(clientSupplier, config, monitor);
    } catch (IOException e) {
      throw Throwables.propagate(e);
    }
  }

  private final int timeout;
  private final int expiration;
  private final String memcachedPrefix;

  private final Supplier<ResourceHolder<MemcachedClientIF>> client;

  private final AtomicLong hitCount = new AtomicLong(0);
  private final AtomicLong missCount = new AtomicLong(0);
  private final AtomicLong timeoutCount = new AtomicLong(0);
  private final AtomicLong errorCount = new AtomicLong(0);
  private final AbstractMonitor monitor;

  MemcachedCache(
      Supplier<ResourceHolder<MemcachedClientIF>> client,
      MemcachedCacheConfig config,
      AbstractMonitor monitor) {
    Preconditions.checkArgument(
        config.getMemcachedPrefix().length() <= MAX_PREFIX_LENGTH,
        "memcachedPrefix length [%d] exceeds maximum length [%d]",
        config.getMemcachedPrefix().length(),
        MAX_PREFIX_LENGTH);
    this.monitor = monitor;
    this.timeout = config.getTimeout();
    this.expiration = config.getExpiration();
    this.client = client;
    this.memcachedPrefix = config.getMemcachedPrefix();
  }

  @Override
  public CacheStats getStats() {
    return new CacheStats(
        hitCount.get(), missCount.get(), 0, 0, 0, timeoutCount.get(), errorCount.get());
  }

  @Override
  public byte[] get(NamedKey key) {
    try (ResourceHolder<MemcachedClientIF> clientHolder = client.get()) {
      Future<Object> future;
      try {
        future = clientHolder.get().asyncGet(computeKeyHash(memcachedPrefix, key));
      } catch (IllegalStateException e) {
        // operation did not get queued in time (queue is full)
        errorCount.incrementAndGet();
        log.warn(e, "Unable to queue cache operation");
        return null;
      }
      try {
        byte[] bytes = (byte[]) future.get(timeout, TimeUnit.MILLISECONDS);
        if (bytes != null) {
          hitCount.incrementAndGet();
        } else {
          missCount.incrementAndGet();
        }
        return bytes == null ? null : deserializeValue(key, bytes);
      } catch (TimeoutException e) {
        timeoutCount.incrementAndGet();
        future.cancel(false);
        return null;
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw Throwables.propagate(e);
      } catch (ExecutionException e) {
        errorCount.incrementAndGet();
        log.warn(e, "Exception pulling item from cache");
        return null;
      }
    }
  }

  @Override
  public void put(NamedKey key, byte[] value) {
    try (final ResourceHolder<MemcachedClientIF> clientHolder = client.get()) {
      clientHolder
          .get()
          .set(computeKeyHash(memcachedPrefix, key), expiration, serializeValue(key, value));
    } catch (IllegalStateException e) {
      // operation did not get queued in time (queue is full)
      errorCount.incrementAndGet();
      log.warn(e, "Unable to queue cache operation");
    }
  }

  private static byte[] serializeValue(NamedKey key, byte[] value) {
    byte[] keyBytes = key.toByteArray();
    return ByteBuffer.allocate(Ints.BYTES + keyBytes.length + value.length)
        .putInt(keyBytes.length)
        .put(keyBytes)
        .put(value)
        .array();
  }

  private static byte[] deserializeValue(NamedKey key, byte[] bytes) {
    ByteBuffer buf = ByteBuffer.wrap(bytes);

    final int keyLength = buf.getInt();
    byte[] keyBytes = new byte[keyLength];
    buf.get(keyBytes);
    byte[] value = new byte[buf.remaining()];
    buf.get(value);

    Preconditions.checkState(
        Arrays.equals(keyBytes, key.toByteArray()), "Keys do not match, possible hash collision?");
    return value;
  }

  @Override
  public Map<NamedKey, byte[]> getBulk(Iterable<NamedKey> keys) {
    try (ResourceHolder<MemcachedClientIF> clientHolder = client.get()) {
      Map<String, NamedKey> keyLookup =
          Maps.uniqueIndex(
              keys,
              new Function<NamedKey, String>() {
                @Override
                public String apply(@Nullable NamedKey input) {
                  return computeKeyHash(memcachedPrefix, input);
                }
              });

      Map<NamedKey, byte[]> results = Maps.newHashMap();

      BulkFuture<Map<String, Object>> future;
      try {
        future = clientHolder.get().asyncGetBulk(keyLookup.keySet());
      } catch (IllegalStateException e) {
        // operation did not get queued in time (queue is full)
        errorCount.incrementAndGet();
        log.warn(e, "Unable to queue cache operation");
        return results;
      }

      try {
        Map<String, Object> some = future.getSome(timeout, TimeUnit.MILLISECONDS);

        if (future.isTimeout()) {
          future.cancel(false);
          timeoutCount.incrementAndGet();
        }
        missCount.addAndGet(keyLookup.size() - some.size());
        hitCount.addAndGet(some.size());

        for (Map.Entry<String, Object> entry : some.entrySet()) {
          final NamedKey key = keyLookup.get(entry.getKey());
          final byte[] value = (byte[]) entry.getValue();
          if (value != null) {
            results.put(key, deserializeValue(key, value));
          }
        }

        return results;
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw Throwables.propagate(e);
      } catch (ExecutionException e) {
        errorCount.incrementAndGet();
        log.warn(e, "Exception pulling item from cache");
        return results;
      }
    }
  }

  @Override
  public void close(String namespace) {
    // no resources to cleanup
  }

  public static final int MAX_PREFIX_LENGTH =
      MemcachedClientIF.MAX_KEY_LENGTH // 250-character memcached key limit
          - 40 // length of namespace hash (SHA-1 hex)
          - 40 // length of key hash (SHA-1 hex)
          - 2 // length of separators
      ;

  private static String computeKeyHash(String memcachedPrefix, NamedKey key) {
    // hash keys to keep things under 250 characters for memcached
    return memcachedPrefix
        + ":"
        + DigestUtils.sha1Hex(key.namespace)
        + ":"
        + DigestUtils.sha1Hex(key.key);
  }

  @Override
  public boolean isLocal() {
    return false;
  }

  @Override
  public void doMonitor(ServiceEmitter emitter) {
    monitor.doMonitor(emitter);
  }
}
Example #12
  @Setup
  public void setup() throws IOException {
    log.info("SETUP CALLED AT " + System.currentTimeMillis());

    if (ComplexMetrics.getSerdeForType("hyperUnique") == null) {
      ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(Hashing.murmur3_128()));
    }

    executorService = Execs.multiThreaded(numSegments, "TimeseriesThreadPool");

    setupQueries();

    String[] schemaQuery = schemaAndQuery.split("\\.");
    String schemaName = schemaQuery[0];
    String queryName = schemaQuery[1];

    schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get(schemaName);
    query = SCHEMA_QUERY_MAP.get(schemaName).get(queryName);

    incIndexes = new ArrayList<>();
    for (int i = 0; i < numSegments; i++) {
      log.info("Generating rows for segment " + i);
      BenchmarkDataGenerator gen =
          new BenchmarkDataGenerator(
              schemaInfo.getColumnSchemas(),
              RNG_SEED + i,
              schemaInfo.getDataInterval(),
              rowsPerSegment);

      IncrementalIndex incIndex = makeIncIndex();

      for (int j = 0; j < rowsPerSegment; j++) {
        InputRow row = gen.nextRow();
        if (j % 10000 == 0) {
          log.info(j + " rows generated.");
        }
        incIndex.add(row);
      }
      log.info(rowsPerSegment + " rows generated");
      incIndexes.add(incIndex);
    }

    File tmpFile = Files.createTempDir();
    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
    tmpFile.deleteOnExit();

    qIndexes = new ArrayList<>();
    for (int i = 0; i < numSegments; i++) {
      File indexFile = INDEX_MERGER_V9.persist(incIndexes.get(i), tmpFile, new IndexSpec());

      QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile);
      qIndexes.add(qIndex);
    }

    factory =
        new TimeseriesQueryRunnerFactory(
            new TimeseriesQueryQueryToolChest(
                QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()),
            new TimeseriesQueryEngine(),
            QueryBenchmarkUtil.NOOP_QUERYWATCHER);
  }
Example #13
 @Test
 public void testMurmurWithSeed() {
   testMurmur(LongHashFunction.murmur_3(42L), Hashing.murmur3_128(42));
 }
Example #14
 @Test
 public void testMurmurWithoutSeed() {
   testMurmur(LongHashFunction.murmur_3(), Hashing.murmur3_128());
 }
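The testMurmur helper is not shown in these two tests. A plausible sketch of what it could assert, under the assumption that both libraries implement MurmurHash3_x64_128 and that Guava's asLong() exposes the same low 64 bits that LongHashFunction returns:

 private static void testMurmur(LongHashFunction zeroAlloc, HashFunction guava) {
   byte[] input = "hello murmur".getBytes(StandardCharsets.UTF_8);
   assertEquals(zeroAlloc.hashBytes(input), guava.hashBytes(input).asLong());
 }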
Example #15
 private static class Murmur3_128Holder {
   static final HashFunction MURMUR3_128 = new Murmur3_128HashFunction(0);
   static final HashFunction GOOD_FAST_HASH_FUNCTION_128 =
       Hashing.murmur3_128(Hashing.GOOD_FAST_HASH_SEED);
 }
Example #16
  public static void main(String[] args) throws Exception {
    final Stopwatch timer = Stopwatch.createStarted();
    OptionsParser optionsParser =
        OptionsParser.newOptionsParser(Options.class, AaptConfigOptions.class);
    optionsParser.parseAndExitUponError(args);
    aaptConfigOptions = optionsParser.getOptions(AaptConfigOptions.class);
    options = optionsParser.getOptions(Options.class);

    FileSystem fileSystem = FileSystems.getDefault();
    Path working = fileSystem.getPath("").toAbsolutePath();
    final AndroidResourceProcessor resourceProcessor = new AndroidResourceProcessor(STD_LOGGER);

    try {
      final Path tmp = Files.createTempDirectory("android_resources_tmp");
      // Clean up the tmp directory on exit to keep disk space usage low.
      tmp.toFile().deleteOnExit();

      final Path expandedOut = tmp.resolve("tmp-expanded");
      final Path deduplicatedOut = tmp.resolve("tmp-deduplicated");
      final Path mergedAssets = tmp.resolve("merged_assets");
      final Path mergedResources = tmp.resolve("merged_resources");
      final Path filteredResources = tmp.resolve("resources-filtered");
      final Path densityManifest = tmp.resolve("manifest-filtered/AndroidManifest.xml");
      final Path processedManifest = tmp.resolve("manifest-processed/AndroidManifest.xml");

      Path generatedSources = null;
      if (options.srcJarOutput != null
          || options.rOutput != null
          || options.symbolsTxtOut != null) {
        generatedSources = tmp.resolve("generated_resources");
      }

      LOGGER.fine(String.format("Setup finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));

      final ImmutableList<DirectoryModifier> modifiers =
          ImmutableList.of(
              new PackedResourceTarExpander(expandedOut, working),
              new FileDeDuplicator(Hashing.murmur3_128(), deduplicatedOut, working));

      // Resources can appear in both the direct and transitive dependencies -- use a set to
      // ensure deduplication.
      List<DependencyAndroidData> data =
          ImmutableSet.<DependencyAndroidData>builder()
              .addAll(options.directData)
              .addAll(options.transitiveData)
              .build()
              .asList();

      final MergedAndroidData mergedData =
          resourceProcessor.mergeData(
              options.primaryData,
              data,
              mergedResources,
              mergedAssets,
              modifiers,
              selectPngCruncher(),
              true);

      LOGGER.fine(String.format("Merging finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));

      final DensityFilteredAndroidData filteredData =
          mergedData.filter(
              new DensitySpecificResourceFilter(
                  options.densities, filteredResources, mergedResources),
              new DensitySpecificManifestProcessor(options.densities, densityManifest));

      LOGGER.fine(
          String.format(
              "Density filtering finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));

      final MergedAndroidData processedManifestData =
          resourceProcessor.processManifest(
              options.packageType,
              options.packageForR,
              options.applicationId,
              options.versionCode,
              options.versionName,
              filteredData,
              processedManifest);

      resourceProcessor.processResources(
          aaptConfigOptions.aapt,
          aaptConfigOptions.androidJar,
          aaptConfigOptions.buildToolsVersion,
          options.packageType,
          aaptConfigOptions.debug,
          options.packageForR,
          new FlagAaptOptions(aaptConfigOptions),
          aaptConfigOptions.resourceConfigs,
          aaptConfigOptions.splits,
          processedManifestData,
          data,
          generatedSources,
          options.packagePath,
          options.proguardOutput,
          options.mainDexProguardOutput,
          options.resourcesOutput != null
              ? processedManifestData.getResourceDir().resolve("values").resolve("public.xml")
              : null);
      LOGGER.fine(String.format("aapt finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));

      if (options.manifestOutput != null) {
        resourceProcessor.copyManifestToOutput(processedManifestData, options.manifestOutput);
      }
      if (options.srcJarOutput != null) {
        resourceProcessor.createSrcJar(
            generatedSources,
            options.srcJarOutput,
            VariantConfiguration.Type.LIBRARY == options.packageType);
      }
      if (options.rOutput != null) {
        resourceProcessor.copyRToOutput(
            generatedSources,
            options.rOutput,
            VariantConfiguration.Type.LIBRARY == options.packageType);
      }
      if (options.symbolsTxtOut != null) {
        resourceProcessor.copyRToOutput(
            generatedSources,
            options.symbolsTxtOut,
            VariantConfiguration.Type.LIBRARY == options.packageType);
      }
      if (options.resourcesOutput != null) {
        resourceProcessor.createResourcesZip(
            processedManifestData.getResourceDir(),
            processedManifestData.getAssetDir(),
            options.resourcesOutput);
      }
      LOGGER.fine(
          String.format("Packaging finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
    } catch (MergingException e) {
      LOGGER.log(java.util.logging.Level.SEVERE, "Error during merging resources", e);
      throw e;
    } catch (IOException | InterruptedException | LoggedErrorException e) {
      LOGGER.log(java.util.logging.Level.SEVERE, "Error during processing resources", e);
      throw e;
    } catch (Exception e) {
      LOGGER.log(java.util.logging.Level.SEVERE, "Unexpected", e);
      throw e;
    } finally {
      resourceProcessor.shutdown();
    }
    LOGGER.fine(String.format("Resources processed in %sms", timer.elapsed(TimeUnit.MILLISECONDS)));
  }