Example #1
  public static void main(String[] args) throws Exception {
    System.out.println("Starting Cache Client...");
    int bucket;

    List<CacheServiceInterface> serverlist = new ArrayList<CacheServiceInterface>();
    serverlist.add(new DistributedCacheService("http://localhost:3000"));
    serverlist.add(new DistributedCacheService("http://localhost:3001"));
    serverlist.add(new DistributedCacheService("http://localhost:3002"));

    CharSequence val;
    for (int key = 1; key <= 10; key++) {
      val = generateRandomChar();
      bucket =
          Hashing.consistentHash(
              Hashing.md5().hashString(Integer.toString(key), Charsets.UTF_8), serverlist.size());
      serverlist.get(bucket).put(key, val.toString());

      System.out.println("put (key " + key + " and value " + val + ")" + " in bucket " + bucket);
    }
    for (int key = 1; key <= 10; key++) {
      bucket =
          Hashing.consistentHash(
              Hashing.md5().hashString(Integer.toString(key), Charsets.UTF_8), serverlist.size());
      System.out.println(
          "get (key "
              + key
              + " and value "
              + serverlist.get(bucket).get(key)
              + ")"
              + "from bucket "
              + bucket);
    }

    System.out.println("Exiting Cache Client...");
  }
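
Example #1 calls a generateRandomChar() helper that the listing does not show. A minimal sketch of what it might look like (the name comes from the call site above, but the behavior is an assumption):

  // Hypothetical helper assumed by Example #1: returns one random uppercase
  // letter as a CharSequence.
  private static CharSequence generateRandomChar() {
    java.util.Random random = new java.util.Random();
    return String.valueOf((char) ('A' + random.nextInt(26)));
  }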
  @Test
  public void mappedServersTest() {

    Map<Integer, String> servers = new HashMap<>();
    servers.put(0, "server0");
    servers.put(1, "server1");

    HashFunction md5 = Hashing.md5();
    List<PartitionEntry> triggers = generateTriggers(3, 1000);

    Map<Integer, String> newPartition;
    Map<Integer, String> oldPartition;

    print("initial - test 2 servers " + servers.toString());
    newPartition = new HashMap<>();
    for (PartitionEntry trigger : triggers) {
      newPartition.put(
          trigger.hashCode(),
          servers.get(Hashing.consistentHash(md5.hashInt(trigger.hashCode()), 2)));
    }

    for (int buckets = 3; buckets < 10; buckets++) {

      servers.put(buckets - 1, "server" + (buckets - 1));
      print("test " + buckets + " servers " + servers.toString());

      oldPartition = newPartition;
      newPartition = new HashMap<>();
      for (PartitionEntry trigger : triggers) {
        newPartition.put(
            trigger.hashCode(),
            servers.get(Hashing.consistentHash(md5.hashInt(trigger.hashCode()), buckets)));
      }

      int changes = comparePartitions(oldPartition, newPartition);
      print(
          "Changes from "
              + (buckets - 1)
              + "  to "
              + buckets
              + " servers: "
              + changes
              + " of "
              + oldPartition.size());
      print("" + (((float) changes / (float) oldPartition.size()) * 100) + " % moved");
      print(
          "K("
              + oldPartition.size()
              + ")/n("
              + buckets
              + "): "
              + ((float) oldPartition.size() / (float) buckets));
    }
  }
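
The test above relies on generateTriggers and comparePartitions helpers that are not shown. A plausible sketch of comparePartitions, written generically because the tests in this listing use it with different map value types (this is an assumption about the original implementation):

  // Hypothetical helper: counts how many keys are mapped to a different value
  // in the new partition table than in the old one.
  private <T> int comparePartitions(Map<Integer, T> oldPartition, Map<Integer, T> newPartition) {
    int changes = 0;
    for (Map.Entry<Integer, T> entry : oldPartition.entrySet()) {
      if (!entry.getValue().equals(newPartition.get(entry.getKey()))) {
        changes++;
      }
    }
    return changes;
  }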
Example #3
  public static void main(String[] args) throws Exception {
    System.out.println("Starting Cache Client...");
    String[] value = {
      "0",
      "Ferrari 250 GTO",
      "Ferrari 250 Testa Rossa",
      "Jaguar XJ13",
      "Mercedes-Benz SLR McLaren",
      "Ferrari 330 P4",
      "Maybach Exelero",
      "Rolls-Royce Hyperion",
      "Lamborghini Veneno",
      "Zenvo ST1",
      "Audi Le Mans Concept",
      " McLaren X-1 Concept",
      "Koenigsegg CCXR Trevita"
    };
    List<DistributedCacheService> server = new ArrayList<DistributedCacheService>();
    server.add(new DistributedCacheService("http://localhost:3000"));
    server.add(new DistributedCacheService("http://localhost:3001"));
    server.add(new DistributedCacheService("http://localhost:3002"));

    System.out.println(" ----------------------Putting values to server------------------");
    for (int putkey = 1; putkey <= 12; putkey++) {
      int bucket =
          Hashing.consistentHash(
              Hashing.md5().hashString(Integer.toString(putkey), Charsets.UTF_8), server.size());
      server.get(bucket).put(putkey, value[putkey]);
      System.out.println(
          "The key value pair "
              + putkey
              + "-"
              + value[putkey]
              + " is assigned to server "
              + bucket);
    }
    System.out.println(" ----------------------Getting values from server------------------");
    for (int getkey = 1; getkey <= 12; getkey++) {
      int bucket =
          Hashing.consistentHash(
              Hashing.md5().hashString(Integer.toString(getkey), Charsets.UTF_8), server.size());
      System.out.println(
          "The key value pair "
              + getkey
              + "-"
              + server.get(bucket).get(getkey)
              + " is received to server "
              + bucket);
    }
    System.out.println(" ------------------------------Terminated---------------------------");
  }
 public void testConsistentHash_outOfRange() {
   try {
     Hashing.consistentHash(5L, 0);
     fail();
   } catch (IllegalArgumentException expected) {
   }
 }
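
The same precondition (the bucket count must be positive) can be asserted more compactly with assertThrows, available since JUnit 4.13 and in JUnit 5:

  @Test
  public void testConsistentHash_outOfRange_alternative() {
    // consistentHash rejects a non-positive bucket count with IllegalArgumentException.
    assertThrows(IllegalArgumentException.class, () -> Hashing.consistentHash(5L, 0));
  }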
Example #5
  /**
   * Estimates whether the item provided as parameter is already part of the RLBS Bloom Filter,
   * that is, whether the item is a duplicate of a previously seen item. If the item is found to
   * be a duplicate, returns true, otherwise false. False positives and false negatives can occur
   * with low probability.
   *
   * @param strItem item to be checked, in order to estimate if it was previously detected
   * @return true if the item has been previously detected, false otherwise (with a certain false
   *     positive/negative rate)
   */
  public boolean checkDuplicate(String strItem) {
    // Hash positions of the item under each of the k hash functions
    int[] arrItemHashings = new int[this.k];
    boolean isDuplicate = true;

    // Hash the item through the k hash functions and determine whether it is a duplicate
    for (int i = 0; i < this.k; i++) {
      // Generate the hashing index of the item for bit array #i
      arrItemHashings[i] =
          Hashing.consistentHash(
              this.arrHashFunctions[i].hashUnencodedChars(strItem), this.bitSetSize);
      // Check if the corresponding bit is set to 0, which is enough to regard the item as distinct
      isDuplicate = isDuplicate && this.arrBitSets[i].get(arrItemHashings[i]);
    }
    logger.debug(
        "Item: {} encoded, is duplicate: {}. Positions: {}", strItem, isDuplicate, arrItemHashings);

    if (!isDuplicate) {
      Random randGen = new Random();
      BitSet bitArray = null;
      BigDecimal curProbReset = null;

      // The item was estimated to be distinct, randomly balance the load on each bit array
      for (int j = 0; j < this.k; j++) {
        // Determine how many bits are set in the array
        bitArray = this.arrBitSets[j];

        // Compute the probability of resetting a bit of the current bit array, as L(i)/s
        curProbReset =
            (new BigDecimal(bitArray.cardinality()))
                .divide(BigDecimal.valueOf(this.bitSetSize), RoundingMode.HALF_EVEN);

        // Reset a random bit of the bit array, with probability curProbReset
        if (randGen.nextDouble() <= curProbReset.doubleValue()) {
          // Select a random bit position to be reset...
          int randPosToReset = randGen.nextInt(this.bitSetSize);
          bitArray.clear(randPosToReset);
          logger.debug(
              "Load balancing BF{}. Resetting item at position {} will be reset (reset prob. {})",
              j,
              randPosToReset,
              curProbReset);
        } else {
          logger.debug(
              "Load balancing BF{}. Randomly chose not to reset any position (reset prob. {})",
              j,
              curProbReset);
        }

        // Set the bit the item was mapped to by the hashing function
        bitArray.set(arrItemHashings[j]);
      }
    }

    return isDuplicate;
  }
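
A hypothetical usage of checkDuplicate (the class name and constructor arguments below are assumptions; only the method itself appears in this listing):

  // Sketch: k = 4 bit arrays of 10_000 bits each (constructor signature assumed).
  RLBSBloomFilter filter = new RLBSBloomFilter(4, 10_000);
  System.out.println(filter.checkDuplicate("item-1")); // false: first sighting, bits get set
  System.out.println(filter.checkDuplicate("item-1")); // true with high probability: repeat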
 private void countRemaps(long h, AtomicLongMap<Integer> map) {
   int last = 0;
   for (int shards = 2; shards <= MAX_SHARDS; shards++) {
     int chosen = Hashing.consistentHash(h, shards);
     if (chosen != last) {
       map.incrementAndGet(shards);
       last = chosen;
     }
   }
 }
 private void checkConsistentHashCorrectness(long hashCode) {
   int last = 0;
   for (int shards = 1; shards <= 100000; shards++) {
     int b = Hashing.consistentHash(hashCode, shards);
     if (b != last) {
       assertEquals(shards - 1, b);
       last = b;
     }
   }
 }
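
checkConsistentHashCorrectness exercises the defining guarantee of Hashing.consistentHash: when the bucket count grows from n - 1 to n, a key either keeps its bucket or moves to the newly added bucket n - 1, never to another existing bucket. A minimal standalone illustration of that property:

  long h = 123456789L;
  int before = Hashing.consistentHash(h, 7);
  int after = Hashing.consistentHash(h, 8);
  // The key either stays put or lands in the newly added bucket (index 7).
  assert after == before || after == 7;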
  @Override
  public Integer calculate(String columnValue) {
    try {
      long targetTime = new SimpleDateFormat(dateFormat).parse(columnValue).getTime();
      int targetPartition = (int) ((targetTime - beginDate) / partionTime);
      int innerIndex = Hashing.consistentHash(targetTime, intGroupPartionSize);
      return targetPartition * intGroupPartionSize + innerIndex;

    } catch (ParseException e) {
      throw new IllegalArgumentException(e);
    }
  }
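
To make the arithmetic concrete, consider a worked example under assumed settings (these values are hypothetical, not from the source): with dateFormat "yyyy-MM-dd", beginDate at "2024-01-01", partionTime of one day (86,400,000 ms), and intGroupPartionSize of 2, the column value "2024-01-11" falls ten days after beginDate, so targetPartition is 10 and the method returns 10 * 2 + innerIndex, i.e. 20 or 21 depending on which inner slot Hashing.consistentHash(targetTime, 2) selects.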
 @Override
 public AsyncLeaderElector newAsyncLeaderElector(String name) {
   checkNotNull(name);
   Map<PartitionId, AsyncLeaderElector> leaderElectors =
       Maps.transformValues(members, partition -> partition.newAsyncLeaderElector(name));
   Hasher<String> hasher =
       topic -> {
         long hashCode = HashCode.fromBytes(topic.getBytes(Charsets.UTF_8)).asLong();
         return sortedMemberPartitionIds.get(Hashing.consistentHash(hashCode, members.size()));
       };
   return new PartitionedAsyncLeaderElector(name, leaderElectors, hasher);
 }
 @Override
 public <K, V> AsyncConsistentMap<K, V> newAsyncConsistentMap(String name, Serializer serializer) {
   checkNotNull(name);
   checkNotNull(serializer);
   Map<PartitionId, AsyncConsistentMap<K, V>> maps =
       Maps.transformValues(
           members, partition -> partition.newAsyncConsistentMap(name, serializer));
   Hasher<K> hasher =
       key -> {
         long hashCode =
             HashCode.fromBytes(Bytes.ensureCapacity(serializer.encode(key), 8, 0)).asLong();
         return sortedMemberPartitionIds.get(Hashing.consistentHash(hashCode, members.size()));
       };
   return new PartitionedAsyncConsistentMap<>(name, maps, hasher);
 }
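
Both factory methods above follow the same routing pattern: hash the key to a long, then let Hashing.consistentHash choose one of the member partitions. A minimal standalone sketch of that pattern (the method name is illustrative, not from the source):

  // Sketch: route a string key to one of partitionCount partitions.
  static int selectPartition(String key, int partitionCount) {
    long h = Hashing.md5().hashString(key, Charsets.UTF_8).asLong();
    return Hashing.consistentHash(h, partitionCount);
  }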
  @Test
  public void consistentHashTest() {

    HashFunction md5 = Hashing.md5();
    List<PartitionEntry> triggers = generateTriggers(3, 1000);

    Map<Integer, Integer> newPartition;
    Map<Integer, Integer> oldPartition;

    print("initial - test 2 servers");
    newPartition = new HashMap<>();
    for (PartitionEntry trigger : triggers) {
      newPartition.put(
          trigger.hashCode(), Hashing.consistentHash(md5.hashInt(trigger.hashCode()), 2));
    }

    for (int buckets = 3; buckets < 10; buckets++) {

      print("test " + buckets + " servers");
      oldPartition = newPartition;
      newPartition = new HashMap<>();
      for (PartitionEntry trigger : triggers) {
        newPartition.put(
            trigger.hashCode(), Hashing.consistentHash(md5.hashInt(trigger.hashCode()), buckets));
      }

      int changes = comparePartitions(oldPartition, newPartition);
      print(
          "Changes from "
              + (buckets - 1)
              + "  to "
              + buckets
              + " servers: "
              + changes
              + " of "
              + oldPartition.size());
      print("" + (((float) changes / (float) oldPartition.size()) * 100) + " % moved");
      print(
          "K("
              + oldPartition.size()
              + ")/n("
              + buckets
              + "): "
              + ((float) oldPartition.size() / (float) buckets));
    }

    // The increasing loop above ended with a 9-bucket partition; jump to 10
    // servers first, then shrink back down one server at a time.
    print("test 10 servers");
    oldPartition = newPartition;
    newPartition = new HashMap<>();
    for (PartitionEntry trigger : triggers) {
      newPartition.put(
          trigger.hashCode(), Hashing.consistentHash(md5.hashInt(trigger.hashCode()), 10));
    }

    for (int buckets = 9; buckets > 3; buckets--) {

      print("test " + buckets + " servers");
      oldPartition = newPartition;
      newPartition = new HashMap<>();
      for (PartitionEntry trigger : triggers) {
        newPartition.put(
            trigger.hashCode(), Hashing.consistentHash(md5.hashInt(trigger.hashCode()), buckets));
      }

      int changes = comparePartitions(oldPartition, newPartition);
      print(
          "Changes from "
              + (buckets + 1)
              + " to "
              + buckets
              + " servers: "
              + changes
              + " of "
              + oldPartition.size());
      print("" + (((float) changes / (float) oldPartition.size()) * 100) + " % moved");
      print(
          "K("
              + oldPartition.size()
              + ")/n("
              + buckets
              + "): "
              + ((float) oldPartition.size() / (float) buckets));
    }
  }
 /**
  * Returns the {@code DistributedPrimitiveCreator} to use for hosting a primitive.
  *
  * @param name primitive name
  * @return primitive creator
  */
 private DistributedPrimitiveCreator getCreator(String name) {
   int index = Hashing.consistentHash(name.hashCode(), members.size());
   return members.get(sortedMemberPartitionIds.get(index));
 }
 public void checkSameResult(HashCode hashCode, long equivLong) {
   assertEquals(Hashing.consistentHash(equivLong, 5555), Hashing.consistentHash(hashCode, 5555));
 }
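
checkSameResult verifies that the long and HashCode overloads of Hashing.consistentHash agree. For example:

  // HashCode.fromLong(42L).padToLong() == 42L, so the two overloads must match.
  checkSameResult(HashCode.fromLong(42L), 42L);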