private void createSubAssunto(int idAssunto) {
    SubAssunto subAssunto = new SubAssunto(idAssunto);
    ArrayAdapter<Assunto> adapter = getAssuntosAdapterByAssuntoPai(idAssunto);

    if (adapter.getCount() == 0) {
      Toast.makeText(
              this,
              R.string.acompanhamento_estudos_assunto_doesnt_contains_subassuntos,
              Toast.LENGTH_LONG)
          .show();
      return;
    }

    LinearLayout rootLinearLayout =
        (LinearLayout) findViewById(R.id.acompanhamento_estudos_linlyt_subassuntos);
    subAssunto.layoutContainer = new LinearLayout(this);
    subAssunto.layoutContainer.setOrientation(LinearLayout.HORIZONTAL);
    rootLinearLayout.addView(subAssunto.layoutContainer);

    subAssunto.spnSubAssuntos = new Spinner(this);
    subAssunto.spnSubAssuntos.setId(View.generateViewId());
    subAssunto.spnSubAssuntos.setAdapter(adapter);
    subAssunto.spnSubAssuntos.setOnItemSelectedListener(
        new AdapterView.OnItemSelectedListener() {
          @Override
          public void onItemSelected(AdapterView<?> adapterView, View view, int position, long id) {
            int startingIndexKey =
                calcularIndiceParaLimparSubAssuntos((Assunto) adapterView.getSelectedItem());
            clearSubAssuntos(startingIndexKey);
          }

          @Override
          public void onNothingSelected(AdapterView<?> adapterView) {}
        });
    subAssunto.spnSubAssuntos.setLayoutParams(
        new LinearLayout.LayoutParams(390, LinearLayout.LayoutParams.WRAP_CONTENT));
    subAssunto.layoutContainer.addView(subAssunto.spnSubAssuntos);

    subAssunto.btnExpandirSubAssuntos = new ImageButton(this);
    subAssunto.btnExpandirSubAssuntos.setImageResource(R.drawable.arrow_combo);
    subAssunto.btnExpandirSubAssuntos.setBackgroundColor(Color.TRANSPARENT);
    final int dicPosAssuntoToExpand = dicIdxAndIdSubAssuntos.size();
    subAssunto.btnExpandirSubAssuntos.setOnClickListener(
        new OnClickListener() {
          @Override
          public void onClick(View view) {
            SubAssunto correspondingSubAssunto =
                (SubAssunto) dicIdxAndIdSubAssuntos.get(dicPosAssuntoToExpand);
            int selectedId =
                ((Assunto) correspondingSubAssunto.spnSubAssuntos.getSelectedItem()).getId();
            createSubAssunto(selectedId);
          }
        });
    subAssunto.btnExpandirSubAssuntos.setLayoutParams(
        new LinearLayout.LayoutParams(
            LinearLayout.LayoutParams.WRAP_CONTENT, LinearLayout.LayoutParams.WRAP_CONTENT));
    subAssunto.layoutContainer.addView(subAssunto.btnExpandirSubAssuntos);

    dicIdxAndIdSubAssuntos.put(dicIdxAndIdSubAssuntos.size(), subAssunto);
  }
  @Test
  public void testFutureUpdateExpiration() throws Exception {
    CyclicBarrier loadBarrier = new CyclicBarrier(2);
    CountDownLatch flushLatch = new CountDownLatch(2);
    CountDownLatch commitLatch = new CountDownLatch(1);

    Future<Boolean> first = updateFlushWait(itemId, loadBarrier, null, flushLatch, commitLatch);
    Future<Boolean> second = updateFlushWait(itemId, loadBarrier, null, flushLatch, commitLatch);
    awaitOrThrow(flushLatch);

    Map contents = Caches.entrySet(entityCache).toMap();
    assertEquals(1, contents.size());
    assertEquals(FutureUpdate.class, contents.get(itemId).getClass());
    commitLatch.countDown();
    first.get(WAIT_TIMEOUT, TimeUnit.SECONDS);
    second.get(WAIT_TIMEOUT, TimeUnit.SECONDS);

    // since we had two concurrent updates, the result should be invalid
    contents = Caches.entrySet(entityCache).toMap();
    assertEquals(1, contents.size());
    Object value = contents.get(itemId);
    if (value instanceof FutureUpdate) {
      // DB did not block the two concurrent updates
      TIME_SERVICE.advance(timeout + 1);
      assertNull(entityCache.get(itemId));
      contents = Caches.entrySet(entityCache).toMap();
      assertEquals(Collections.EMPTY_MAP, contents);
    } else {
      // DB allowed only one update to proceed, so the entry should not be expired
      assertNotNull(value);
      assertEquals(StandardCacheEntryImpl.class, value.getClass());
      TIME_SERVICE.advance(timeout + 1);
      assertEquals(value, entityCache.get(itemId));
    }
  }
Example #3
  @Test(dependsOnMethods = "testGetMetrics")
  @SuppressWarnings("unchecked")
  public void testGetMetricsWithFilter() {
    MetricFilter filter =
        new MetricFilter() {
          @Override
          public boolean matches(String name, Metric metric) {
            return !name.equals(MetricContext.GOBBLIN_METRICS_NOTIFICATIONS_TIMER_NAME);
          }
        };

    Map<String, Counter> counters = this.context.getCounters(filter);
    Assert.assertEquals(counters.size(), 1);
    Assert.assertTrue(counters.containsKey(RECORDS_PROCESSED));

    Map<String, Meter> meters = this.context.getMeters(filter);
    Assert.assertEquals(meters.size(), 1);
    Assert.assertTrue(meters.containsKey(RECORD_PROCESS_RATE));

    Map<String, Histogram> histograms = this.context.getHistograms(filter);
    Assert.assertEquals(histograms.size(), 1);
    Assert.assertTrue(histograms.containsKey(RECORD_SIZE_DISTRIBUTION));

    Map<String, Timer> timers = this.context.getTimers(filter);
    Assert.assertEquals(timers.size(), 1);
    Assert.assertTrue(timers.containsKey(TOTAL_DURATION));

    Map<String, Gauge> gauges = this.context.getGauges(filter);
    Assert.assertEquals(gauges.size(), 1);
    Assert.assertTrue(gauges.containsKey(QUEUE_SIZE));
  }
    public void addWay(OSMWay way) {
      if (_ways.containsKey(way.getId())) return;

      _ways.put(way.getId(), way);

      if (_ways.size() % 10000 == 0) _log.debug("ways=" + _ways.size());
    }
Example #5
 /**
  * Fetches data for the given primary keys.
  *
  * @param pksToDo a Map of the primary keys to fetch
  * @param results a Map to hold results that are to be added to the cache
  * @param cldToObjectsForCld a Map of Lists of objects relevant to PrimaryKeys
  * @param time1 the time that processing started
  * @throws ObjectStoreException if something goes wrong
  */
 protected void doPks(
     Map<PrimaryKey, ClassDescriptor> pksToDo,
     Map<InterMineObject, Set<InterMineObject>> results,
     Map<ClassDescriptor, List<InterMineObject>> cldToObjectsForCld,
     long time1)
     throws ObjectStoreException {
   Set<Integer> fetchedObjectIds = Collections.synchronizedSet(new HashSet<Integer>());
   Map<PrimaryKey, ClassDescriptor> pksNotDone =
       new IdentityHashMap<PrimaryKey, ClassDescriptor>(pksToDo);
   while (!pksToDo.isEmpty()) {
     int startPksToDoSize = pksToDo.size();
     Iterator<PrimaryKey> pkIter = pksToDo.keySet().iterator();
     while (pkIter.hasNext()) {
       PrimaryKey pk = pkIter.next();
       ClassDescriptor cld = pksToDo.get(pk);
       if (canDoPkNow(pk, cld, pksNotDone)) {
         // LOG.error("Running pk " + cld.getName() + "." + pk.getName());
         doPk(pk, cld, results, cldToObjectsForCld.get(cld), fetchedObjectIds);
         pkIter.remove();
         pksNotDone.remove(pk);
       } else {
         // LOG.error("Cannot do pk " + cld.getName() + "." + pk.getName() + " yet");
       }
     }
     if (pksToDo.size() == startPksToDoSize) {
       throw new RuntimeException("Error - cannot fetch any pks: " + pksToDo.keySet());
     }
   }
   long time2 = System.currentTimeMillis();
   timeSpentPrefetchEquiv += time2 - time1;
   dataTracker.prefetchIds(fetchedObjectIds);
   time1 = System.currentTimeMillis();
   timeSpentPrefetchTracker += time1 - time2;
 }
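
The loop above follows a generic "sweep until the work list is empty, and fail if a full sweep makes no progress" pattern, driven here by canDoPkNow. A minimal standalone sketch of just that control flow, with a hypothetical canDoNow predicate standing in for the InterMine-specific checks:

 // Hypothetical sketch of the same progress-checked sweep (not part of the class above).
 static <T> void drainWithProgressCheck(
     java.util.Set<T> toDo, java.util.function.Predicate<T> canDoNow) {
   while (!toDo.isEmpty()) {
     int before = toDo.size();
     for (java.util.Iterator<T> it = toDo.iterator(); it.hasNext(); ) {
       if (canDoNow.test(it.next())) {
         it.remove(); // processed, so drop it from the work list
       }
     }
     if (toDo.size() == before) {
       // a full pass handled nothing, so the remaining items can never be done
       throw new RuntimeException("Cannot make progress on: " + toDo);
     }
   }
 }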
  @SuppressWarnings("ResultOfMethodCallIgnored")
  @Test
  public void testUnchangedPageEntries() throws IOException, XMLStreamException {
    // source and target dump files
    File tmpSrcDump = File.createTempFile("wiki-src-dump", "xml");
    File tmpTargetDump = File.createTempFile("wiki-target-dump", "xml");

    // base structure - "<mediawiki><page><title></title><id></id></page></mediawiki>"
    BufferedWriter bw = new BufferedWriter(new FileWriter(tmpSrcDump));
    bw.write(
        "<mediawiki><page><title>Test1</title><id>1</id></page><page><title>Test2</title><id>2</id></page></mediawiki>");
    bw.close();

    bw = new BufferedWriter(new FileWriter(tmpTargetDump));
    bw.write(
        "<mediawiki><page><title>Test1</title><id>1</id></page><page><title>Test2</title><id>2</id></page></mediawiki>");
    bw.close();

    // default diff will also include all unchanged entries
    Map<String, String> hshResults = WikipediaRevisionMapper.map(tmpSrcDump, tmpTargetDump);
    assertEquals(2, hshResults.size());

    // setting the flag to false will exclude unchanged entries
    hshResults = WikipediaRevisionMapper.map(tmpSrcDump, tmpTargetDump, false);
    assertEquals(0, hshResults.size());
    // remove tmp files
    tmpSrcDump.delete();
    tmpTargetDump.delete();
  }
Example #7
  /**
   * Method called to compute the cohesion between two functions.
   *
   * <p>Computes the percentage of similar local variables between the two functions over the
   * total number of local variables used in the two functions.
   *
   * @param f1 A function from a source code model
   * @param f2 Another function from a source code model
   * @return A double between 0.0 and 100.0
   * @see #cohesion(Function, Function)
   */
  private double cohesionLocalVars(Function f1, Function f2) {
    double result = 0.0;
    double nbCommon = 0;

    Map<LocalVariable, Integer> localVars1 = f1.getLocalVariables();
    Map<LocalVariable, Integer> localVars2 = f2.getLocalVariables();

    for (LocalVariable var1 : localVars1.keySet()) {
      for (LocalVariable var2 : localVars2.keySet()) {
        if (var1.getName().length() >= VARNAME_MIN_LEN
            && var2.getName().length() >= VARNAME_MIN_LEN) {
          if (var1.getType().equals(var2.getType())) {
            if (similarity.similar(var1.getName(), var2.getName())) {
              ++nbCommon;
            }
          }
        }
      }
    }

    double nbPairs = localVars1.size() * localVars2.size();

    if (nbPairs > 0) {
      result = 100.0 * nbCommon / nbPairs;
    }

    return result;
  }
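
In the code above the denominator is the number of variable pairs (localVars1.size() * localVars2.size()), so the percentage is easy to check by hand. A hypothetical worked example: two locals in f1 and three in f2 give six pairs, and three similar pairs yield a cohesion of 50.0.

  // Hypothetical worked example of the pair-based percentage computed above.
  double nbCommon = 3;                // similar (same type, similar name) pairs found
  double nbPairs = 2 * 3;             // localVars1.size() * localVars2.size()
  double cohesion = nbPairs > 0 ? 100.0 * nbCommon / nbPairs : 0.0; // 50.0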
Example #8
  private void initForNamedParametersCall(
      Map<String, Object> parameters, SortedSet<ActionParameter> actionParameters) {
    if (!actionParameters.isEmpty()) {
      classes = new Class[parameters.size()];
      objects = new Object[parameters.size()];

      for (ActionParameter actionParameter : actionParameters) {
        Object obj = parameters.get(actionParameter.getKey());
        Integer idx = actionParameter.getOrder();

        objects[idx] = obj;

        if (obj instanceof Map) {
          classes[idx] = Map.class;
        } else if (obj instanceof List) {
          classes[idx] = List.class;
        } else if (obj != null) {
          classes[idx] = obj.getClass();
        } else {
          classes[idx] = Object.class;
        }
      }
    } else {
      classes = new Class[0];
      objects = new Object[0];
    }
  }
  private void assertGroupByResults(JSONArray jsonArray, Map<Object, Double> groupResultsFromAvro)
      throws JSONException {
    final Map<String, Double> groupResultsFromQuery = new HashMap<String, Double>();
    if (groupResultsFromAvro.size() > 10) {
      Assert.assertEquals(jsonArray.length(), 10);
    } else {
      Assert.assertTrue(jsonArray.length() >= groupResultsFromAvro.size());
    }
    for (int i = 0; i < jsonArray.length(); ++i) {
      groupResultsFromQuery.put(
          jsonArray.getJSONObject(i).getJSONArray("group").getString(0),
          jsonArray.getJSONObject(i).getDouble("value"));
    }

    for (final Object key : groupResultsFromAvro.keySet()) {
      String keyString;
      if (key == null) {
        keyString = "null";
      } else {
        keyString = key.toString();
      }
      if (!groupResultsFromQuery.containsKey(keyString)) {
        continue;
      }
      final double actual = groupResultsFromQuery.get(keyString);
      // System.out.println("Result from query - group:" + keyString + ", value:" + actual);
      final double expected = groupResultsFromAvro.get(key);
      // System.out.println("Result from avro - group:" + keyString + ", value:" + expected);
      try {
        Assert.assertEquals(actual, expected);
      } catch (AssertionError e) {
        throw new AssertionError(e);
      }
    }
  }
  @Test
  public void testGetRoundZeroForFourteenTeams() {
    this.doubleRoundRobinGenerator = new DoubleRoundRobinGenerator(14);

    Map<Integer, Integer> roundExpected = new HashMap<Integer, Integer>();
    roundExpected.put(0, 13);
    roundExpected.put(1, 12);
    roundExpected.put(2, 11);
    roundExpected.put(3, 10);
    roundExpected.put(4, 9);
    roundExpected.put(5, 8);
    roundExpected.put(6, 7);

    Map<Integer, Integer> round = doubleRoundRobinGenerator.getRound(0);

    assertEquals(roundExpected.size(), round.size());

    for (Map.Entry<Integer, Integer> match : roundExpected.entrySet()) {
      Integer home = match.getKey();
      Integer away = match.getValue();

      assertTrue(round.containsKey(home));
      assertTrue(round.containsValue(away));

      assertFalse(round.containsKey(away));
      assertFalse(round.containsValue(home));
    }
  }
  @Test
  public void testGetRoundThirteenForFourteenTeams() {
    this.doubleRoundRobinGenerator = new DoubleRoundRobinGenerator(14);

    Map<Integer, Integer> roundExpected = new HashMap<Integer, Integer>();
    roundExpected.put(13, 0);
    roundExpected.put(12, 1);
    roundExpected.put(11, 2);
    roundExpected.put(10, 3);
    roundExpected.put(9, 4);
    roundExpected.put(8, 5);
    roundExpected.put(7, 6);

    Map<Integer, Integer> round = doubleRoundRobinGenerator.getRound(13);
    // round 13 is the first return-leg (revenge) round

    assertEquals(roundExpected.size(), round.size());

    for (Map.Entry<Integer, Integer> match : roundExpected.entrySet()) {
      Integer home = match.getKey();
      Integer away = match.getValue();

      assertTrue(round.containsKey(home));
      assertTrue(round.containsValue(away));

      assertFalse(round.containsKey(away));
      assertFalse(round.containsValue(home));
    }
  }
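
Taken together, the two round-robin tests above show the return-leg relationship: for 14 teams, round 13 pairs the same teams as round 0 with home and away swapped. A hypothetical direct check of that mirroring (assuming getRound returns a home-to-away map, as the tests do) could read:

  DoubleRoundRobinGenerator generator = new DoubleRoundRobinGenerator(14);
  Map<Integer, Integer> firstLeg = generator.getRound(0);
  Map<Integer, Integer> returnLeg = generator.getRound(13);
  for (Map.Entry<Integer, Integer> match : firstLeg.entrySet()) {
    // In the return leg the fixture is mirrored: the away team now hosts the home team.
    assertEquals(match.getKey(), returnLeg.get(match.getValue()));
  }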
  @Test
  public void testShouldBeAbleToExecuteSimpleJavascriptAndReturnAnObjectLiteral() {

    Map<String, Object> expectedResult =
        new HashMap<String, Object>() {
          {
            put("foo", "bar");
            put("baz", Arrays.asList("a", "b", "c"));
            put(
                "person",
                new HashMap<String, String>() {
                  {
                    put("first", "John");
                    put("last", "Doe");
                  }
                });
          }
        };

    Object result =
        executeScript(
            "return {foo:'bar', baz: ['a', 'b', 'c'], person: {first: 'John',last: 'Doe'}};");
    assertTrue(result instanceof Map, "result was: " + result + " (" + result.getClass() + ")");

    Map<String, Object> map = (Map<String, Object>) result;
    assertTrue(map.size() == 3, "Expected:<" + expectedResult + ">, but was:<" + map + ">");
    assertEquals("bar", map.get("foo"));
    assertTrue(compareLists((List<?>) expectedResult.get("baz"), (List<?>) map.get("baz")));

    Map<String, String> person = (Map<String, String>) map.get("person");
    assertTrue(person.size() == (2), "Expected:<{first:John, last:Doe}>, but was:<" + person + ">");
    assertEquals("John", person.get("first"));
    assertEquals("Doe", person.get("last"));
  }
  /**
   * Delete the users that are present in the local content providers, but are not returned anymore
   * by Dailymotion. It means that these users are outdated and should not be proposed to the user.
   *
   * @return A boolean indicating if the deletion successfully occurred.
   */
  private boolean deleteOutdatedUsers() {
    if (mLocalUsers.size() > 0) {
      int count = mLocalUsers.size();
      KidsLogger.i(LOG_TAG, String.format("Removing %d users from db", count));
      ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>(count);
      for (String key : mLocalUsers.keySet()) {
        ContentProviderOperation operation =
            ContentProviderOperation.newDelete(UsersProvider.CONTENT_URI)
                .withSelection(UserContract.DAILYMOTION_ID + "=?", new String[] {key})
                .build();
        ops.add(operation);
      }
      try {
        return mContentResolver.applyBatch(UsersProvider.AUTHORITY, ops).length == count;
      } catch (RemoteException e) {
        // Should not happen, the Content Provider is local.
        KidsLogger.w(LOG_TAG, "An exception occurred while communicating with provider.");
        return false;
      } catch (OperationApplicationException e) {
        KidsLogger.w(LOG_TAG, "An operation has failed when deleting local users.");
        return false;
      }
    }

    return true;
  }
  private Assunto fillAssuntoFromScreen(Assunto assunto) {
    if (assunto == null) {
      assunto = new Assunto();
    }

    long selectedId;
    String nomeAssunto;
    if (dicIdxAndIdSubAssuntos.size() == 0) {
      // If there are no subassuntos, get the parent assunto:
      selectedId = ((Assunto) getAssuntosSpinner().getSelectedItem()).getId();
      nomeAssunto = getAssuntosSpinner().getSelectedItem().toString();
    } else {
      // Otherwise, get the last expanded subassunto:
      selectedId =
          ((Assunto)
                  dicIdxAndIdSubAssuntos
                      .get(dicIdxAndIdSubAssuntos.size() - 1)
                      .spnSubAssuntos
                      .getSelectedItem())
              .getId();
      nomeAssunto =
          dicIdxAndIdSubAssuntos
              .get(dicIdxAndIdSubAssuntos.size() - 1)
              .spnSubAssuntos
              .getSelectedItem()
              .toString();
    }

    assunto.setId((int) selectedId);
    assunto.setNome(nomeAssunto);
    return assunto;
  }
Example #15
 public boolean addAll(final Collection<? extends E> coll) {
   final int size = map.size();
   for (final E e : coll) {
     map.put(e, dummyValue);
   }
   return map.size() != size;
 }
 /** Writes the example set into the given output stream. */
 public void writeSupportVectors(ObjectOutputStream out) throws IOException {
   out.writeInt(getNumberOfSupportVectors());
   out.writeDouble(b);
   out.writeInt(dim);
   if ((meanVarianceMap == null) || (meanVarianceMap.size() == 0)) {
     out.writeUTF("noscale");
   } else {
     out.writeUTF("scale");
     out.writeInt(meanVarianceMap.size());
     Iterator i = meanVarianceMap.keySet().iterator();
     while (i.hasNext()) {
       Integer index = (Integer) i.next();
       MeanVariance meanVariance = meanVarianceMap.get(index);
       out.writeInt(index.intValue());
       out.writeDouble(meanVariance.getMean());
       out.writeDouble(meanVariance.getVariance());
     }
   }
   for (int e = 0; e < train_size; e++) {
     if (alphas[e] != 0.0d) {
       out.writeInt(atts[e].length);
       for (int a = 0; a < atts[e].length; a++) {
         out.writeInt(index[e][a]);
         out.writeDouble(atts[e][a]);
       }
       out.writeDouble(alphas[e]);
       out.writeDouble(ys[e]);
     }
   }
 }
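
A reader for the stream written above has to mirror the same field order: support-vector count, b, dim, the scale flag, the optional mean/variance entries, then one sparse vector plus alpha and y per support vector. A hypothetical sketch (assuming getNumberOfSupportVectors() counts exactly the non-zero-alpha examples the loop writes, and with the actual storage left as comments):

 public void readSupportVectors(ObjectInputStream in) throws IOException {
   int numSupportVectors = in.readInt();
   double b = in.readDouble();
   int dim = in.readInt();
   if ("scale".equals(in.readUTF())) {
     int entries = in.readInt();
     for (int i = 0; i < entries; i++) {
       int index = in.readInt();
       double mean = in.readDouble();
       double variance = in.readDouble();
       // store (index, mean, variance) wherever the model keeps its scaling info
     }
   }
   for (int v = 0; v < numSupportVectors; v++) {
     int length = in.readInt();
     int[] indices = new int[length];
     double[] values = new double[length];
     for (int a = 0; a < length; a++) {
       indices[a] = in.readInt();
       values[a] = in.readDouble();
     }
     double alpha = in.readDouble();
     double y = in.readDouble();
     // rebuild the support vector from (indices, values, alpha, y)
   }
 }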
Example #17
  @Test
  public void testExternalizeRenderResultPartial() throws IOException, ClassNotFoundException {
    RenderResultCache.setAltCache(new AltCacheSimpleImpl());
    Map<String, String> map = new HashMap<String, String>();
    map.put("a", "aa");
    map.put("b", "bb");
    StringBuilder sb = new StringBuilder("hello");
    long rt = 1000;
    CacheableRunner cr =
        new CacheableRunner("1m", "anything") {
          @Override
          protected RenderResult render() {
            return null;
          }
        };
    Map<Integer, ActionRunner> runners = new HashMap<Integer, ActionRunner>();
    runners.put(2, cr);
    RenderResultPartial rr = new RenderResultPartial(map, sb, rt, runners);
    byte[] ba = write(rr);
    rr = (RenderResultPartial) read(ba);

    assertNotNull(rr);
    Map<String, String> headers = rr.getHeaders();
    assertEquals(2, headers.size());
    assertEquals("aa", headers.get("a"));
    assertEquals("hello", rr.getText());
    assertEquals(rt, rr.renderTime);
    Map<Integer, ActionRunner> map2 = rr.getActionRunners();
    assertEquals(1, map2.size());
  }
  @Test
  public void testInitNormal() throws Exception {
    ProductLineConfigManager manager = new MockProductLineConfigManager();

    ((MockProductLineConfigManager) manager).setConfigDao(new MockConfigDao1());
    manager.initialize();

    ProductLine line1 = new ProductLine("Test1");
    ProductLine line2 = new ProductLine("Test2");
    String[] domains1 = {"domain1", "domain2"};
    String[] domains2 = {"domain3", "domain4"};

    manager.insertProductLine(line1, domains1);
    manager.insertProductLine(line2, domains2);

    Assert.assertEquals(2, s_storeCount);
    Assert.assertEquals("Default", manager.queryProductLineByDomain("domain"));
    Assert.assertEquals("Test1", manager.queryProductLineByDomain("domain1"));
    List<String> pDomains = manager.queryProductLineDomains("Test1");
    Assert.assertEquals(2, pDomains.size());
    Map<String, ProductLine> productLines = manager.queryProductLines();

    Assert.assertEquals(3, productLines.size());

    manager.enableLogging(new MockLog());
    manager.refreshProductLineConfig();
    productLines = manager.queryProductLines();
    Assert.assertEquals(1, productLines.size());
  }
  @Test
  public void testValidateMissingReferences() throws Exception {
    String xml = replaceParameters(getContent("missing_references.txt"), _fileEntry);

    ZipWriter zipWriter = ZipWriterFactoryUtil.getZipWriter();

    zipWriter.addEntry("/manifest.xml", xml);

    MissingReferences missingReferences =
        ExportImportHelperUtil.validateMissingReferences(
            TestPropsValues.getUserId(),
            _stagingGroup.getGroupId(),
            new HashMap<String, String[]>(),
            zipWriter.getFile());

    Map<String, MissingReference> dependencyMissingReferences =
        missingReferences.getDependencyMissingReferences();

    Map<String, MissingReference> weakMissingReferences =
        missingReferences.getWeakMissingReferences();

    Assert.assertEquals(2, dependencyMissingReferences.size());
    Assert.assertEquals(1, weakMissingReferences.size());

    FileUtil.delete(zipWriter.getFile());
  }
  public void testBulkGetAfterLifespanExpire() throws InterruptedException {
    Map<String, String> dataIn = new HashMap<String, String>();
    dataIn.put("aKey", "aValue");
    dataIn.put("bKey", "bValue");
    final long startTime = System.currentTimeMillis();
    final long lifespan = 10000;
    remoteCache.putAll(dataIn, lifespan, TimeUnit.MILLISECONDS);

    Set<Object> dataOut = new HashSet<Object>();
    while (true) {
      dataOut = remoteCache.keySet();
      if (System.currentTimeMillis() >= startTime + lifespan) break;
      assert dataOut.size() == dataIn.size()
          : String.format(
              "Data size not the same, put in %s elements, keySet has %s elements",
              dataIn.size(), dataOut.size());
      for (Object outKey : dataOut) {
        assert dataIn.containsKey(outKey);
      }
      Thread.sleep(100);
    }

    // Make sure the data is removed within the next 30 seconds
    while (System.currentTimeMillis() < startTime + lifespan + 30000) {
      dataOut = remoteCache.keySet();
      if (dataOut.size() == 0) return;
    }

    assert dataOut.size() == 0
        : String.format("Data not empty, it contains: %s elements", dataOut.size());
  }
    /** @generated */
    private static boolean buildElement2ViewMap(
        View parentView, Map<EObject, View> element2ViewMap, Set<? extends EObject> elements) {
      if (elements.size() == element2ViewMap.size()) {
        return true;
      }

      if (parentView.isSetElement()
          && !element2ViewMap.containsKey(parentView.getElement())
          && elements.contains(parentView.getElement())) {
        element2ViewMap.put(parentView.getElement(), parentView);
        if (elements.size() == element2ViewMap.size()) {
          return true;
        }
      }
      boolean complete = false;
      for (Iterator<?> it = parentView.getChildren().iterator(); it.hasNext() && !complete; ) {
        complete = buildElement2ViewMap((View) it.next(), element2ViewMap, elements);
      }
      for (Iterator<?> it = parentView.getSourceEdges().iterator(); it.hasNext() && !complete; ) {
        complete = buildElement2ViewMap((View) it.next(), element2ViewMap, elements);
      }
      for (Iterator<?> it = parentView.getTargetEdges().iterator(); it.hasNext() && !complete; ) {
        complete = buildElement2ViewMap((View) it.next(), element2ViewMap, elements);
      }
      return complete;
    }
Example #22
  private CFMLFactoryImpl[] toFactories(CFMLFactoryImpl[] factories, Map contextes) {
    if (factories == null || factories.length != contextes.size())
      factories =
          (CFMLFactoryImpl[]) contextes.values().toArray(new CFMLFactoryImpl[contextes.size()]);

    return factories;
  }
  private boolean performCompaction(DegreeCachingNode node) {
    Map<DetachedRelationshipDescription, Integer> cachedDegrees = node.getCachedDegrees();

    // Not above the threshold => no need for compaction
    if (cachedDegrees.size() <= compactionThreshold) {
      return true;
    }

    // No suitable generalization => bad luck
    DetachedRelationshipDescription generalization =
        generalizationStrategy.produceGeneralization(cachedDegrees);
    if (generalization == null) {
      return false;
    }

    // Find all the candidates to be eliminated by the generalization
    Set<DetachedRelationshipDescription> candidates = new HashSet<>();
    for (DetachedRelationshipDescription potentialCandidate : cachedDegrees.keySet()) {
      if (generalization.isMoreGeneralThan(potentialCandidate)) {
        candidates.add(potentialCandidate);
      }
    }

    int candidateCachedCount = 0;
    for (DetachedRelationshipDescription candidate : candidates) {
      int count = cachedDegrees.get(candidate);
      candidateCachedCount += count;
      node.decrementDegree(candidate, count);
    }

    node.incrementDegree(generalization, candidateCachedCount, true);

    // enough? => return, otherwise try again
    return cachedDegrees.size() <= compactionThreshold || performCompaction(node);
  }
 public Application get(final URI uri) throws InvalidApplicationException, RedirectException {
   Application app = getCached(uri);
   if (LOG.isDebugEnabled()) {
     LOG.debug(
         toString() + " found app=" + app + " for url=" + uri + ",_cache.size=" + _cache.size());
   }
   if (app == null) {
     app = findApplication(uri);
     putCached(uri, app);
     if (LOG.isDebugEnabled()) {
       LOG.debug(
           toString()
               + " cached app="
               + app
               + " for url="
               + uri
               + ",_cache.size="
               + _cache.size());
     }
   } else {
     Utils.setLogContext(app, null);
     LOG.info(app + " has been found.");
   }
   return app;
 }
  @Test
  public void testEvictUpdateExpiration() throws Exception {
    CyclicBarrier loadBarrier = new CyclicBarrier(2);
    CountDownLatch preFlushLatch = new CountDownLatch(1);
    CountDownLatch postEvictLatch = new CountDownLatch(1);
    CountDownLatch flushLatch = new CountDownLatch(1);
    CountDownLatch commitLatch = new CountDownLatch(1);

    Future<Boolean> first = evictWait(itemId, loadBarrier, null, postEvictLatch);
    Future<Boolean> second =
        updateFlushWait(itemId, loadBarrier, preFlushLatch, flushLatch, commitLatch);
    awaitOrThrow(postEvictLatch);

    Map contents = Caches.entrySet(entityCache).toMap();
    assertEquals(Collections.EMPTY_MAP, contents);
    assertNull(contents.get(itemId));

    preFlushLatch.countDown();
    awaitOrThrow(flushLatch);
    contents = Caches.entrySet(entityCache).toMap();
    assertEquals(1, contents.size());
    assertEquals(FutureUpdate.class, contents.get(itemId).getClass());

    commitLatch.countDown();
    first.get(WAIT_TIMEOUT, TimeUnit.SECONDS);
    second.get(WAIT_TIMEOUT, TimeUnit.SECONDS);

    contents = Caches.entrySet(entityCache).toMap();
    assertEquals(1, contents.size());
    Object value = contents.get(itemId);
    assertNotNull(value);
    assertEquals(StandardCacheEntryImpl.class, value.getClass());
    TIME_SERVICE.advance(timeout + 1);
    assertEquals(value, entityCache.get(itemId));
  }
  @Test
  public void testDetermineFileGroupIdentifierToFileGroupMap() throws Exception {
    //
    String base = getBase();
    List<File> fileList = new ArrayList<File>();
    fileList.add(new File("C:\\temp\\admin_de_DE.properties"));
    fileList.add(new File("C:\\temp\\admin_en_US.properties"));
    fileList.add(new File("C:\\temp\\article_de_DE.properties"));
    fileList.add(new File("C:\\temp\\article_en_US.properties"));
    fileList.add(new File("C:\\temp\\article_de.properties"));
    fileList.add(new File("C:\\temp\\article_en.properties"));

    //
    FileGrouper fileGrouper = new FileGrouper();
    try {
      fileGrouper.addAllFiles(fileList);
      fileGrouper.setGroupingPatternReplacementToken("{locale}");
      fileGrouper.setGroupingPatternString("(.*?_(\\w{2,3}_\\w{2,3}|\\w{2,3})|.*())\\.\\w*");
      fileGrouper.setGroupingPatternGroupingGroupIndexList(Arrays.asList(2, 3));
    } catch (Exception e) {
      e.printStackTrace();
      Assert.fail();
    }

    //
    Map<String, FileGroup> fileGroupIdentifierToFileGroupMap =
        fileGrouper.determineFileGroupIdentifierToFileGroupMap();

    //
    assertEquals(2, fileGroupIdentifierToFileGroupMap.size());
    Set<String> fileGroupIdentifierSet = fileGroupIdentifierToFileGroupMap.keySet();
    assertTrue(fileGroupIdentifierSet.contains(base + "C:\\temp\\admin_{locale}.properties"));
    assertTrue(fileGroupIdentifierSet.contains(base + "C:\\temp\\article_{locale}.properties"));

    //
    {
      //
      FileGroup fileGroup =
          fileGroupIdentifierToFileGroupMap.get(base + "C:\\temp\\admin_{locale}.properties");

      //
      Map<String, File> groupTokenToFileMap = fileGroup.getGroupTokenToFileMap();
      assertEquals(2, groupTokenToFileMap.size());
      assertTrue(groupTokenToFileMap.containsKey("de_DE"));
      assertTrue(groupTokenToFileMap.containsKey("en_US"));
    }
    {
      //
      FileGroup fileGroup =
          fileGroupIdentifierToFileGroupMap.get(base + "C:\\temp\\article_{locale}.properties");

      //
      Map<String, File> groupTokenToFileMap = fileGroup.getGroupTokenToFileMap();
      assertEquals(4, groupTokenToFileMap.size());
      assertTrue(groupTokenToFileMap.containsKey("de_DE"));
      assertTrue(groupTokenToFileMap.containsKey("en_US"));
      assertTrue(groupTokenToFileMap.containsKey("de"));
      assertTrue(groupTokenToFileMap.containsKey("en"));
    }
  }
  @Test
  public void testRemoveUpdateExpiration() throws Exception {
    CyclicBarrier loadBarrier = new CyclicBarrier(2);
    CountDownLatch preFlushLatch = new CountDownLatch(1);
    CountDownLatch flushLatch = new CountDownLatch(1);
    CountDownLatch commitLatch = new CountDownLatch(1);

    Future<Boolean> first = removeFlushWait(itemId, loadBarrier, null, flushLatch, commitLatch);
    Future<Boolean> second = updateFlushWait(itemId, loadBarrier, preFlushLatch, null, commitLatch);
    awaitOrThrow(flushLatch);

    Map contents = Caches.entrySet(entityCache).toMap();
    assertEquals(1, contents.size());
    assertEquals(Tombstone.class, contents.get(itemId).getClass());

    preFlushLatch.countDown();
    commitLatch.countDown();
    first.get(WAIT_TIMEOUT, TimeUnit.SECONDS);
    second.get(WAIT_TIMEOUT, TimeUnit.SECONDS);

    contents = Caches.entrySet(entityCache).toMap();
    assertEquals(1, contents.size());
    assertEquals(Tombstone.class, contents.get(itemId).getClass());

    TIME_SERVICE.advance(timeout + 1);
    assertNull(entityCache.get(itemId)); // force expiration
    contents = Caches.entrySet(entityCache).toMap();
    assertEquals(Collections.EMPTY_MAP, contents);
  }
 private static boolean isDomainRunning(
     final DomainClient client,
     final Map<ServerIdentity, ServerStatus> servers,
     boolean shutdown) {
   try {
     final Map<ServerIdentity, ServerStatus> statuses = client.getServerStatuses();
     for (ServerIdentity id : statuses.keySet()) {
       final ServerStatus status = statuses.get(id);
       switch (status) {
         case DISABLED:
         case STARTED:
           {
             servers.put(id, status);
             break;
           }
       }
     }
     if (shutdown) {
       return statuses.isEmpty();
     }
     return statuses.size() == servers.size();
   } catch (Exception e) {
     LOGGER.debug("Interrupted determining if domain is running", e);
   }
   return false;
 }
Example #29
  static void itTest4(Map s, int size, int pos) {
    IdentityHashMap seen = new IdentityHashMap(size);
    reallyAssert(s.size() == size);
    int sum = 0;
    timer.start("Iter XEntry            ", size);
    Iterator it = s.entrySet().iterator();
    Object k = null;
    Object v = null;
    for (int i = 0; i < size - pos; ++i) {
      Map.Entry x = (Map.Entry) (it.next());
      k = x.getKey();
      v = x.getValue();
      seen.put(k, k);
      if (x != MISSING) ++sum;
    }
    reallyAssert(s.containsKey(k));
    it.remove();
    reallyAssert(!s.containsKey(k));
    while (it.hasNext()) {
      Map.Entry x = (Map.Entry) (it.next());
      Object k2 = x.getKey();
      seen.put(k2, k2);
      if (x != MISSING) ++sum;
    }

    reallyAssert(s.size() == size - 1);
    s.put(k, v);
    reallyAssert(seen.size() == size);
    timer.finish();
    reallyAssert(sum == size);
    reallyAssert(s.size() == size);
  }
  /** Tests the setArticleToOpenIssue function */
  public void testLinkArticles() {

    Newsletter newsletter = new Newsletter("BFH Newsletter", "blablabla", 6);
    Issue issue = newsletter.compileIssueAndCreateNew(2007, new Date());

    Article article1 =
        newsletter.addArticle("Article 1", "The newsletter project is really funny ;)");
    Article article2 =
        newsletter.addArticle("Article 2", "The newsletter project is really funny ;)");
    Article article3 =
        newsletter.addArticle("Article 3", "The newsletter project is really funny ;)");

    newsletter.setArticleToOpenIssue(article1.getId());
    newsletter.setArticleToOpenIssue(article2.getId());

    Map unlinkedArticles = newsletter.getUnlinkedArticles();
    Map issueArticles = issue.getArticles();

    assertFalse(unlinkedArticles.containsValue(article1));
    assertFalse(unlinkedArticles.containsValue(article2));
    assertTrue(unlinkedArticles.containsValue(article3));
    assertTrue(unlinkedArticles.size() == 1);

    assertTrue(issueArticles.containsValue(article1));
    assertTrue(issueArticles.containsValue(article2));
    assertFalse(issueArticles.containsValue(article3));
    assertTrue(issueArticles.size() == 2);
  }