public int findFirstViewWithTag(String testTag, int actualPosition) {
   int itemPosInMap = 0;
   for (int i = 0; i < pictureMap.size(); i++) {
     EndlessPagerModel item = pictureMap.get(i);
     Log.i(
         TAG,
         "findFirstViewWithTag (pos= "
             + String.valueOf(itemPosInMap)
             + "): "
             + item.getTag()
             + "\nTrue? - "
             + String.valueOf(item.getTag().equals(testTag)));
     if (item.getTag().equals(testTag)) {
       Log.i(TAG, "If-Case - Return of p: " + String.valueOf(itemPosInMap));
       break;
     }
     itemPosInMap++;
   }
   // Because of FakeItems + 2
   itemPosInMap += 1;
   int newPosition = itemPosInMap + ((actualPosition / getRealSizeOfData()) * pictureMap.size());
   Log.i(
       TAG,
       "End of Method - Return of p: "
           + String.valueOf(itemPosInMap)
           + "\nSwitchToPos: "
           + String.valueOf(newPosition));
   return newPosition;
 }
Example #2
  protected void killConnections(ibis.ipl.IbisIdentifier corpse) {
    SendPort[] sps;
    ReceivePort[] rps;

    synchronized (this) {
      sps = sendPorts.values().toArray(new SendPort[sendPorts.size()]);
      rps = receivePorts.values().toArray(new ReceivePort[receivePorts.size()]);
    }
    for (SendPort s : sps) {
      try {
        s.killConnectionsWith(corpse);
      } catch (Throwable e) {
        if (logger.isDebugEnabled()) {
          logger.debug("Got exception from killConnectionsWith", e);
        }
      }
    }
    for (ReceivePort p : rps) {
      try {
        p.killConnectionsWith(corpse);
      } catch (Throwable e) {
        if (logger.isDebugEnabled()) {
          logger.debug("Got exception from killConnectionsWith", e);
        }
      }
    }
  }
  /**
   * @param modelNodeToDataNode mapping from model node ID to data node ID
   * @param dataIDToDataNodeID mapping from data ID to data node ID
   * @param mismatch the mismatch value for this sample
   * @param relationshipIDs the relationships included in this sample, keyed by ID
   * @throws IllegalArgumentException if the data nodes in the sample are not all unique.
   */
  public Sample(
      HashMap<Integer, Integer> modelNodeToDataNode,
      HashMap<String, Integer> dataIDToDataNodeID,
      double mismatch,
      HashMap<String, RelationshipForSample> relationshipIDs) {
    if (dataIDToDataNodeID != null) {
      this.dataIDToDataNodeID = new HashMap<String, Integer>(dataIDToDataNodeID.size());
      for (Entry<String, Integer> e : dataIDToDataNodeID.entrySet()) {
        this.dataIDToDataNodeID.put(e.getKey(), e.getValue());
      }
    } else {
      this.dataIDToDataNodeID = new HashMap<String, Integer>();
    }

    this.modelNodeToDataNode = new HashMap<Integer, Integer>(modelNodeToDataNode.size());
    this.dataNodeToModelNode = new HashMap<Integer, Integer>(modelNodeToDataNode.size());

    // Performing deep copy just in case
    for (Entry<Integer, Integer> e : modelNodeToDataNode.entrySet()) {
      this.modelNodeToDataNode.put(new Integer(e.getKey()), new Integer(e.getValue()));
      this.dataNodeToModelNode.put(new Integer(e.getValue()), new Integer(e.getKey()));
    }

    HashSet<Integer> dataNodes = new HashSet<Integer>(this.modelNodeToDataNode.values());
    if (dataNodes.size() != this.modelNodeToDataNode.size())
      throw new IllegalArgumentException(
          "Sample contains multiple instances of the same data node.");

    this.relationshipIDs = new HashMap<String, RelationshipForSample>();
    for (Entry<String, RelationshipForSample> e : relationshipIDs.entrySet()) {
      this.relationshipIDs.put(e.getKey(), e.getValue());
    }
    this.additionalRelationshipIDs = new HashMap<String, ArrayList<String>>();

    this.mismatch = mismatch;
  }
    synchronized void populate(List<ObaStop> stops, List<ObaRoute> routes) {
      int count = 0;

      if (mStopMarkers.size() >= FUZZY_MAX_MARKER_COUNT) {
        // We've exceeded our max, so clear the current marker cache and start over
        Log.d(TAG, "Exceeded max marker cache of " + FUZZY_MAX_MARKER_COUNT + ", clearing cache");
        removeMarkersFromMap();
        mStopMarkers.clear();
        mStops.clear();

        // Make sure the currently focused stop still exists on the map
        if (mCurrentFocusStop != null && mFocusedRoutes != null) {
          addMarkerToMap(mCurrentFocusStop, mFocusedRoutes);
          count++;
        }
      }

      for (ObaStop stop : stops) {
        if (!mStopMarkers.containsKey(stop.getId())) {
          addMarkerToMap(stop, routes);
          count++;
        }
      }

      Log.d(TAG, "Added " + count + " markers, total markers = " + mStopMarkers.size());
    }
  public static boolean isValid(HashMap<?, ?> expected, HashMap<?, ?> map) {
    if (map == null) {
      return false;
    }

    if (expected.size() != map.size()) {
      return false;
    }

    Set<?> entries = expected.entrySet();
    Iterator<?> entryIter = entries.iterator();
    while (entryIter.hasNext()) {
      Entry<?, ?> entry = (Entry<?, ?>) entryIter.next();

      Object value = map.get(entry.getKey());

      if (value != entry.getValue()) {
        if (value == null || entry.getValue() == null) {
          return false;
        }

        if (!map.get(entry.getKey()).equals(entry.getValue())) {
          return false;
        }
      }
    }

    return true;
  }
Example #6
  protected void computeEdgeAndThreadNo() {
    Iterator it = iterator();
    int numberOfEdge = 0;
    while (it.hasNext()) {
      List succList = (List) getSuccsOf(it.next());

      numberOfEdge = numberOfEdge + succList.size();
    }
    numberOfEdge = numberOfEdge + startToThread.size();

    System.err.println("**number of edges: " + numberOfEdge);

    System.err.println("**number of threads: " + (startToThread.size() + 1));

    /*  Set keySet = startToThread.keySet();
        Iterator keyIt = keySet.iterator();
        while (keyIt.hasNext()) {
          List list = (List) startToThread.get(keyIt.next());
          System.out.println("********start thread:");
          Iterator itit = list.iterator();
          while (itit.hasNext()) {
            System.out.println(itit.next());
          }
        }
    */

  }
Example #7
 // Decompiled helper: recursively walks a directory tree and records entries whose
 // file name starts with "snst" (name -> absolute path), stopping once the map
 // already holds 23 entries.
 private static void a(File paramFile, HashMap paramHashMap)
 {
   if (paramHashMap.size() >= 23)
     return;

   if (paramFile.isDirectory())
   {
     File[] arrayOfFile = paramFile.listFiles();
     if (arrayOfFile == null)
       return;
     for (int i = 0; i < arrayOfFile.length; i++)
     {
       a(arrayOfFile[i], paramHashMap);
       if (paramHashMap.size() >= 23)
         return;
     }
   }

   String str1 = paramFile.getAbsolutePath();
   String str2 = str1.substring(str1.lastIndexOf("/") + 1);
   if (str2.startsWith("snst"))
     paramHashMap.put(str2, str1);
 }
    void stopRandomServer() throws Exception {
      lock.writeLock().lock();
      TestRpcServer rpcServer = null;
      try {
        if (rpcServers.size() <= minServers) {
          return;
        }
        int size = rpcServers.size();
        int rand = random.nextInt(size);
        rpcServer = serverList.remove(rand);
        InetSocketAddress address = rpcServer.getListenerAddress();
        if (address == null) {
          // Throw exception here. We can't remove this instance from the server map because
          // we no longer have access to its map key
          throw new IOException("Listener channel is closed");
        }
        rpcServers.remove(address);

        if (rpcServer != null) {
          stopServer(rpcServer);
        }
      } finally {
        lock.writeLock().unlock();
      }
    }
  /** @param args the command line arguments */
  public static void main(String[] args) throws IOException {

    String snpMappings = "E:\\LLD_HRC_IMPUTATION\\SNPMappings.txt";
    String snps = "E:\\LLD_HRC_IMPUTATION\\SNPs.txt";

    String bedFolder = "E:\\dbSNP_147\\";

    String snpsOut = "E:\\LLD_HRC_IMPUTATION\\SNPs2.txt";
    String snpMappingsOut = "E:\\LLD_HRC_IMPUTATION\\SNPMappings2.txt";

    System.out.println("Processing original SNP file: ");
    LinkedHashSet<String> OrderingEst = readSnpOrderFile(snps);
    System.out.println("Done found: " + OrderingEst.size() + " SNPs");

    System.out.println("Processing original mapping file: ");
    HashMap<String, String> mappingEst = readSnpMappings(snpMappings, false, (OrderingEst.size()));
    System.out.println("Done found: " + mappingEst.size() + " mappings");

    System.out.println("Processing dbSNP bed folder: ");
    HashMap<String, String> mappingUmcg =
        readSnpMappingsBed(bedFolder, mappingEst, mappingEst.size());
    System.out.println("Done found: " + mappingUmcg.size() + " mappings");

    System.out.println("Remapping original data.");
    remapPositionsAndSnps(OrderingEst, mappingEst, mappingUmcg, snpsOut, snpMappingsOut);
  }
Example #10
  private Node getReferenceTable(Results r) {
    String[] subH = new String[r.getNumberOfClusters()];
    for (int i = 0; i < subH.length; i++) subH[i] = "Cluster " + i;

    DataTable refTable = new DataTable();
    refTable.addHiddenClusterHeaders(subH);

    // Fetch the reference labels
    String[] refLabels = r.getReferenceLabels();
    refTable.setLabelRowRelationship(getReferenceRowRelationship(refLabels));
    refTable.setTotalLabelCount(mRefLabelToRow.size());

    // For each cluster, we add the reference data means as labels, and link that data to the
    // corresponding label
    double[][] refMeans = r.getReferenceDataMeans();
    for (int col = 0; col < r.getNumberOfClusters(); col++) {
      ArrayList<Pair<String, String>> pairs = new ArrayList<Pair<String, String>>();
      double[] thisCol = refMeans[col];
      for (int j = 0; j < refLabels.length; j++) {
        Pair<String, String> pair =
            new Pair<String, String>(refLabels[j], String.valueOf(thisCol[j]));
        pairs.add(pair);
      }

      refTable.setColumnData(col, mRefLabelToRow.size(), pairs);
    }

    mRefDataTables.add(refTable);
    return refTable;
  }
  public void invalidateCompiledNativeCodeBlocks(int startAddress, int endAddress) {
    // Most common case: nothing to do.
    if (compiledNativeCodeBlocks.size() == 0) {
      return;
    }

    // Which is more efficient?
    // To scan all the addresses between startAddress and endAddress, or
    // to scan all the compiledNativeCodeBlocks?
    if ((endAddress - startAddress) >>> 2 < compiledNativeCodeBlocks.size()) {
      for (int address = startAddress; address <= endAddress; address += 4) {
        compiledNativeCodeBlocks.remove(address);
      }
    } else {
      List<Integer> toBeRemoved = new LinkedList<Integer>();
      for (Integer address : compiledNativeCodeBlocks.keySet()) {
        if (address.intValue() >= startAddress && address.intValue() <= endAddress) {
          toBeRemoved.add(address);
        }
      }
      for (Integer address : toBeRemoved) {
        compiledNativeCodeBlocks.remove(address);
      }
      toBeRemoved.clear();
    }
  }
Example #12
  public static ArrayList<String> getRetunItemToList(
      HashMap<String, String> urlParam, HashMap<Integer, String> searchIndisItemHm) {

    ArrayList<String> li = new ArrayList<String>();

    Set key = urlParam.keySet();
    // Required-field check (verifies the fields that must be present in the URL)
    for (Iterator iterator = key.iterator(); iterator.hasNext(); ) {
      String keyName = (String) iterator.next();
      String valueName = (String) urlParam.get(keyName);
      int chkNum = urlParam.size(); // required-field check counter
      for (int i = 0; i < searchIndisItemHm.size(); i++) {
        if (keyName.toUpperCase().equals(searchIndisItemHm.get(i).toUpperCase())) {
          chkNum = chkNum - urlParam.size();
        }
      }

      if (chkNum > 0) {

        li.add(keyName);
      }
    }

    return li;
  }
Example #13
  /**
   * Search validation: checks that every required field is present.
   *
   * @param urlParam the request parameters
   * @param searchIndisItemHm the required field names
   * @param errHm map that collects one error entry per missing required field, plus an "errCnt" count
   * @return errHm
   */
  public static HashMap<String, String> getValidataIndisItem(
      HashMap<String, String> urlParam,
      HashMap<Integer, String> searchIndisItemHm,
      HashMap<String, String> errHm) {

    // Required-field check (verifies the fields that must be present in the URL)
    int allchkNum = 0;

    for (int i = 0; i < searchIndisItemHm.size(); i++) {
      Set key = urlParam.keySet();

      int chkNum = searchIndisItemHm.size(); // required-field check counter

      for (Iterator iterator = key.iterator(); iterator.hasNext(); ) {
        String keyName = (String) iterator.next();
        String valueName = (String) urlParam.get(keyName);

        if (searchIndisItemHm.get(i).toUpperCase().equals(keyName.toUpperCase())) {

          allchkNum = allchkNum + 1;
          chkNum = chkNum - searchIndisItemHm.size();
        }
      }

      if (chkNum > 0) {

        errHm.put(searchIndisItemHm.get(i), "Required item variable does not exist.");
      }
    }

    int num = searchIndisItemHm.size() - allchkNum;
    errHm.put("errCnt", String.valueOf(num));
    return errHm;
  }
Example #14
  private Double CosineSimilarity(HashMap<String, Double> table1, HashMap<String, Double> table2)
      throws Exception {
    if (table1.size() != table2.size()) {
      throw new Exception("Table sizes must be equal");
    }

    // L2 norms (vector lengths) of table1 and table2
    double length1 = 0;
    double length2 = 0;

    // Double firstValue;
    double secValue;

    // dot product of the two vectors
    double svMul = 0;

    for (Entry<String, Double> kv : table1.entrySet()) {
      length1 += Math.pow(kv.getValue(), 2);

      secValue = table2.get(kv.getKey());
      length2 += Math.pow(secValue, 2);

      svMul += secValue * kv.getValue();
    }

    length1 = Math.sqrt(length1);
    length2 = Math.sqrt(length2);

    return Double.parseDouble(NumericFormat.getNumberFormated(svMul / (length1 * length2)));
  }
  @Test
  public void testExecute() throws Exception {
    HashMap<UserOptions, String> configMap = new HashMap<UserOptions, String>();
    configMap.put(UserOptions.DisplayButtons, Boolean.toString(true));
    configMap.put(UserOptions.EditorPageSize, Integer.toString(25));
    configMap.put(UserOptions.EnterSavesApproved, Boolean.toString(true));

    SaveOptionsAction action = new SaveOptionsAction(configMap);

    SaveOptionsResult result = handler.execute(action, null);

    assertThat(result.isSuccess(), Matchers.equalTo(true));
    List<HAccountOption> accountOptions =
        getEm().createQuery("from HAccountOption").getResultList();

    assertThat(accountOptions, Matchers.hasSize(configMap.size()));
    Map<String, HAccountOption> editorOptions = authenticatedAccount.getEditorOptions();

    assertThat(editorOptions.values(), Matchers.containsInAnyOrder(accountOptions.toArray()));

    handler.execute(action, null); // save again should override previous value
    accountOptions = getEm().createQuery("from HAccountOption").getResultList();

    assertThat(accountOptions, Matchers.hasSize(configMap.size()));
    assertThat(editorOptions.values(), Matchers.containsInAnyOrder(accountOptions.toArray()));
  }
  @Test
  public void readTest() {
    String insertKey = "user0";
    Map<String, ByteIterator> insertMap = insertRow(insertKey);
    HashSet<String> readFields = new HashSet<>();
    HashMap<String, ByteIterator> readResultMap = new HashMap<>();

    // Test reading a single field
    readFields.add("FIELD0");
    orientDBClient.read(CLASS, insertKey, readFields, readResultMap);
    assertEquals(
        "Assert that result has correct number of fields", readFields.size(), readResultMap.size());
    for (String field : readFields) {
      assertEquals(
          "Assert " + field + " was read correctly",
          insertMap.get(field).toString(),
          readResultMap.get(field).toString());
    }

    readResultMap = new HashMap<>();

    // Test reading all fields
    readFields.add("FIELD1");
    readFields.add("FIELD2");
    orientDBClient.read(CLASS, insertKey, readFields, readResultMap);
    assertEquals(
        "Assert that result has correct number of fields", readFields.size(), readResultMap.size());
    for (String field : readFields) {
      assertEquals(
          "Assert " + field + " was read correctly",
          insertMap.get(field).toString(),
          readResultMap.get(field).toString());
    }
  }
  public ArrayList<String> getTop() {
    String most = "";
    ArrayList<String> top = new ArrayList<String>();
    HashMap<String, Integer> hold = new HashMap<String, Integer>();

    for (Entry<String, Integer> e : score.entrySet()) {
      hold.put(e.getKey(), e.getValue());
    }

    int theMax = 10;
    if (hold.size() < 10) theMax = hold.size();

    for (int i = 0; i < theMax; i++) {
      System.out.println("**" + i + "**");
      // Track the best remaining entry in hold; reset the running max each pass so
      // previously removed winners are not selected again.
      int best = Integer.MIN_VALUE;
      for (Entry<String, Integer> info : hold.entrySet()) {
        System.out.println(info.getKey());
        if (info.getValue() > best) {
          best = info.getValue();
          most = info.getKey();
        }
      }

      top.add(most);
      hold.remove(most);
    }

    return top;
  }
  private ArrayList<StudentPersonalType> fetchStudents(
      HashMap<String, StudentPersonalType> studentMap, PagingInfo pagingInfo) {
    ArrayList<StudentPersonalType> studentList = new ArrayList<StudentPersonalType>();
    if (pagingInfo == null) // return all
    {
      studentList.addAll(studentMap.values());
    } else {
      pagingInfo.setTotalObjects(studentMap.size());
      if ((pagingInfo.getPageSize() * (pagingInfo.getCurrentPageNo())) > studentMap.size()) {
        return null; // Requested page outside of limits.
      }

      // retrieve applicable students
      Collection<StudentPersonalType> allStudent = studentMap.values();
      int i = 0;
      int startPos = pagingInfo.getPageSize() * pagingInfo.getCurrentPageNo();
      int endPos = startPos + pagingInfo.getPageSize();
      for (Iterator<StudentPersonalType> iter = allStudent.iterator(); iter.hasNext(); ) {
        StudentPersonalType student = iter.next();
        if ((i >= startPos) && (i < endPos)) {
          studentList.add(student);
        }
        i++;
      }
      // Set the number of objects that are returned in the paging info. This ensures HTTP headers
      // are set correctly.
      pagingInfo.setPageSize(studentList.size());
    }

    return studentList;
  }
Example #19
  private Node getDataTable(Results r, Pane pane) {
    // The grid part contains the cluster's centroid data, as well as the "X" button
    String[] subH = new String[r.getNumberOfClusters()];
    for (int i = 0; i < subH.length; i++) subH[i] = "Cluster " + i;

    DataTable dataTable = new DataTable("Run " + mNextRun++, subH);

    // Fetch the labels. Inform the data table of the label-row relationships
    String[] labels = r.getLabels();
    dataTable.setLabelRowRelationship(getLabelRowRelationship(labels));
    dataTable.setTotalLabelCount(mDataLabelToRow.size());

    // For each cluster, we create a list of Label - Data pairs, where the label denotes which
    // feature we are looking at and the Data represents that cluster's centroid for the current
    // feature
    for (int col = 0; col < r.getNumberOfClusters(); col++) {
      ArrayList<Pair<String, String>> labelDataPairs = new ArrayList<Pair<String, String>>();
      double[] clusterCentoid = r.getCentoid(col);

      for (int j = 0; j < labels.length; j++) {
        Pair<String, String> pair =
            new Pair<String, String>(labels[j], String.valueOf(clusterCentoid[j]));
        labelDataPairs.add(pair);
      }

      dataTable.setColumnData(col, mDataLabelToRow.size(), labelDataPairs);
    }

    dataTable.addCloseButton(pane);

    mDataTables.add(dataTable);
    return dataTable;
  }
 public HeatMapData generateMeanHeatMapData(ArrayList<String> probeList) {
   HeatMapData hmd = new HeatMapData(HeatMapData.ONE_COLOR);
   double[][] ddata = new double[probeList.size()][tissues.size() * meanStrains.size()];
   int actualSize = 0;
   for (int i = 0; i < probeList.size(); i++) {
     // System.out.println("get data:"+probeList.get(i));
     HashMap tmp = (HashMap) data.get(probeList.get(i));
     if (tmp != null) {
       ArrayList<Double> row = (ArrayList<Double>) tmp.get("meanRow");
       for (int j = 0; j < row.size(); j++) {
         ddata[i][j] = row.get(j).doubleValue();
       }
       actualSize++;
     } else {
       // System.out.println();
       probeList.remove(i);
       i--;
     }
   }
   double[][] tmpdata = new double[actualSize][tissues.size() * meanStrains.size()];
   for (int i = 0; i < tmpdata.length; i++) {
     tmpdata[i] = ddata[i];
   }
   hmd.setDataString(tmpdata, probeList, false);
   return hmd;
 }
Example #21
  /**
   * Used for the slider: limits the categories visualization from the selected category to the
   * selected category + numberCatsVisualization (a small stand-alone sketch follows the method).
   *
   * @param dataset the dataset
   * @param categories the categories
   * @param catSelected the selected category
   * @param numberCatsVisualization the number of categories to visualize
   * @return the filtered dataset
   */
  public Dataset filterDataset(
      Dataset dataset, HashMap categories, int catSelected, int numberCatsVisualization) {
    logger.debug("IN");
    DefaultCategoryDataset catDataset = (DefaultCategoryDataset) dataset;

    int numCats = categories.size();
    Vector visCat = new Vector();
    // from the chosen point to min(chosen point + interval, end point)
    // int startPoint=((catSelected-1)*numberCatsVisualization)+1;
    int startPoint = catSelected;

    int endPoint;
    if ((startPoint + numberCatsVisualization - 1) <= (categories.size()))
      endPoint = startPoint + numberCatsVisualization - 1;
    else endPoint = categories.size();

    for (int i = (startPoint); i <= endPoint; i++) {
      String name = (String) categories.get(new Integer(i));
      visCat.add(name);
    }

    List columns = new Vector(catDataset.getColumnKeys());
    for (Iterator iterator = columns.iterator(); iterator.hasNext(); ) {
      String col = (String) iterator.next();
      if (!(visCat.contains(col))) {
        catDataset.removeColumn(col);
      }
    }
    logger.debug("OUT");

    return catDataset;
  }
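To make the windowing above concrete, here is a minimal stand-alone sketch (the class name and sample data are hypothetical, not part of the chart code) that reproduces the startPoint/endPoint computation used by filterDataset, assuming Integer category keys starting at 1 as the lookups above suggest:

import java.util.HashMap;

public class SliderWindowSketch {
  public static void main(String[] args) {
    // Categories are keyed 1..12, mirroring the Integer keys looked up in filterDataset.
    HashMap<Integer, String> categories = new HashMap<Integer, String>();
    for (int i = 1; i <= 12; i++) {
      categories.put(i, "Category " + i);
    }

    int catSelected = 3;
    int numberCatsVisualization = 5;

    // Same bounds as filterDataset: from the selected category to
    // min(selected + window - 1, last category).
    int startPoint = catSelected;
    int endPoint = Math.min(startPoint + numberCatsVisualization - 1, categories.size());

    // Prints Category 3 .. Category 7; filterDataset removes every other column.
    for (int i = startPoint; i <= endPoint; i++) {
      System.out.println("visible: " + categories.get(Integer.valueOf(i)));
    }
  }
}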
  /**
   * Getting a list of itemGradings for a publishedItemId is a lot of work; read the code in
   * GradingService.getItemScores(). After we get the list, we cache it in
   * QuestionScoresBean.itemScoresMap: itemScoresMap = (Long publishedItemId -> HashMap(Long
   * publishedItemId -> Array of itemGradings)). itemScoresMap will be refreshed when the next
   * QuestionScore link is clicked. A small stand-alone sketch of this cache shape follows the
   * method.
   */
  private HashMap getItemScores(
      Long publishedId, Long itemId, String which, boolean isValueChange) {
    log.debug("getItemScores");
    GradingService delegate = new GradingService();
    QuestionScoresBean questionScoresBean =
        (QuestionScoresBean) ContextUtil.lookupBean("questionScores");
    HashMap itemScoresMap = questionScoresBean.getItemScoresMap();
    log.debug("getItemScores: itemScoresMap ==null ?" + itemScoresMap);
    log.debug("getItemScores: isValueChange ?" + isValueChange);

    if (itemScoresMap == null
        || isValueChange
        || questionScoresBean.getIsAnyItemGradingAttachmentListModified()) {
      log.debug("getItemScores: itemScoresMap == null or isValueChange == true ");
      log.debug("getItemScores: isValueChange = " + isValueChange);
      itemScoresMap = new HashMap();
      questionScoresBean.setItemScoresMap(itemScoresMap);
      // reset this anyway (because the itemScoresMap will be refreshed as well as the
      // attachment list)
      questionScoresBean.setIsAnyItemGradingAttachmentListModified(false);
    }
    log.debug("getItemScores: itemScoresMap.size() " + itemScoresMap.size());
    HashMap map = (HashMap) itemScoresMap.get(itemId);
    if (map == null) {
      log.debug("getItemScores: map == null ");
      map = delegate.getItemScores(publishedId, itemId, which, true);
      log.debug("getItemScores: map size " + map.size());
      itemScoresMap.put(itemId, map);
    }
    return map;
  }
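To make the cache shape described above concrete, here is a minimal stand-alone sketch (hypothetical class name; plain Strings stand in for the itemGrading objects) of the nested publishedItemId map that getItemScores builds and reuses:

import java.util.ArrayList;
import java.util.HashMap;

public class ItemScoresCacheSketch {
  public static void main(String[] args) {
    // Outer map: publishedItemId -> inner map; inner map: publishedItemId -> itemGradings.
    HashMap<Long, HashMap<Long, ArrayList<String>>> itemScoresMap =
        new HashMap<Long, HashMap<Long, ArrayList<String>>>();

    Long publishedItemId = Long.valueOf(42);

    // Cache miss: build the inner map and store it, mirroring the "map == null" branch above.
    HashMap<Long, ArrayList<String>> map = itemScoresMap.get(publishedItemId);
    if (map == null) {
      ArrayList<String> gradings = new ArrayList<String>();
      gradings.add("itemGrading-1");
      gradings.add("itemGrading-2");
      map = new HashMap<Long, ArrayList<String>>();
      map.put(publishedItemId, gradings);
      itemScoresMap.put(publishedItemId, map);
    }

    // Cache hit: later lookups reuse the stored inner map until the cache is reset.
    System.out.println("cached gradings: " + itemScoresMap.get(publishedItemId).get(publishedItemId));
  }
}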
Example #23
  synchronized void transfer(Object oldGroup, Object newGroup) {
    HashMap from = (HashMap) groups.get(oldGroup);
    if (from == null) return;

    HashMap to = (HashMap) groups.get(newGroup);
    if (to == null) {
      // simple case
      groups.put(newGroup, from);
      clearLimit(oldGroup);
      groups.remove(oldGroup);
      return;
    }

    if (to.size() < from.size()) {

      // place the contents of to into from
      mergeGroups(to, from);

      Object oldTo = groups.put(newGroup, from);
      if (SanityManager.DEBUG) {
        SanityManager.ASSERT(oldTo == to, "inconsistent state in LockSpace");
      }

    } else {
      mergeGroups(from, to);
    }

    clearLimit(oldGroup);
    groups.remove(oldGroup);
  }
Example #24
  /**
   * Shows the skill tree for the player. If the player has multiple trees, this will show the list
   * of skill trees they can view.
   *
   * @param player player to show the skill tree for
   * @return true if able to show the player, false otherwise
   */
  public boolean showSkills(Player player) {
    // Cannot show an invalid player, and cannot show no skills
    if (player == null || classes.size() == 0 || skills.size() == 0) {
      return false;
    }

    // Show skill tree of only class
    if (classes.size() == 1) {
      PlayerClass playerClass = classes.get(classes.keySet().toArray(new String[1])[0]);
      if (playerClass.getData().getSkills().size() == 0) {
        return false;
      }

      player.openInventory(
          ((InventoryTree) playerClass.getData().getSkillTree()).getInventory(this));
      return true;
    }

    // Show list of classes that have skill trees
    else {
      Inventory inv =
          InventoryManager.createInventory(
              TreeListener.CLASS_LIST_KEY, (classes.size() + 8) / 9, player.getName());
      for (PlayerClass c : classes.values()) {
        inv.addItem(c.getData().getIcon());
      }
      player.openInventory(inv);
      return true;
    }
  }
Example #25
 @Override
 public void onItemLongClickListener(View view, int position) {
   if (selectedApps.get(position) == null) {
     selectedApps.put(position, adapter.getModelList().get(position));
     adapter.setItemChecked(position, true);
   } else {
     selectedApps.remove(position);
     adapter.setItemChecked(position, false);
   }
   if (selectedApps.size() != 0) {
     if (actionMode == null) {
       actionMode = toolbar.startActionMode(this);
       AppHelper.actionModeColor(this);
     }
     if (selectedApps.size() > 1) {
       actionMode.setTitle("Backup ( " + selectedApps.size() + " Apps )");
     } else {
       actionMode.setTitle("Backup ( " + selectedApps.size() + " App )");
     }
     enableDisableMenuItem(false);
   } else {
     actionMode.finish();
     actionMode = null;
   }
 }
Example #26
 // Compute a compressed double buffer
 private Chunk chunkD() {
   HashMap<Long, Byte> hs = new HashMap<>(CUDChunk.MAX_UNIQUES);
   Byte dummy = 0;
   final byte[] bs = MemoryManager.malloc1(_len * 8, true);
   int j = 0;
   boolean fitsInUnique = true;
   for (int i = 0; i < _len; ++i) {
     double d = 0;
     if (_id == null || _id.length == 0 || (j < _id.length && _id[j] == i)) {
       d =
           _ds != null
               ? _ds[j]
               : (isNA2(j) || isCategorical(j)) ? Double.NaN : _ls[j] * PrettyPrint.pow10(_xs[j]);
       ++j;
     }
      if (fitsInUnique) {
        if (hs.size() < CUDChunk.MAX_UNIQUES) {
          // still got space; store doubles as longs to avoid NaN comparison issues during extraction
          hs.put(Double.doubleToLongBits(d), dummy);
        } else {
          // full, but might not need more space because of repeats
          fitsInUnique =
              (hs.size() == CUDChunk.MAX_UNIQUES) && hs.containsKey(Double.doubleToLongBits(d));
        }
      }
     UnsafeUtils.set8d(bs, 8 * i, d);
   }
   assert j == sparseLen() : "j = " + j + ", _len = " + sparseLen();
   if (fitsInUnique && CUDChunk.computeByteSize(hs.size(), len()) < 0.8 * bs.length)
     return new CUDChunk(bs, hs, len());
   else return new C8DChunk(bs);
 }
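The doubleToLongBits trick used in chunkD() above matters because NaN never compares equal to itself as a double, while Double.doubleToLongBits collapses every NaN to one canonical bit pattern; a minimal stand-alone illustration (hypothetical class name):

import java.util.HashMap;

public class NaNKeySketch {
  public static void main(String[] args) {
    // Double.NaN never compares equal to itself, so using doubles directly as
    // "unique value" keys would treat every NaN as a new value.
    System.out.println(Double.NaN == Double.NaN); // false

    // Storing the canonical bit pattern instead gives NaN a stable identity, which is
    // what chunkD() relies on when counting unique values.
    HashMap<Long, Byte> uniques = new HashMap<Long, Byte>();
    Byte dummy = 0;
    for (double d : new double[] {1.5, Double.NaN, Double.NaN, 1.5}) {
      uniques.put(Double.doubleToLongBits(d), dummy);
    }
    System.out.println(uniques.size()); // 2: one entry for 1.5, one for NaN
  }
}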
Example #27
  private String toStringSingle() {
    String indent = "       ";
    if (sf == null && children.size() > 0)
      return children.values().iterator().next().toStringSingle();
    if (children.size() == 0) return indent + sf;

    return children.values().iterator().next().toStringSingle() + "\n" + indent + sf;
  }
 @Test
 public void testAddGraph() {
   final String graphName = "bo";
   assertEquals(0, managedGraphs.size());
   manager.addGraph(graphName);
   assertEquals(1, managedGraphs.size());
   assertNotNull(managedGraphs.get(graphName));
 }
Example #29
 /**
  * Returns true if the query block contains any query or subquery without a source table, like
  * "select current_user()" or "select current_database()".
  *
  * @return true, if the query block contains any query without a source table
  */
 public boolean containsQueryWithoutSourceTable() {
   for (QBExpr qbexpr : aliasToSubq.values()) {
     if (qbexpr.containsQueryWithoutSourceTable()) {
       return true;
     }
   }
   return aliasToTabs.size() == 0 && aliasToSubq.size() == 0;
 }
Example #30
 public String toString() {
   String s =
       String.format("NameableMap %d - %d - %d\n", _nameMap.size(), _UUIDMap.size(), _list.size());
   for (INameable n : _list) {
     s += "\t" + n.getQualifiedLabel() + "\n";
   }
   return s;
 }