// createPageString - create the list of page links for search results
private String createPageString(int numberOfItems, int itemsPerPage, int currentPage, String baseUrl) {
  StringBuffer pageString = new StringBuffer();

  // Calculate the total number of pages (one ceil on the real-valued division is enough)
  int totalPages = 1;
  if (numberOfItems > itemsPerPage) {
    totalPages = (int) Math.ceil(numberOfItems / (double) itemsPerPage);
  }

  if (totalPages > 1) {
    for (int i = 1; i <= totalPages; i++) {
      if (i == currentPage) {
        pageString.append(i);
      } else {
        pageString.append("<a href=\"" + baseUrl + i + "\" title=\"" + i + "\">" + i + "</a>");
      }
      if (i != totalPages) {
        pageString.append(" ");
      }
    }
  } else {
    pageString.append("1");
  }
  return pageString.toString();
}
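// Illustrative call, not part of the original class (createPageString is private, so in practice
// it would be invoked from another method of the same class). With 23 items, 10 per page, the
// current page being 2, and a base URL of "search?page=", the result is:
//   <a href="search?page=1" title="1">1</a> 2 <a href="search?page=3" title="3">3</a>
String nav = createPageString(23, 10, 2, "search?page=");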
public AnnotFloorPlan(FloorPlan fp, int ratio, int actualW, int actualH) {
  em = DatabaseService.getEntityManager();
  floorPlan = fp;
  deadPoints = new HashSet<DeadPoint>();
  this.ratio = ratio;
  this.actualW = actualW;
  this.actualH = actualH;

  // Width and height of each cell
  // unitW = fp.getWidth() * ratio / 100;
  // unitH = fp.getHeight() * ratio / 100;
  // Calc the number of rows and columns needed
  // rowCount = fp.getHeight() / unitH + 1;
  // colCount = fp.getWidth() / unitW + 1;
  rowCount = 50;
  colCount = 75;
  // Cast to double before dividing so Math.ceil actually rounds up;
  // with plain integer division the ceil would be a no-op.
  unitW = (int) Math.ceil(fp.getWidth() / (double) colCount);
  unitH = (int) Math.ceil(fp.getHeight() / (double) rowCount);
  cellContainer = new Cell[rowCount][colCount];

  // init the graph
  g = new SimpleWeightedGraph<Cell, WeightedEdge>(WeightedEdge.class);
  initGraph();
}
/** @return true if the number of instances is evenly distributed across the specified containers */
public static boolean isEvenlyDistributedAcrossContainers(
    ProcessingUnit pu, GridServiceContainer[] containers) {

  if (!isProcessingUnitIntact(pu, containers)) {
    return false;
  }

  boolean evenlyDistributed = true;
  int numberOfInstances = pu.getTotalNumberOfInstances();
  int numberOfContainers = containers.length;
  if (numberOfInstances < numberOfContainers) {
    evenlyDistributed = false;
  } else {
    double expectedAverageNumberOfInstancesPerContainer =
        1.0 * numberOfInstances / numberOfContainers;
    int numberOfServicesPerContainerUpperBound =
        (int) Math.ceil(expectedAverageNumberOfInstancesPerContainer);
    int numberOfServicesPerContainerLowerBound =
        (int) Math.floor(expectedAverageNumberOfInstancesPerContainer);
    for (GridServiceContainer container : containers) {
      int puNumberOfInstances = container.getProcessingUnitInstances(pu.getName()).length;
      if (puNumberOfInstances < numberOfServicesPerContainerLowerBound
          || puNumberOfInstances > numberOfServicesPerContainerUpperBound) {
        evenlyDistributed = false;
        break;
      }
    }
  }
  return evenlyDistributed;
}
/**
 * In the first round, the server is supposed to ask the sensors for uncoded data (as in the
 * paper), so we send the maximum number of bits: 63 bits = "111111". The sensor will encode in
 * IEEE double format. Otherwise, this calculates i (the number of requested bits) as given in the
 * paper.
 */
@Override
public String sendRequest(int sensor_id) {
  if (is_first_round[sensor_id] || sensor_id == 0 || round_number < M) {
    requested_bits[sensor_id] = maximum_number_of_bits_askable;
    return "111111";
  }
  double delta = code_book.getD();
  int i =
      (int) Math.ceil(
          (0.5 * (Math.log((sigma[sensor_id] * sigma[sensor_id]) / (delta * delta * P_e))
                  / Math.log(2.0)))
              + 1);
  i = Math.min(i, maximum_number_of_bits_askable); // request no more than 63 bits
  i = Math.max(0, i); // request no fewer than 0 bits
  requested_bits[sensor_id] = i;
  // pad to make the length 6
  String bit_string =
      tools.pad0ToFront(Integer.toBinaryString(i), bits_needed_to_represent_maximum_askable);
  return tools.reverse(bit_string);
}
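// Read back as a formula (assuming, as the variable names suggest, that sigma[sensor_id] is the
// sensor's standard deviation, delta the codebook step size, and P_e the target error probability):
//   i = ceil( 0.5 * log2( sigma^2 / (delta^2 * P_e) ) + 1 ),  then clamped to the range [0, 63].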
/**
 * Counts the mutations per subinterval in the answer object.
 *
 * @param answer an answer object that was generated through a query
 * @return Integer array holding the number of mutations that intersect each subinterval
 */
public static Integer[] count_mutations(QueryAnswer answer) {
  Integer[] pos = answer.position;
  float interval_length = pos[1] - pos[0];
  int subinterval_length = answer.zoom_level;
  int number_subints = (int) Math.ceil(interval_length / subinterval_length);

  Integer[] counts = new Integer[number_subints];
  for (int i = 0; i < counts.length; i++) {
    counts[i] = 0;
  }

  int[] subint_left_boundaries = new int[number_subints];
  int left = pos[0];
  for (int i = 0; i < number_subints; i++) {
    subint_left_boundaries[i] = left + i * subinterval_length;
  }

  for (IntervalST.Mutation mut : answer.mutations) {
    int mut_low = mut.i_Low;
    int mut_high = mut.i_High;
    int last_index = subint_left_boundaries.length - 1;
    for (int i = 0; i < last_index; i++) {
      int lower = Math.max(subint_left_boundaries[i], mut_low);
      int higher = Math.min(subint_left_boundaries[i + 1], mut_high);
      if (lower <= higher) {
        counts[i]++;
      }
    }
    if (mut_high >= subint_left_boundaries[last_index]) {
      counts[last_index]++;
    }
  }
  return counts;
}
/**
 * @param list the data source
 * @param fieldMap mapping from the bean's (English) property names to the Chinese column headers
 *     in Excel, e.g. {id=编号}. For properties of a referenced object, use an EL-like path: if the
 *     list holds Student objects and each student has a college property whose name is wanted,
 *     write fieldMap.put("college.collegeName", "学院名称")
 * @param sheetName name of the worksheet
 * @param sheetSize maximum number of records per worksheet
 * @param os the output stream to export to
 * @throws ExcelException
 * @MethodName : listToExcel
 * @Description : Export to Excel (either to the local file system or to the browser; the
 *     worksheet size is configurable)
 */
public static <T> void listToExcel(
    List<T> list,
    LinkedHashMap<String, String> fieldMap,
    String sheetName,
    int sheetSize,
    OutputStream os)
    throws ExcelException {

  if (CollectionUtils.isEmpty(list)) {
    throw new ExcelException("数据源中没有任何数据");
  }
  if (sheetSize < 1 || sheetSize > 65535) {
    sheetSize = 65535;
  }

  WritableWorkbook wwb;
  try {
    // Create the workbook and write it to wherever the OutputStream points
    wwb = Workbook.createWorkbook(os);

    // An Excel 2003 worksheet holds at most 65536 rows; minus the header row that leaves 65535
    // records, so a large list has to be split across several worksheets - essentially paging.
    // 1. Work out how many worksheets are needed
    double sheetNum = Math.ceil(list.size() / (double) sheetSize);

    // 2. Create the worksheets and fill them with data
    for (int i = 0; i < sheetNum; i++) {
      if (sheetNum == 1) {
        // Only one worksheet is needed
        WritableSheet sheet = wwb.createSheet(sheetName, i);
        fillSheet(sheet, list, fieldMap, 0, list.size() - 1);
      } else {
        // Several worksheets are needed
        WritableSheet sheet = wwb.createSheet(sheetName + (i + 1), i);
        // Compute the first and last index for this worksheet
        int firstIndex = i * sheetSize;
        int lastIndex =
            (i + 1) * sheetSize - 1 > list.size() - 1 ? list.size() - 1 : (i + 1) * sheetSize - 1;
        fillSheet(sheet, list, fieldMap, firstIndex, lastIndex);
      }
    }

    wwb.write();
    wwb.close();
  } catch (Exception e) {
    e.printStackTrace();
    if (e instanceof ExcelException) {
      // If it is already an ExcelException, rethrow it directly
      throw (ExcelException) e;
    } else {
      // Otherwise wrap the exception in an ExcelException and rethrow
      throw new ExcelException("导出Excel失败");
    }
  }
}
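// Illustrative call, not part of the original utility; Student, studentDao, and the column labels
// are hypothetical, while the export call itself matches the method above.
List<Student> students = studentDao.findAll();
LinkedHashMap<String, String> fieldMap = new LinkedHashMap<String, String>();
fieldMap.put("id", "编号");
fieldMap.put("name", "姓名");
fieldMap.put("college.collegeName", "学院名称");
OutputStream os = new FileOutputStream("students.xls");
listToExcel(students, fieldMap, "学生", 65535, os);
os.close();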
public void onMessage(PlineMessage m, List<Message> out) {
  int slot = m.getSlot();

  // no check for server messages
  if (slot < 1 || slot > 6) {
    out.add(m);
    return;
  }

  String text = m.getText();
  float charsByLine = 70;
  int lineCount = (int) Math.ceil(text.length() / charsByLine);
  long now = System.currentTimeMillis();

  boolean isRateExceeded = false;
  for (int i = 0; i < lineCount; i++) {
    isRateExceeded = isRateExceeded || isRateExceeded(slot - 1, now);
  }

  if (slot > 0 && isRateExceeded) {
    if ((now - lastWarning) > warningPeriod * 1000) {
      User user = getChannel().getPlayer(slot);
      out.add(new PlineMessage("filter.flood.blocked", user.getName()));
      lastWarning = now;
    }
  } else {
    out.add(m);
  }
}
/**
 * @param gas_seq the sequence which we wish to map against the other sequence(s)
 * @param gas_targetSeqs the target sequence
 */
private HashMap<Integer, Integer> doMapSeq2Seq(
    GappedAlignmentString gas_seq, GappedAlignmentString[] gas_targetSeqs) {
  int N = gas_seq.gappedLength();

  // making map allocation more efficient
  int initCapacity = (int) Math.ceil(1.5 * N);
  HashMap<Integer, Integer> map = new HashMap<Integer, Integer>(initCapacity);

  // When a gapped position corresponds to a gap, remove it from seq
  HashMap<Integer, Integer> gapped2UngappedMap_seq = gas_seq.getGapped2UngappedMap();
  Integer[] gapPositions_seq = gas_seq.determineGapPositions();
  for (Integer e : gapPositions_seq) gapped2UngappedMap_seq.remove(e);

  // When a gapped position corresponds to a gap, put -1 as its value
  HashMap<Integer, Integer> gapped2UngappedMap_targetSeq =
      gas_targetSeqs[0].getGapped2UngappedMap();
  Integer[] gapPositions_targetSeq = gas_targetSeqs[0].determineGapPositions();
  for (Integer e : gapPositions_targetSeq) gapped2UngappedMap_targetSeq.put(e, -1);

  Integer[] gappedPositionsNoGaps_seq =
      gapped2UngappedMap_seq.keySet().toArray(new Integer[gapped2UngappedMap_seq.size()]);

  for (Integer gappedPosNoGaps : gappedPositionsNoGaps_seq) {
    Integer ungappedPos = gapped2UngappedMap_targetSeq.get(gappedPosNoGaps);
    // Use a proper null check: the original `ungappedPos.equals(null)` could never be true and
    // would throw a NullPointerException for missing keys instead of mapping them to -1.
    if (ungappedPos == null) {
      ungappedPos = -1;
    }
    map.put(gapped2UngappedMap_seq.get(gappedPosNoGaps), ungappedPos);
  }
  return map;
} // end of doMapSeq2Seq method
// Returns the distance between two planets, rounded up to the next highest
// integer. This is the number of discrete time steps it takes to get
// between the two planets.
public int Distance(int sourcePlanet, int destinationPlanet) {
  Planet source = planets.get(sourcePlanet);
  Planet destination = planets.get(destinationPlanet);
  double dx = source.X() - destination.X();
  double dy = source.Y() - destination.Y();
  return (int) Math.ceil(Math.sqrt(dx * dx + dy * dy));
}
public static List<Pair<String, String[]>> sqlList(List<Integer> input, int maxArgs) {
  List<Pair<String, String[]>> ops = new ArrayList<Pair<String, String[]>>();

  // figure out how many iterations we'll need
  int numIterations = (int) Math.ceil(((double) input.size()) / maxArgs);
  for (int currentRound = 0; currentRound < numIterations; ++currentRound) {
    int startPoint = currentRound * maxArgs;
    int lastIndex = Math.min((currentRound + 1) * maxArgs, input.size());

    String ret = "(";
    for (int i = startPoint; i < lastIndex; ++i) {
      ret += "?" + ",";
    }

    String[] array = new String[lastIndex - startPoint];
    int count = 0;
    for (int i = startPoint; i < lastIndex; ++i) {
      array[count++] = String.valueOf(input.get(i));
    }
    ops.add(new Pair<String, String[]>(ret.substring(0, ret.length() - 1) + ")", array));
  }
  return ops;
}
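// Illustrative use, not from the original project. It assumes android.util.Pair and an Android
// SQLiteDatabase named db; 999 matches SQLite's default cap on bound variables per statement.
List<Integer> staleIds = loadStaleIds(); // hypothetical helper
for (Pair<String, String[]> chunk : sqlList(staleIds, 999)) {
  db.delete("messages", "_id IN " + chunk.first, chunk.second);
}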
/** Applies mutation to the new population */
public void mutate() {
  int posiciones, i, j;
  double m;

  posiciones = n_genes * long_poblacion;

  if (prob_mutacion > 0)
    while (Mu_next < posiciones) {
      /* Determine the chromosome and the gene corresponding to the position to be mutated */
      i = Mu_next / n_genes;
      j = Mu_next % n_genes;

      /* Apply the mutation to that gene */
      poblacion[i].mutate(j);

      /* Mark the mutated chromosome so it is re-evaluated later */
      poblacion[i].setEvaluated(false);

      /* Compute the next position to mutate */
      if (prob_mutacion < 1) {
        m = Randomize.Rand();
        Mu_next += Math.ceil(Math.log(m) / Math.log(1.0 - prob_mutacion));
      } else Mu_next += 1;
    }

  Mu_next -= posiciones;
}
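// Note on the Mu_next update (an explanatory sketch, not project code): instead of testing every
// gene with probability prob_mutacion, the loop samples the gap to the next mutated gene from a
// geometric distribution. For a uniform u in (0,1) and per-gene mutation probability p,
//   gap = ceil( ln(u) / ln(1 - p) )
// satisfies P(gap > k) = (1 - p)^k, which reproduces independent per-gene mutation while skipping
// directly between mutated positions.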
private void populatePosPreferences() {
  for (Question q : posPref.keySet()) {
    if (adversaryType == AdversaryType.INNER) {
      int filled = (int) Math.ceil((double) posPref.get(q).length / 2.0) - 1;
      int pieces = 2 * (int) Math.pow(2, filled) - 1;
      for (int i = 0; i <= filled; i++) {
        double prob = ((double) 1 + i) / (double) pieces;
        posPref.get(q)[i] = prob;
        int j = posPref.get(q).length - i - 1;
        if (posPref.get(q)[j] == UNSET) posPref.get(q)[j] = prob;
        else posPref.get(q)[j] += prob;
      }
    } else {
      for (int optionPos = 0; optionPos < posPref.get(q).length; optionPos++) {
        switch (adversaryType) {
          case UNIFORM:
            posPref.get(q)[optionPos] = (1.0 / (double) posPref.get(q).length);
            break;
          case FIRST:
            if (optionPos == 0) posPref.get(q)[optionPos] = 1.0;
            else posPref.get(q)[optionPos] = 0.0;
            break;
          case LAST:
            if (optionPos == posPref.get(q).length - 1) posPref.get(q)[optionPos] = 1.0;
            else posPref.get(q)[optionPos] = 0.0;
            break;
          default:
            break;
        }
      }
    }
  }
}
private void updateEstimatedCompactionsByTasks(List<List<SSTableReader>> tasks) {
  int n = 0;
  for (List<SSTableReader> bucket : tasks) {
    if (bucket.size() >= cfs.getMinimumCompactionThreshold())
      n += Math.ceil((double) bucket.size() / cfs.getMaximumCompactionThreshold());
  }
  estimatedRemainingTasks = n;
}
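// Worked example (illustrative thresholds, not taken from the surrounding code): with a minimum
// threshold of 4 and a maximum threshold of 32, a bucket of 70 SSTables contributes
// ceil(70 / 32) = 3 estimated compactions, while a bucket of 3 SSTables is below the minimum and
// contributes nothing.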
static int distance(int[] pointA, int[] pointB) {
  return (int)
      Math.ceil(
          Math.sqrt(
              Math.pow(pointB[0] - pointA[0], 2) + Math.pow(pointB[1] - pointA[1], 2)));
}
public int minkeys() {
  // if node is the root, minkey is 1
  if (getParent() == null) {
    return 1;
  } else {
    return (int) (Math.ceil(degree / 2.0) - 1);
  }
}
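// Worked example (illustrative, assuming `degree` is the B-tree order, i.e. the maximum number of
// children per node): with degree = 5 a non-root node must hold at least ceil(5 / 2) - 1 = 2 keys,
// while the root only needs 1.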
/**
 * Sets image to be processed.
 *
 * @param xsize width of image
 * @param ysize height of image
 * @param buf pixel data
 * @param rect the bounding rectangle defines the region of the image to be recognized. A
 *     rectangle of zero dimension or <code>null</code> indicates the whole image.
 * @param bpp bits per pixel, represents the bit depth of the image, with 1 for binary bitmap, 8
 *     for gray, and 24 for color RGB.
 */
private void setImage(int xsize, int ysize, ByteBuffer buf, Rectangle rect, int bpp) {
  int bytespp = bpp / 8;
  int bytespl = (int) Math.ceil(xsize * bpp / 8.0);
  api.TessBaseAPISetImage(handle, buf, xsize, ysize, bytespp, bytespl);

  if (rect != null && !rect.isEmpty()) {
    api.TessBaseAPISetRectangle(handle, rect.x, rect.y, rect.width, rect.height);
  }
}
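// Worked example of the bytes-per-line computation (illustrative numbers): a 10-pixel-wide binary
// bitmap (bpp = 1) needs ceil(10 * 1 / 8.0) = 2 bytes per scan line, while a 10-pixel-wide RGB
// image (bpp = 24) needs ceil(10 * 24 / 8.0) = 30.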
/**
 * Compute the bitfield byte array from the isComplete BitSet
 *
 * @return byte[]
 */
public byte[] getBitField() {
  int l = (int) Math.ceil((double) this.nbPieces / 8.0);
  byte[] bitfield = new byte[l];
  for (int i = 0; i < this.nbPieces; i++)
    if (this.isComplete.get(i)) {
      bitfield[i / 8] |= 1 << (7 - i % 8);
    }
  return bitfield;
}
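// Sketch of the inverse operation, not part of the original class: rebuild the completion BitSet
// from a received bitfield, assuming the same bit order as above (piece 0 in the most significant
// bit of byte 0, as in the BitTorrent wire format).
public static BitSet parseBitField(byte[] bitfield, int nbPieces) {
  BitSet pieces = new BitSet(nbPieces);
  for (int i = 0; i < nbPieces; i++) {
    if ((bitfield[i / 8] & (1 << (7 - i % 8))) != 0) {
      pieces.set(i);
    }
  }
  return pieces;
}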
protected ArrayList<SquareZone> createSquaresGrid(
    int UTMZone,
    String hemisphere,
    Sector UTMZoneSector,
    double minEasting,
    double maxEasting,
    double minNorthing,
    double maxNorthing) {
  ArrayList<SquareZone> squares = new ArrayList<SquareZone>();
  double startEasting = Math.floor(minEasting / ONEHT) * ONEHT;
  double startNorthing = Math.floor(minNorthing / ONEHT) * ONEHT;
  int cols = (int) Math.ceil((maxEasting - startEasting) / ONEHT);
  int rows = (int) Math.ceil((maxNorthing - startNorthing) / ONEHT);
  SquareZone[][] squaresArray = new SquareZone[rows][cols];
  int col = 0;
  for (double easting = startEasting; easting < maxEasting; easting += ONEHT) {
    int row = 0;
    for (double northing = startNorthing; northing < maxNorthing; northing += ONEHT) {
      SquareZone sz = new SquareZone(UTMZone, hemisphere, UTMZoneSector, easting, northing, ONEHT);
      if (sz.boundingSector != null && !sz.isOutsideGridZone()) {
        squares.add(sz);
        squaresArray[row][col] = sz;
      }
      row++;
    }
    col++;
  }
  // Keep track of neighbors
  for (col = 0; col < cols; col++) {
    for (int row = 0; row < rows; row++) {
      SquareZone sz = squaresArray[row][col];
      if (sz != null) {
        sz.setNorthNeighbor(row + 1 < rows ? squaresArray[row + 1][col] : null);
        sz.setEastNeighbor(col + 1 < cols ? squaresArray[row][col + 1] : null);
      }
    }
  }
  return squares;
}
/**
 * @param container - the container for which the planned maximum number of instances is requested
 * @param approvedContainers - the containers approved for deployment for the specified pu
 * @param pu - the processing unit
 * @return the planned maximum number of instances for the specified container
 */
public static int getPlannedMaximumNumberOfInstancesForContainer(
    GridServiceContainer container, GridServiceContainer[] approvedContainers, ProcessingUnit pu) {
  int max = 0;
  if (Arrays.asList(approvedContainers).contains(container)) {
    max = (int) Math.ceil(getAverageNumberOfInstancesPerContainer(approvedContainers, pu));
  }
  return max;
}
String visualize() {
  int ubDepth = (int) Math.ceil(Math.log(size) / Math.log(m_order)) * elemHeight;
  int ubWidth = size * elemWidth;
  java.io.StringWriter sw = new java.io.StringWriter();
  java.io.PrintWriter pw = new java.io.PrintWriter(sw);
  pw.println("<html><head></head><body>");
  visualize(root, pw, 0, 0, ubWidth, ubDepth);
  pw.println("</body></html>");
  pw.flush();
  return sw.toString();
}
public static void main(String[] args) {
  double delta = 0.0001;
  double sigma = 0.005;
  double P_e = 0.01;
  int i =
      (int) Math.ceil(
          (0.5 * (Math.log((sigma * sigma) / (delta * delta * P_e)) / Math.log(2.0))) + 1);
  i = Math.min(i, 63); // request no more than 63 bits
  i = Math.max(0, i); // request no fewer than 0 bits
}
public static void main(String[] args) throws IOException {
  br = new BufferedReader(new InputStreamReader(System.in));
  out = new PrintWriter(new OutputStreamWriter(System.out));
  // br = new BufferedReader(new FileReader("in.txt"));
  // out = new PrintWriter(new FileWriter("out.txt"));

  l = readInt();
  x = readInt();

  out.println((int) Math.ceil(round(l * x / (1.0 / (1.0 - 1.0 / x)))));
  out.close();
}
public RTree(boolean linear, int m_order, int dimension)
    throws DimensionalException, FileNotFoundException {
  this.linear = linear;
  M_order = m_order;
  // Assign the minimum order to the field rather than to the shadowing parameter; the original
  // `m_order = (int) Math.ceil(M_order / 2.0);` wrote to the parameter and silently discarded
  // the value (this assumes the class declares a lowercase m_order field).
  this.m_order = (int) Math.ceil(M_order / 2.0);
  this.dimension = dimension;
  root = makeRoot(true);
  setNumOfPartitions(0);
  setNumOfNodes(1);
  visitedNodes = BigInteger.ZERO;
  file = new RandomAccessFile("archivo.bin", "rw");
}
/**
 * Creates a new LRU cache.
 *
 * @param cacheSize the maximum number of entries that will be kept in this cache.
 */
public LRUCache(int cacheSize) {
  this.cacheSize = cacheSize;
  int hashTableCapacity = (int) Math.ceil(cacheSize / hashTableLoadFactor) + 1;
  map =
      new LinkedHashMap<K, V>(hashTableCapacity, hashTableLoadFactor, true) {
        // (an anonymous inner class)
        private static final long serialVersionUID = 1;

        @Override
        protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
          return size() > LRUCache.this.cacheSize;
        }
      };
}
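// Illustrative use, not part of the original class; it assumes the usual get/put wrappers around
// the internal map. Sizing the table to ceil(cacheSize / loadFactor) + 1 buckets means the map
// never needs to rehash, and access-order mode plus removeEldestEntry yields LRU eviction.
LRUCache<String, String> cache = new LRUCache<String, String>(2);
cache.put("a", "1");
cache.put("b", "2");
cache.get("a");      // touch "a" so it becomes most recently used
cache.put("c", "3"); // evicts "b", the least recently used entry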
/**
 * This is a callback to be executed before resampleSingle is executed. The bilinear interpolation
 * coefficients are computed here and vary with the single, active slice. The offset into the
 * intermediate image is also computed since this also varies with the active slice. If encoding
 * of transparent data has occurred in the renderer constructor, then the current slice of the
 * encoding table is initialized for use by resampleSingle.
 */
protected void beforeResampleSingle() {
  // compute the 0-direction index ranges and weighting factors
  float fMin = (m_afShear[0] * m_iSlice) + m_afOffset[0];
  m_afA[0] = fMin - (float) Math.floor(fMin);
  m_afB[0] = 1.0f - m_afA[0];
  int iMin0 = (int) Math.ceil(fMin);

  // compute the 1-direction index ranges and weighting factors
  fMin = (m_afShear[1] * m_iSlice) + m_afOffset[1];
  m_afA[1] = fMin - (float) Math.floor(fMin);
  m_afB[1] = 1.0f - m_afA[1];
  int iMin1 = (int) Math.ceil(fMin);

  // offset into intermediate image of rendered voxel data
  m_iInterOffset = iMin0 + (m_iInterBound * iMin1);

  if (m_bDoEncodeSkip) {
    m_aasSliceEncode = m_aaasVolumeEncode[m_iSlice];
  }
}
/**
 * Is this read poorly modelled by all of the alleles in this map?
 *
 * <p>A read is poorly modelled when its likelihood is below what would be expected for a read
 * originating from one of the alleles given the maxErrorRatePerBase of the reads in general.
 *
 * <p>This function makes a number of key assumptions. First, that the likelihoods reflect the
 * total likelihood of the read. In other words, that the read would be fully explained by one of
 * the alleles. This means that the allele should be something like the full haplotype from which
 * the read might originate.
 *
 * <p>It further assumes that each error in the read occurs with a likelihood of -3 (Q30 confidence
 * per base). So for a read with a 10% error rate and Q30 bases that is 100 bp long, we'd expect to
 * see 10 real Q30 errors even against the true haplotype. So for this read to be well modelled by
 * at least one allele we'd expect a likelihood to be >= 10 * -3.
 *
 * @param read the read we want to evaluate
 * @param log10Likelihoods a list of the log10 likelihoods of the read against a set of
 *     haplotypes.
 * @param maxErrorRatePerBase the maximum error rate we'd expect for this read per base, in real
 *     space. So 0.01 means a 1% error rate
 * @return true if none of the log10 likelihoods imply that the read truly originated from one of
 *     the haplotypes
 */
protected boolean readIsPoorlyModelled(
    final GATKSAMRecord read,
    final Collection<Double> log10Likelihoods,
    final double maxErrorRatePerBase) {
  final double maxErrorsForRead =
      Math.min(2.0, Math.ceil(read.getReadLength() * maxErrorRatePerBase));
  final double log10QualPerBase = -4.0;
  final double log10MaxLikelihoodForTrueAllele = maxErrorsForRead * log10QualPerBase;

  for (final double log10Likelihood : log10Likelihoods)
    if (log10Likelihood >= log10MaxLikelihoodForTrueAllele) return false;

  return true;
}
/** @param dis - Data input stream to be used to fetch header data. */
public Header(DataInputStream dis) {
  // We are allocating 1 bit per slot, storing data for 32 slots in one integer.
  int numHeaderInts = (int) Math.ceil((float) numSlots / 32);
  header = new int[numHeaderInts];
  try {
    for (int index = 0; index < numHeaderInts; ++index) {
      header[index] = dis.readInt();
    }
  } catch (IOException e) {
    e.printStackTrace();
  }
}
private void initState() {
  int numNodes = state.getNodeMap().size();
  int numSlots = numNodes * slotsPerNode;
  this.legend = new Legend(null, null, 0);

  int latSides = (int) Math.ceil(Math.pow(numNodes, 1 / 3.0));
  log.info("Lattice of " + numNodes + " requires " + latSides + " per side");
  this.lattice = new Lattice<Lattice<Void>>(latSides);

  List<String> nodeNames = new ArrayList<String>();
  List<Lattice<Void>> nodeLatti = new ArrayList<Lattice<Void>>();
  for (GridNode node : state.getNodeMap().values()) {
    Lattice<Void> nodeLattice = new Lattice<Void>(nodeLatSides);
    List<String> slotNames = new ArrayList<String>();
    List<Void> slots = new ArrayList<Void>();
    for (int i = 0; i < slotsPerNode; i++) {
      slotNames.add("" + i);
      slots.add(null);
    }
    nodeLattice.addItems(slotNames, slots);
    nodeNames.add(node.getShortName());
    nodeLatti.add(nodeLattice);
    // log.warn("Adding lattice for node "+node.getShortName());
  }
  lattice.addItems(nodeNames, nodeLatti);

  // Initialize actors from the grid state
  for (GridNode node : state.getNodeMap().values()) {
    int s = 0;
    for (GridJob job : node.getSlots()) {
      if (job == null) continue;
      String slotName = s + "";
      JobActor jobActor = createJobActor(job);
      jobActor.pos = getLatticePos(node.getShortName(), slotName);
      log.info(
          "Starting job {} on slot: {}",
          job.getFullJobId(),
          node.getShortName() + "#" + slotName);
      addJobActor(job.getFullJobId(), jobActor);
      s++;
    }
  }
}
// http://jira.dotmarketing.net/browse/DOTCMS-2178
public static String getsize(File fileName) {
  String finalVal;
  long filesize = fileName.length();
  BigDecimal size = new BigDecimal(filesize);
  BigDecimal byteVal = null;
  BigDecimal changedByteVal = null;
  finalVal = "";
  if (filesize <= 0) {
    finalVal = "";
  } else if (filesize < MEGA_BYTE) {
    byteVal = new BigDecimal(KILO_BYTE);
    if (size != null) {
      changedByteVal = size.divide(byteVal, MathContext.UNLIMITED);
      finalVal = Long.toString(Math.round(Math.ceil(changedByteVal.doubleValue()))) + " KB";
    }
  } else if (filesize < GIGA_BYTE) {
    byteVal = new BigDecimal(MEGA_BYTE);
    if (size != null) {
      changedByteVal = size.divide(byteVal, MathContext.UNLIMITED);
      finalVal = Long.toString(Math.round(Math.ceil(changedByteVal.doubleValue()))) + " MB";
    }
  } else if (filesize < TERA_BYTE) {
    byteVal = new BigDecimal(GIGA_BYTE);
    if (size != null) {
      changedByteVal = size.divide(byteVal, MathContext.UNLIMITED);
      finalVal = Long.toString(Math.round(Math.ceil(changedByteVal.doubleValue()))) + " GB";
    }
  } else {
    byteVal = new BigDecimal(TERA_BYTE);
    if (size != null) {
      changedByteVal = size.divide(byteVal, MathContext.UNLIMITED);
      finalVal = Long.toString(Math.round(Math.ceil(changedByteVal.doubleValue()))) + " TB";
    }
  }
  return finalVal;
}
/**
 * Generates a synthetic network for the provided vertices in the given graph such that the
 * expected number of communities is generated with the specified expected number of edges.
 *
 * @param graph
 * @param vertices
 * @param expectedNumCommunities
 * @param expectedNumEdges
 * @return The actual number of edges generated. May be different from the expected number.
 */
public int generate(
    Graph graph, Iterable<Vertex> vertices, int expectedNumCommunities, int expectedNumEdges) {
  if (communitySize == null)
    throw new IllegalStateException("Need to initialize community size distribution");
  if (edgeDegree == null)
    throw new IllegalStateException("Need to initialize degree distribution");

  int numVertices = SizableIterable.sizeOf(vertices);
  Iterator<Vertex> iter = vertices.iterator();
  ArrayList<ArrayList<Vertex>> communities =
      new ArrayList<ArrayList<Vertex>>(expectedNumCommunities);
  Distribution communityDist = communitySize.initialize(expectedNumCommunities, numVertices);
  while (iter.hasNext()) {
    int nextSize = communityDist.nextValue(random);
    ArrayList<Vertex> community = new ArrayList<Vertex>(nextSize);
    for (int i = 0; i < nextSize && iter.hasNext(); i++) {
      community.add(iter.next());
    }
    if (!community.isEmpty()) communities.add(community);
  }

  double inCommunityPercentage = 1.0 - crossCommunityPercentage;
  Distribution degreeDist = edgeDegree.initialize(numVertices, expectedNumEdges);
  if (crossCommunityPercentage > 0 && communities.size() < 2)
    throw new IllegalArgumentException("Cannot have cross links with only one community");

  int addedEdges = 0;
  // System.out.println("Generating links on communities: "+communities.size());
  for (ArrayList<Vertex> community : communities) {
    for (Vertex v : community) {
      int degree = degreeDist.nextValue(random);
      degree =
          Math.min(degree, (int) Math.ceil((community.size() - 1) / inCommunityPercentage) - 1);
      Set<Vertex> inlinks = new HashSet<Vertex>();
      for (int i = 0; i < degree; i++) {
        Vertex selected = null;
        if (random.nextDouble() < crossCommunityPercentage
            || (community.size() - 1 <= inlinks.size())) {
          // Cross community
          ArrayList<Vertex> othercomm = null;
          while (othercomm == null) {
            othercomm = communities.get(random.nextInt(communities.size()));
            if (othercomm.equals(community)) othercomm = null;
          }
          selected = othercomm.get(random.nextInt(othercomm.size()));
        } else {
          // In community
          while (selected == null) {
            selected = community.get(random.nextInt(community.size()));
            if (v.equals(selected) || inlinks.contains(selected)) selected = null;
          }
          inlinks.add(selected);
        }
        addEdge(graph, v, selected);
        addedEdges++;
      }
    }
  }
  return addedEdges;
}