/** Extracts document and cluster lists before serialization. */
@Persist
private void beforeSerialization() {
    /*
     * See http://issues.carrot2.org/browse/CARROT-693; this monitor does not save us
     * in a multi-threaded environment anyway. A better solution would be to prepare
     * this eagerly in the constructor, but we try to balance overhead and full
     * correctness here.
     */
    synchronized (this) {
        query = (String) attributes.get(AttributeNames.QUERY);

        if (getDocuments() != null) {
            documents = Lists.newArrayList(getDocuments());
        } else {
            documents = null;
        }

        if (getClusters() != null) {
            clusters = Lists.newArrayList(getClusters());
        } else {
            clusters = null;
        }

        otherAttributesForSerialization = MapUtils.asHashMap(SimpleXmlWrappers.wrap(attributes));
        otherAttributesForSerialization.remove(AttributeNames.QUERY);
        otherAttributesForSerialization.remove(AttributeNames.CLUSTERS);
        otherAttributesForSerialization.remove(AttributeNames.DOCUMENTS);
        if (otherAttributesForSerialization.isEmpty()) {
            otherAttributesForSerialization = null;
        }
    }
}
private static List<String> loadLines(InputStream stream, Charset charset) throws IOException {
    BufferedReader reader = new BufferedReader(new InputStreamReader(stream, charset));
    boolean threw = true;
    try {
        List<String> strings = Lists.newArrayList();
        String line = reader.readLine();
        while (line != null) {
            strings.add(line);
            line = reader.readLine();
        }
        threw = false;
        return strings;
    } finally {
        try {
            reader.close();
        } catch (IOException e) {
            if (!threw) {
                throw e; // if there was an initial exception, don't hide it
            }
        }
    }
}
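// Usage sketch (not from the original source): reading a UTF-8 text file through loadLines.
// The file name "input.txt" and this wrapper method are assumptions made for illustration;
// loadLines always closes the reader (and thus the stream) in its finally block, so the
// caller does not have to. Assumes java.io.FileInputStream and
// java.nio.charset.StandardCharsets are imported.
private static List<String> loadLinesExample() throws IOException {
    return loadLines(new FileInputStream("input.txt"), StandardCharsets.UTF_8);
}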
@Override
public void readFields(DataInput input) throws IOException {
    byte[] tableNameBytes = Bytes.readByteArray(input);
    PName tableName = new PNameImpl(tableNameBytes);
    PTableType tableType = PTableType.values()[WritableUtils.readVInt(input)];
    long sequenceNumber = WritableUtils.readVLong(input);
    long timeStamp = input.readLong();
    byte[] pkNameBytes = Bytes.readByteArray(input);
    String pkName = pkNameBytes.length == 0 ? null : Bytes.toString(pkNameBytes);
    int nColumns = WritableUtils.readVInt(input);
    List<PColumn> columns = Lists.newArrayListWithExpectedSize(nColumns);
    for (int i = 0; i < nColumns; i++) {
        PColumn column = new PColumnImpl();
        column.readFields(input);
        columns.add(column);
    }
    Map<String, byte[][]> guidePosts = new HashMap<String, byte[][]>();
    int size = WritableUtils.readVInt(input);
    for (int i = 0; i < size; i++) {
        String key = WritableUtils.readString(input);
        int valueSize = WritableUtils.readVInt(input);
        byte[][] value = new byte[valueSize][];
        for (int j = 0; j < valueSize; j++) {
            value[j] = Bytes.readByteArray(input);
        }
        guidePosts.put(key, value);
    }
    PTableStats stats = new PTableStatsImpl(guidePosts);
    init(tableName, tableType, timeStamp, sequenceNumber, pkName, columns, stats);
}
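// Sketch only (my assumption, not the project's actual code): the write side implied by the
// read order above, shown to make the wire format explicit. It reuses the same HBase/Hadoop
// helpers (Bytes, WritableUtils) as readFields; the assumption that PColumn mirrors its
// readFields with a write(DataOutput) is flagged inline.
public static void writeFieldsSketch(DataOutput output, byte[] tableNameBytes, int typeOrdinal,
        long sequenceNumber, long timeStamp, byte[] pkNameBytes,
        List<PColumn> columns, Map<String, byte[][]> guidePosts) throws IOException {
    Bytes.writeByteArray(output, tableNameBytes);
    WritableUtils.writeVInt(output, typeOrdinal);
    WritableUtils.writeVLong(output, sequenceNumber);
    output.writeLong(timeStamp);
    Bytes.writeByteArray(output, pkNameBytes); // an empty array encodes a null pk name
    WritableUtils.writeVInt(output, columns.size());
    for (PColumn column : columns) {
        column.write(output); // assumed: PColumn exposes a write() mirroring its readFields()
    }
    WritableUtils.writeVInt(output, guidePosts.size());
    for (Map.Entry<String, byte[][]> entry : guidePosts.entrySet()) {
        WritableUtils.writeString(output, entry.getKey());
        WritableUtils.writeVInt(output, entry.getValue().length);
        for (byte[] guidePost : entry.getValue()) {
            Bytes.writeByteArray(output, guidePost);
        }
    }
}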
/**
 * Splits each raw record into a ThreadInfo and puts it into a Multimap<String, ThreadInfo>
 * keyed by the wait ID (key = wait ID, value = ThreadInfo entity).
 * Note: a Multimap behaves like Map<key, collection>, i.e. key:value -> 1:n.
 *
 * @param rawDatas raw thread dump records
 * @return multimap of wait ID to the threads waiting on it
 */
public Multimap<String, ThreadInfo> getThreadInfo(List<String[]> rawDatas) {
    Multimap<String, ThreadInfo> w_IdMap = HashMultimap.create();
    // Compile the patterns once instead of once per record.
    Pattern t_id = Pattern.compile("tid=(0x[\\d\\w]+)");
    Pattern t_name = Pattern.compile("\"([\\d\\D]*)\"");
    Pattern w_Id = Pattern.compile("\\[(0x[\\d\\w]+)\\]");
    for (String[] rawData : rawDatas) {
        ThreadInfo threadInfo = new ThreadInfo();
        Matcher tIdMatcher = t_id.matcher(rawData[0]);
        Matcher nameMatcher = t_name.matcher(rawData[0]);
        Matcher w_IdMatcher = w_Id.matcher(rawData[0]);
        if (tIdMatcher.find()) {
            threadInfo.setThreadId(tIdMatcher.group(1));
        }
        if (nameMatcher.find()) {
            threadInfo.setThreadName(nameMatcher.group(1));
        }
        if (w_IdMatcher.find()) {
            threadInfo.setWaitThreadId(w_IdMatcher.group(1));
        }
        threadInfo.setThreadCondition(rawData[1]);
        w_IdMap.put(threadInfo.getWaitThreadId(), threadInfo);
    }
    return w_IdMap;
}
/**
 * Removes the records of threads that are not in the "waiting on condition" state.
 *
 * @param rawDatas raw thread dump records
 * @return only the records whose header line contains "waiting on condition"
 */
public List<String[]> remove(List<String[]> rawDatas) {
    List<String[]> pickedData = Lists.newArrayList();
    for (String[] temp : rawDatas) {
        if (temp[0].lastIndexOf("waiting on condition") > -1) {
            pickedData.add(temp);
        }
    }
    return pickedData;
}
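// Usage sketch (illustration only, not from the original source): chaining the two helpers
// above to group "waiting on condition" threads by the ID they are waiting on. The
// ThreadDumpAnalyzer name is a hypothetical owner of those methods; each String[] record is
// expected to hold the thread header line at index 0 and its condition line at index 1.
private static void printWaitersSketch(ThreadDumpAnalyzer analyzer, List<String[]> rawDatas) {
    Multimap<String, ThreadInfo> byWaitId = analyzer.getThreadInfo(analyzer.remove(rawDatas));
    for (String waitId : byWaitId.keySet()) {
        System.out.println(waitId + " <- " + byWaitId.get(waitId).size() + " waiting thread(s)");
    }
}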
public static List<NameValuePair> convertAttributesToNameValuePair(Map<String, String> aAttributes) {
    List<NameValuePair> myNameValuePairs = Lists.newArrayList();
    for (Map.Entry<String, String> myEntry : aAttributes.entrySet()) {
        myNameValuePairs.add(new BasicNameValuePair(myEntry.getKey(), myEntry.getValue()));
    }
    return myNameValuePairs;
}
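// Usage sketch (illustration only): converting a small attribute map and handing the pairs to
// Apache HttpClient's UrlEncodedFormEntity for a form POST. The endpoint URL and parameter
// values are made up; only convertAttributesToNameValuePair comes from the code above.
private static HttpPost buildSearchPostSketch() throws UnsupportedEncodingException {
    Map<String, String> attributes = ImmutableMap.of("q", "guava", "page", "1");
    List<NameValuePair> pairs = convertAttributesToNameValuePair(attributes);
    HttpPost post = new HttpPost("http://example.com/search"); // assumed endpoint
    post.setEntity(new UrlEncodedFormEntity(pairs, "UTF-8"));
    return post;
}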
@Override
public Iterable<DataKey> getSubKeys() {
    final Tag tag = this.findLastTag(this.path, false);
    if (!(tag instanceof CompoundTag)) {
        return Collections.<DataKey>emptyList();
    }
    final List<DataKey> subKeys = Lists.newArrayList();
    for (final String name : ((CompoundTag) tag).getValue().keySet()) {
        subKeys.add(new NBTKey(this.createRelativeKey(name)));
    }
    return subKeys;
}
@GET @Path("/files") public List<ToDo> files() { List<ToDo> response = Lists.newArrayList(); File[] files = new File("/lib").listFiles(); if (files != null) { for (File file : files) { if (file.isFile()) response.add( new ToDo( ++id, file.getName(), file.isFile() ? file.length() : -1, System.currentTimeMillis())); } } return response; }
protected void scan() {
    if (configuration.getUrls() == null || configuration.getUrls().isEmpty()) {
        if (log != null) log.warn("given scan urls are empty. set urls in the configuration");
        return;
    }

    if (log != null && log.isDebugEnabled()) {
        log.debug("going to scan these urls:\n" + Joiner.on("\n").join(configuration.getUrls()));
    }

    long time = System.currentTimeMillis();
    int scannedUrls = 0;
    ExecutorService executorService = configuration.getExecutorService();
    List<Future<?>> futures = Lists.newArrayList();

    for (final URL url : configuration.getUrls()) {
        try {
            if (executorService != null) {
                futures.add(executorService.submit(new Runnable() {
                    public void run() {
                        if (log != null && log.isDebugEnabled())
                            log.debug("[" + Thread.currentThread().toString() + "] scanning " + url);
                        scan(url);
                    }
                }));
            } else {
                scan(url);
            }
            scannedUrls++;
        } catch (ReflectionsException e) {
            if (log != null && log.isWarnEnabled())
                log.warn("could not create Vfs.Dir from url. ignoring the exception and continuing", e);
        }
    }

    // todo use CompletionService
    if (executorService != null) {
        for (Future future : futures) {
            try {
                future.get();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }

    time = System.currentTimeMillis() - time;

    // gracefully shutdown the parallel scanner executor service.
    if (executorService != null) {
        executorService.shutdown();
    }

    if (log != null) {
        int keys = 0;
        int values = 0;
        for (String index : store.keySet()) {
            keys += store.get(index).keySet().size();
            values += store.get(index).size();
        }

        log.info(format("Reflections took %d ms to scan %d urls, producing %d keys and %d values %s",
                time, scannedUrls, keys, values,
                executorService != null && executorService instanceof ThreadPoolExecutor
                        ? format("[using %d cores]", ((ThreadPoolExecutor) executorService).getMaximumPoolSize())
                        : ""));
    }
}
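// Usage sketch (an assumption, not part of the method above): scan() is normally driven
// through the public org.reflections entry points. The builder method names below are what
// the 0.9.x API exposes as far as I recall; treat them as assumed rather than verified.
// Passing an ExecutorService exercises the parallel branch in scan().
private static Set<Class<? extends Runnable>> scanForRunnablesSketch() {
    Reflections reflections = new Reflections(new ConfigurationBuilder()
            .setUrls(ClasspathHelper.forPackage("com.example"))    // assumed package name
            .setExecutorService(Executors.newFixedThreadPool(4))); // drives the parallel path
    return reflections.getSubTypesOf(Runnable.class);
}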
private void init(PName name, PTableType type, long timeStamp, long sequenceNumber,
        String pkName, List<PColumn> columns, PTableStats stats) {
    this.name = name;
    this.type = type;
    this.timeStamp = timeStamp;
    this.sequenceNumber = sequenceNumber;
    this.columnsByName = ArrayListMultimap.create(columns.size(), 1);
    this.pkName = pkName;
    List<PColumn> pkColumns = Lists.newArrayListWithExpectedSize(columns.size() - 1);
    PColumn[] allColumns = new PColumn[columns.size()];
    RowKeySchemaBuilder builder = new RowKeySchemaBuilder();
    for (int i = 0; i < allColumns.length; i++) {
        PColumn column = columns.get(i);
        allColumns[column.getPosition()] = column;
        PName familyName = column.getFamilyName();
        if (familyName == null) {
            pkColumns.add(column);
            builder.addField(column);
        }
        columnsByName.put(column.getName().getString(), column);
    }
    this.pkColumns = ImmutableList.copyOf(pkColumns);
    this.rowKeySchema = builder.setMinNullable(pkColumns.size()).build();
    this.allColumns = ImmutableList.copyOf(allColumns);

    // Two pass so that column order in column families matches overall column order
    // and to ensure that column family order is constant
    int maxExpectedSize = allColumns.length - pkColumns.size();
    // Maintain iteration order so that column families are ordered as they are listed
    Map<PName, List<PColumn>> familyMap = Maps.newLinkedHashMap();
    for (PColumn column : allColumns) {
        PName familyName = column.getFamilyName();
        if (familyName != null) {
            List<PColumn> columnsInFamily = familyMap.get(familyName);
            if (columnsInFamily == null) {
                columnsInFamily = Lists.newArrayListWithExpectedSize(maxExpectedSize);
                familyMap.put(familyName, columnsInFamily);
            }
            columnsInFamily.add(column);
        }
    }

    Iterator<Map.Entry<PName, List<PColumn>>> iterator = familyMap.entrySet().iterator();
    PColumnFamily[] families = new PColumnFamily[familyMap.size()];
    ImmutableMap.Builder<String, PColumnFamily> familyByString = ImmutableMap.builder();
    ImmutableSortedMap.Builder<byte[], PColumnFamily> familyByBytes =
            ImmutableSortedMap.orderedBy(Bytes.BYTES_COMPARATOR);
    for (int i = 0; i < families.length; i++) {
        Map.Entry<PName, List<PColumn>> entry = iterator.next();
        PColumnFamily family = new PColumnFamilyImpl(entry.getKey(), entry.getValue());
        families[i] = family;
        familyByString.put(family.getName().getString(), family);
        familyByBytes.put(family.getName().getBytes(), family);
    }
    this.families = ImmutableList.copyOf(families);
    this.familyByBytes = familyByBytes.build();
    this.familyByString = familyByString.build();
    this.stats = stats;
}