/**
 * Reads all methods, filtered by the action annotations, for building agent actions.
 *
 * @param p_class class
 * @param p_root root class
 * @return stream of all matching methods, including inherited ones
 */
private static Stream<Method> methods(final Class<?> p_class, final Class<?> p_root) {
  final Pair<Boolean, IAgentAction.EAccess> l_classannotation = CCommon.isActionClass(p_class);
  if (!l_classannotation.getLeft())
    return p_class.getSuperclass() == null
        ? Stream.of()
        : methods(p_class.getSuperclass(), p_root);

  final Predicate<Method> l_filter =
      IAgentAction.EAccess.WHITELIST.equals(l_classannotation.getRight())
          ? i -> !CCommon.isActionFiltered(i, p_root)
          : i -> CCommon.isActionFiltered(i, p_root);

  return Stream.concat(
      Arrays.stream(p_class.getDeclaredMethods())
          .parallel()
          .map(
              i -> {
                i.setAccessible(true);
                return i;
              })
          .filter(i -> !Modifier.isAbstract(i.getModifiers()))
          .filter(i -> !Modifier.isInterface(i.getModifiers()))
          .filter(i -> !Modifier.isNative(i.getModifiers()))
          .filter(i -> !Modifier.isStatic(i.getModifiers()))
          .filter(l_filter),
      methods(p_class.getSuperclass(), p_root));
}
public static <T extends Comparable<T>> Range<T> parse(String value, Function<String, T> parser) {
  Validate.notNull(parser, "Parser is required");
  if (StringUtils.isBlank(value) || StringUtils.equals(SEPARATOR, value)) {
    return Ranges.all();
  } else if (!StringUtils.contains(value, SEPARATOR)) {
    T element = parser.apply(value);
    return Ranges.atMost(element);
  } else {
    String lower = StringUtils.substringBefore(value, SEPARATOR);
    String upper = StringUtils.substringAfter(value, SEPARATOR);
    if (StringUtils.isBlank(lower)) {
      // ..n
      Pair<T, BoundType> boundary = parseUpperBoundary(upper, parser);
      return Ranges.upTo(boundary.getLeft(), boundary.getRight());
    } else if (StringUtils.isBlank(upper)) {
      // n..
      Pair<T, BoundType> boundary = parseLowerBoundary(lower, parser);
      return Ranges.downTo(boundary.getLeft(), boundary.getRight());
    } else {
      // n..m
      Pair<T, BoundType> down = parseLowerBoundary(lower, parser);
      Pair<T, BoundType> up = parseUpperBoundary(upper, parser);
      return Ranges.range(down.getLeft(), down.getRight(), up.getLeft(), up.getRight());
    }
  }
}
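// A minimal usage sketch for parse(...). Placing it in the same class and assuming that the
// SEPARATOR constant is ".." are illustrative assumptions, not confirmed by the source.
static void parseExamples() {
  Range<Integer> all = parse("..", Integer::valueOf);        // blank or separator-only -> Ranges.all()
  Range<Integer> atMostTen = parse("10", Integer::valueOf);  // no separator -> Ranges.atMost(10)
  Range<Integer> bounded = parse("1..10", Integer::valueOf); // bounds/types from parse*Boundary(...)
}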
@Override
public void run(int runs) {
  try {
    Connection connection = dataSource.getConnection();
    connection.setAutoCommit(false);
    long min = Long.MAX_VALUE;
    Pair<Long, Long> total = Pair.of(0L, 0L);
    long max = Long.MIN_VALUE;
    long start, elapsed = 0;
    for (int i = 0; i < runs; i++) {
      Map<String, String> values = ImmutableMap.of("email", RandomEmail.generateEmailAddress());
      PreparedStatement insert = new Insert("emails", values).getStatement(connection);
      start = System.nanoTime();
      insert.execute();
      elapsed = (System.nanoTime() - start) / 1000;
      min = Math.min(elapsed, min);
      max = Math.max(elapsed, max);
      total = Pair.of(total.getLeft() + elapsed, total.getRight() + 1);
    }
    connection.commit();
    connection.close();
    log.info(
        "N. of Insert with deferred commit executed: {} Total Execution Time: {} us, Mean per query: {} us, Max: {} us, Min: {} us",
        total.getRight(),
        total.getLeft(),
        total.getLeft() / total.getRight(),
        max,
        min);
  } catch (Exception e) {
    log.error("An error occurred while executing Insert Benchmark using manual commit", e);
  }
}
@SuppressWarnings("unchecked") @Override protected final void completeInitializationOp(Object[] os) throws HiveException { if (os.length != 0) { Pair<MapJoinTableContainer[], MapJoinTableContainerSerDe[]> pair = (Pair<MapJoinTableContainer[], MapJoinTableContainerSerDe[]>) os[0]; boolean spilled = false; for (MapJoinTableContainer container : pair.getLeft()) { if (container != null) { spilled = spilled || container.hasSpill(); } } if (!loadCalled && spilled) { // we can't use the cached table because it has spilled. loadHashTable(getExecContext(), MapredContext.get()); } else { // let's use the table from the cache. mapJoinTables = pair.getLeft(); mapJoinTableSerdes = pair.getRight(); } hashTblInitedOnce = true; } if (this.getExecContext() != null) { // reset exec context so that initialization of the map operator happens // properly this.getExecContext().setLastInputPath(null); this.getExecContext().setCurrentInputPath(null); } }
@Override
protected void transformStringContent(
    String reference,
    StringBuilder content,
    ImporterMetadata metadata,
    boolean parsed,
    int sectionIndex) {
  int flags = Pattern.DOTALL;
  if (!caseSensitive) {
    flags = flags | Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE;
  }
  for (Pair<String, String> pair : stripPairs) {
    List<Pair<Integer, Integer>> matches = new ArrayList<Pair<Integer, Integer>>();
    Pattern leftPattern = Pattern.compile(pair.getLeft(), flags);
    Matcher leftMatch = leftPattern.matcher(content);
    while (leftMatch.find()) {
      Pattern rightPattern = Pattern.compile(pair.getRight(), flags);
      Matcher rightMatch = rightPattern.matcher(content);
      if (rightMatch.find(leftMatch.end())) {
        if (inclusive) {
          matches.add(new ImmutablePair<Integer, Integer>(leftMatch.start(), rightMatch.end()));
        } else {
          matches.add(new ImmutablePair<Integer, Integer>(leftMatch.end(), rightMatch.start()));
        }
      } else {
        break;
      }
    }
    for (int i = matches.size() - 1; i >= 0; i--) {
      Pair<Integer, Integer> matchPair = matches.get(i);
      content.delete(matchPair.getLeft(), matchPair.getRight());
    }
  }
}
@Test
public void testGetMinAndMaxCharacter() {
  Pair<Character, Character> result = RangeSplitUtil.getMinAndMaxCharacter("abc%^&");
  Assert.assertEquals('%', result.getLeft().charValue());
  Assert.assertEquals('c', result.getRight().charValue());

  result = RangeSplitUtil.getMinAndMaxCharacter("\tAabcZx");
  Assert.assertEquals('\t', result.getLeft().charValue());
  Assert.assertEquals('x', result.getRight().charValue());
}
private void installDownstreamRules(ForwardingObjective fwd) {
  List<Pair<Instruction, Instruction>> vlanOps =
      vlanOps(fwd, L2ModificationInstruction.L2SubType.VLAN_POP);
  if (vlanOps == null) {
    return;
  }
  Instruction output = fetchOutput(fwd, "downstream");
  if (output == null) {
    return;
  }
  Pair<Instruction, Instruction> popAndRewrite = vlanOps.remove(0);
  TrafficSelector selector = fwd.selector();
  Criterion outerVlan = selector.getCriterion(Criterion.Type.VLAN_VID);
  Criterion innerVlan = selector.getCriterion(Criterion.Type.INNER_VLAN_VID);
  Criterion inport = selector.getCriterion(Criterion.Type.IN_PORT);
  if (outerVlan == null || innerVlan == null || inport == null) {
    log.error("Forwarding objective is underspecified: {}", fwd);
    fail(fwd, ObjectiveError.BADPARAMS);
    return;
  }
  Criterion innerVid = Criteria.matchVlanId(((VlanIdCriterion) innerVlan).vlanId());
  FlowRule.Builder outer =
      DefaultFlowRule.builder()
          .forDevice(deviceId)
          .fromApp(appId)
          .makePermanent()
          .withPriority(fwd.priority())
          .withSelector(buildSelector(inport, outerVlan))
          .withTreatment(
              buildTreatment(popAndRewrite.getLeft(), Instructions.transition(QQ_TABLE)));
  FlowRule.Builder inner =
      DefaultFlowRule.builder()
          .forDevice(deviceId)
          .fromApp(appId)
          .forTable(QQ_TABLE)
          .makePermanent()
          .withPriority(fwd.priority())
          .withSelector(buildSelector(inport, innerVid))
          .withTreatment(buildTreatment(popAndRewrite.getLeft(), output));
  applyRules(fwd, inner, outer);
}
public List<Pair<Integer, Integer>> solve(
    List<Pair<Integer, Integer>> ranges, Pair<Integer, Integer> targetRange) {
  List<Pair<Integer, Integer>> result = new ArrayList<>();
  int target, i, maxTarget, maxIndex;
  boolean match;
  if (targetRange.getLeft().equals(targetRange.getRight())) {
    ranges.forEach(
        range -> {
          if (range.getLeft() <= targetRange.getLeft()
              && range.getRight() >= targetRange.getRight()
              && result.isEmpty()) {
            result.add(range);
          }
        });
    return result;
  }
  Collections.sort(ranges, new RangeComparator());
  target = targetRange.getLeft();
  maxIndex = 0;
  while (target < targetRange.getRight()) {
    match = false;
    maxTarget = target;
    for (i = maxIndex; i < ranges.size(); i++) {
      final Pair<Integer, Integer> currentRange = ranges.get(i);
      if (currentRange.getLeft() <= target && currentRange.getRight() >= maxTarget) {
        match = true;
        maxTarget = currentRange.getRight();
        maxIndex = i;
      } else if (currentRange.getLeft() > target) {
        break;
      }
    }
    if (!match) {
      result.clear();
      break;
    }
    result.add(Pair.of(ranges.get(maxIndex).getLeft(), ranges.get(maxIndex).getRight()));
    target = maxTarget;
    maxIndex++;
  }
  return result;
}
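// Hypothetical usage sketch for solve(...); the concrete ranges are illustrative and the result
// shown assumes RangeComparator orders ranges by their lower bound. Note that solve(...) sorts
// the supplied list in place, so it must be mutable.
void solveExample() {
  List<Pair<Integer, Integer>> ranges =
      new ArrayList<>(Arrays.asList(Pair.of(1, 4), Pair.of(3, 8), Pair.of(7, 12)));
  List<Pair<Integer, Integer>> cover = solve(ranges, Pair.of(2, 10));
  // cover -> [(1,4), (3,8), (7,12)], a chain of input ranges spanning the target [2, 10];
  // an empty list is returned when no covering chain exists.
}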
/**
 * Explores the squares reachable from the starting location, alternating between the two
 * players on each step.
 *
 * @param grid the grid to explore
 * @param startingLoc index of the starting square
 * @return the outcome of the parity/size comparison between the two players' reachable squares
 */
public static boolean getConnectedSquareCount(GridChar grid, int startingLoc) {
  // int oldCount = (1 + getConnectedSquareCountOld(grid, startingLoc));
  // The boolean in the pair marks whether the square is reached on player 1's turn.
  Set<Pair<Integer, Boolean>> visitedNodes = Sets.newHashSet();
  LinkedList<Pair<Integer, Boolean>> toVisit = new LinkedList<>();
  toVisit.add(new ImmutablePair<>(startingLoc, true));
  while (!toVisit.isEmpty()) {
    Pair<Integer, Boolean> loc = toVisit.poll();
    if (visitedNodes.contains(loc)) continue;
    visitedNodes.add(loc);
    for (Direction dir : Direction.values()) {
      Integer childIdx = grid.getIndex(loc.getLeft(), dir);
      if (childIdx == null) continue;
      char sq = grid.getEntry(childIdx);
      if (sq == '#' || sq == 'K' || sq == 'T') continue;
      toVisit.add(new ImmutablePair<>(childIdx, !loc.getRight()));
    }
  }
  Set<Integer> fpPoints = Sets.newHashSet();
  Set<Integer> spPoints = Sets.newHashSet();
  for (Pair<Integer, Boolean> p : visitedNodes) {
    if (p.getRight()) fpPoints.add(p.getLeft());
    else spPoints.add(p.getLeft());
  }
  Set<Integer> shared = Sets.intersection(fpPoints, spPoints);
  Set<Integer> union = Sets.union(fpPoints, spPoints);
  if (!shared.isEmpty()) {
    return (union.size() + 1) % 2 == 0;
  } else {
    // log.debug("size {} size {}", fpPoints.size(), spPoints.size());
    return spPoints.size() < fpPoints.size();
  }
  // return (countFP + countSP + 1) % 2 == 0;
}
@Override
public void call(MessageRunner runner) {
  ItemPattern pattern = runner.<ItemPattern>getValue("pattern");
  WeightedItemList list = OrbAcquirableItems.idList;
  int size = list.size(), damageRemoved = 0;
  for (Iterator<WeightedItem> iter = list.iterator(); iter.hasNext(); ) {
    Pair<Integer, Boolean> info = iter.next().runBlacklistPattern(pattern);
    if (info.getRight()) iter.remove();
    damageRemoved += info.getLeft();
  }
  size = size - list.size();
  if (size == 0 && damageRemoved == 0)
    MessageLogger.logWarn("Did not find any items to remove.");
  else if (size == 0)
    MessageLogger.logOk("Removed $0 damage value(s) from items.", damageRemoved);
  else
    MessageLogger.logOk("Removed $0 item(s) and $1 damage value(s) in total.", size, damageRemoved);
  if (list.size() == 0)
    MessageLogger.logWarn("No items left in the list, falling back to generic chest loot.");
}
protected void enumerate(CandidateHostInfo hostInfo) {
  log.debug(
      "Enumerating candidates hostId [{}] pools {}",
      hostInfo.getHostId(),
      hostInfo.getPoolIds());
  Long candidateHostId = this.hosts ? hostInfo.getHostId() : null;
  Map<Pair<Class<?>, Long>, Object> cache = new HashMap<Pair<Class<?>, Long>, Object>();
  if (volumeIds.size() == 0) {
    pushCandidate(
        new AllocationCandidate(
            objectManager,
            cache,
            candidateHostId,
            hostInfo.getHostUuid(),
            hostInfo.getUsedPorts(),
            Collections.<Long, Long>emptyMap()));
  }
  for (List<Pair<Long, Long>> pairs : traverse(volumeIds, hostInfo.getPoolIds())) {
    Map<Long, Long> volumeToPool = new HashMap<Long, Long>();
    for (Pair<Long, Long> pair : pairs) {
      volumeToPool.put(pair.getLeft(), pair.getRight());
    }
    pushCandidate(
        new AllocationCandidate(
            objectManager,
            cache,
            candidateHostId,
            hostInfo.getHostUuid(),
            hostInfo.getUsedPorts(),
            volumeToPool));
  }
}
public static ItemStack identifyQuality(ItemStack stack) {
  if (stack == null) return null;
  Item item = stack.getItem();
  if (item instanceof ItemGemstone) {
    if (((ItemGemstone) item).getQuality(stack) != null) return stack;
  }
  @SuppressWarnings("unchecked")
  List<Pair<ItemStack, String>> gems =
      Lists.newArrayList(
          Pair.of(ElementsOfPower.gemRuby, "gemRuby"),
          Pair.of(ElementsOfPower.gemSapphire, "gemSapphire"),
          Pair.of(ElementsOfPower.gemCitrine, "gemCitrine"),
          Pair.of(ElementsOfPower.gemAgate, "gemAgate"),
          Pair.of(ElementsOfPower.gemQuartz, "gemQuartz"),
          Pair.of(ElementsOfPower.gemSerendibite, "gemSerendibite"),
          Pair.of(ElementsOfPower.gemEmerald, "gemEmerald"),
          Pair.of(ElementsOfPower.gemAmethyst, "gemAmethyst"),
          Pair.of(ElementsOfPower.gemDiamond, "gemDiamond"));
  int[] ids = OreDictionary.getOreIDs(stack);
  Set<String> names = Sets.newHashSet();
  for (int i : ids) {
    names.add(OreDictionary.getOreName(i));
  }
  for (Pair<ItemStack, String> target : gems) {
    if (names.contains(target.getRight())) {
      return setRandomQualityVariant(target.getLeft().copy());
    }
  }
  return stack;
}
public static int getReadCoordinateForReferenceCoordinate(
    final int alignmentStart,
    final Cigar cigar,
    final int refCoord,
    final ClippingTail tail,
    final boolean allowGoalNotReached) {
  final Pair<Integer, Boolean> result =
      getReadCoordinateForReferenceCoordinate(alignmentStart, cigar, refCoord, allowGoalNotReached);
  int readCoord = result.getLeft();

  // Corner case one: clipping the right tail and falls on deletion, move to the next
  // read coordinate. It is not a problem for the left tail because the default answer
  // from getReadCoordinateForReferenceCoordinate is to give the previous read coordinate.
  if (result.getRight() && tail == ClippingTail.RIGHT_TAIL) {
    readCoord++;
  }

  // clipping the left tail and first base is insertion, go to the next read coordinate
  // with the same reference coordinate. Advance to the next cigar element, or to the
  // end of the read if there is no next element.
  final CigarElement firstElementIsInsertion = readStartsWithInsertion(cigar);
  if (readCoord == 0 && tail == ClippingTail.LEFT_TAIL && firstElementIsInsertion != null) {
    readCoord = Math.min(firstElementIsInsertion.getLength(), cigar.getReadLength() - 1);
  }

  return readCoord;
}
@Override
public void doGet(HttpServletRequest req, HttpServletResponse res)
    throws ServletException, IOException {
  res.setContentType("application/json");
  Pair<DateTime, DateTime> fromTo = fromTo(req);
  orla.writeHourlyPercentiles(res.getOutputStream(), fromTo.getLeft(), fromTo.getRight());
}
/**
 * Gets a {@link Pair} of Boolean and String representing the block-comment status and the
 * string with comments removed.
 *
 * @param blockComment whether there is an active block comment
 * @param string the current string
 * @return the pair of boolean (block comment still open) and the comment-free string
 */
public Pair<Boolean, String> removeComments(boolean blockComment, String string) {
  if (blockComment) {
    int endBlock = indexOf(string, "*/", true);
    if (endBlock >= 0) {
      return removeComments(false, string.substring(endBlock + 2));
    }
    return Pair.of(true, "");
  } else {
    int startBlock = indexOf(string, "/*", true);
    int inlineBlock = indexOf(string, "//", true);
    if (startBlock < 0 && inlineBlock < 0) {
      return Pair.of(false, string);
    }
    if (startBlock >= 0 && (inlineBlock < 0 || startBlock < inlineBlock)) {
      Pair<Boolean, String> booleanStringPair =
          removeComments(true, string.substring(startBlock + 2));
      return Pair.of(
          booleanStringPair.getLeft(),
          string.substring(0, startBlock) + booleanStringPair.getRight());
    }
    if (inlineBlock >= 0 && (startBlock < 0 || inlineBlock < startBlock)) {
      return Pair.of(false, string.substring(0, inlineBlock));
    }
  }
  return null;
}
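// A small usage sketch for removeComments(...), assumed to live in the same (unnamed) class;
// the traced result assumes the private indexOf(...) helper behaves like String.indexOf here.
void removeCommentsExample() {
  Pair<Boolean, String> cleaned =
      removeComments(false, "int a = 1; /* note */ int b = 2; // tail");
  // cleaned.getLeft()  -> false (no block comment left open)
  // cleaned.getRight() -> "int a = 1;  int b = 2; "
}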
/**
 * Given a frontA with point [2,3] and a frontB with point [1,2], the value of the
 * setCoverage(frontA, frontB) == 0 and setCoverage(frontB, frontA) == 1
 */
@Test
public void shouldExecuteReturnTheRightValueIfTheFrontsContainOnePointWhichIsNotTheSame() {
  int numberOfPoints = 1;
  int numberOfDimensions = 2;
  Front frontA = new ArrayFront(numberOfPoints, numberOfDimensions);
  Front frontB = new ArrayFront(numberOfPoints, numberOfDimensions);

  Point point1 = new ArrayPoint(numberOfDimensions);
  point1.setDimensionValue(0, 2.0);
  point1.setDimensionValue(1, 3.0);

  Point point2 = new ArrayPoint(numberOfDimensions);
  point2.setDimensionValue(0, 1.0);
  point2.setDimensionValue(1, 2.0);

  frontA.setPoint(0, point1);
  frontB.setPoint(0, point2);

  Pair<Double, Double> result =
      setCoverage.evaluate(
          new ImmutablePair(
              FrontUtils.convertFrontToSolutionList(frontA),
              FrontUtils.convertFrontToSolutionList(frontB)));

  assertEquals(0.0, result.getLeft(), EPSILON);
  assertEquals(1.0, result.getRight(), EPSILON);
}
/**
 * Extract the text from a HTML based string. This is similar to what HTML.fromHtml(...) does,
 * but this method also removes the embedded images instead of replacing them by a small
 * rectangular representation character.
 *
 * @param html the HTML content to extract the text from
 * @return the extracted plain text
 */
public static String extractText(CharSequence html) {
  String result = html.toString();

  // recognize images in textview HTML contents
  if (html instanceof Spanned) {
    Spanned text = (Spanned) html;
    Object[] styles = text.getSpans(0, text.length(), Object.class);
    ArrayList<Pair<Integer, Integer>> removals = new ArrayList<Pair<Integer, Integer>>();
    for (Object style : styles) {
      if (style instanceof ImageSpan) {
        int start = text.getSpanStart(style);
        int end = text.getSpanEnd(style);
        removals.add(Pair.of(start, end));
      }
    }

    // sort reversed and delete image spans
    Collections.sort(
        removals,
        new Comparator<Pair<Integer, Integer>>() {
          @Override
          public int compare(Pair<Integer, Integer> lhs, Pair<Integer, Integer> rhs) {
            return rhs.getRight().compareTo(lhs.getRight());
          }
        });
    result = text.toString();
    for (Pair<Integer, Integer> removal : removals) {
      result = result.substring(0, removal.getLeft()) + result.substring(removal.getRight());
    }
  }

  // some line breaks are still in the text, source is unknown
  return StringUtils.replace(result, "<br />", "\n").trim();
}
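// Hedged usage sketch: strip the embedded images out of a Spanned produced by Android's
// Html.fromHtml(...) before showing it as plain text; the message markup is illustrative only.
static void extractTextExample() {
  Spanned spanned = Html.fromHtml("<p>Hello <img src=\"icon.png\"/> world</p>");
  String plain = extractText(spanned); // image placeholder characters are removed from the result
}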
private SpillableSetImpl<V> getHelper(@NotNull K key) {
  SpillableSetImpl<V> spillableSet = cache.get(key);
  if (spillableSet == null) {
    long keyTime = -1;
    Pair<Integer, V> meta;
    if (timeExtractor != null) {
      keyTime = timeExtractor.getTime(key);
    }
    meta = map.get(key);
    if (meta == null) {
      return null;
    }
    Slice keyPrefix = keyValueSerdeManager.serializeDataKey(key, false);
    if (timeExtractor != null) {
      spillableSet =
          new SpillableSetImpl<>(
              keyPrefix.toByteArray(), store, valueSerde, new FixedTimeExtractor(keyTime));
    } else {
      spillableSet = new SpillableSetImpl<>(bucket, keyPrefix.toByteArray(), store, valueSerde);
    }
    spillableSet.setSize(meta.getLeft());
    spillableSet.setHead(meta.getRight());
    spillableSet.setup(context);
  }
  cache.put(key, spillableSet);
  return spillableSet;
}
@Override
public IFlexibleBakedModel bake(
    IModelState state,
    VertexFormat format,
    Function<ResourceLocation, TextureAtlasSprite> bakedTextureGetter) {
  IFlexibleBakedModel bakedBase = null;
  if (base != null) bakedBase = base.bake(state, format, bakedTextureGetter);
  ImmutableMap.Builder<String, IFlexibleBakedModel> mapBuilder = ImmutableMap.builder();
  for (Entry<String, Pair<IModel, IModelState>> entry : parts.entrySet()) {
    Pair<IModel, IModelState> pair = entry.getValue();
    mapBuilder.put(
        entry.getKey(), pair.getLeft().bake(pair.getRight(), format, bakedTextureGetter));
  }
  if (bakedBase == null && parts.isEmpty()) {
    FMLLog.log(
        Level.ERROR,
        "MultiModel %s is empty (no base model or parts were provided/resolved)",
        location);
    IModel missing = ModelLoaderRegistry.getMissingModel();
    return missing.bake(missing.getDefaultState(), format, bakedTextureGetter);
  }
  return new Baked(location, true, bakedBase, mapBuilder.build());
}
@Override
public Pair<? extends IFlexibleBakedModel, Matrix4f> handlePerspective(
    TransformType cameraTransformType) {
  if (transforms.isEmpty()) return Pair.of(this, null);
  Pair<Baked, TRSRTransformation> p = transforms.get(cameraTransformType);
  return Pair.of(p.getLeft(), p.getRight().getMatrix());
}
@Override
public boolean containsKey(Object key) {
  if (cache.contains((K) key)) {
    return true;
  }
  Pair<Integer, V> meta = map.get((K) key);
  return meta != null && meta.getLeft() > 0;
}
/**
 * Set the api authorizations by parsing the String representation of the authorizations.
 *
 * @param throwException true if an exception must be thrown in case of error
 * @throws ApiSignatureException if an error occurs while parsing and {@code throwException} is true
 */
private void fillAllowedApiPatterns(boolean throwException) throws ApiSignatureException {
  allowedApiPatterns = new ArrayList<Pair<ApiMethod, Pattern>>();
  for (Pair<ApiMethod, String> apiAuthorization :
      parseAuthorization(
          getApplicationName(), getApiAuthorizationsAsString(), pattern, throwException)) {
    Pattern compiledPattern = Pattern.compile(apiAuthorization.getRight());
    allowedApiPatterns.add(Pair.of(apiAuthorization.getLeft(), compiledPattern));
  }
}
public <M extends IModel, S extends IModelState> MultiModelState(
    ImmutableList<Pair<M, S>> states) {
  ImmutableMap.Builder<MultiModelPart, S> builder = ImmutableMap.builder();
  for (int i = 0; i < states.size(); i++) {
    Pair<M, S> pair = states.get(i);
    builder.put(new MultiModelPart(pair.getLeft(), i), pair.getRight());
  }
  this.states = builder.build();
}
private TaskCluster[] computeTaskClusters(
    ActivityCluster ac, JobRun jobRun, Map<ActivityId, ActivityPlan> activityPlanMap) {
  Set<ActivityId> activities = ac.getActivityMap().keySet();
  Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity =
      computeTaskConnectivity(jobRun, activityPlanMap, activities);

  TaskCluster[] taskClusters =
      ac.getActivityClusterGraph().isUseConnectorPolicyForScheduling()
          ? buildConnectorPolicyAwareTaskClusters(ac, activityPlanMap, taskConnectivity)
          : buildConnectorPolicyUnawareTaskClusters(ac, activityPlanMap);

  for (TaskCluster tc : taskClusters) {
    Set<TaskCluster> tcDependencyTaskClusters = tc.getDependencyTaskClusters();
    for (Task ts : tc.getTasks()) {
      TaskId tid = ts.getTaskId();
      List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity.get(tid);
      if (cInfoList != null) {
        for (Pair<TaskId, ConnectorDescriptorId> p : cInfoList) {
          Task targetTS =
              activityPlanMap
                  .get(p.getLeft().getActivityId())
                  .getTasks()[p.getLeft().getPartition()];
          TaskCluster targetTC = targetTS.getTaskCluster();
          if (targetTC != tc) {
            ConnectorDescriptorId cdId = p.getRight();
            PartitionId pid =
                new PartitionId(
                    jobRun.getJobId(), cdId, tid.getPartition(), p.getLeft().getPartition());
            tc.getProducedPartitions().add(pid);
            targetTC.getRequiredPartitions().add(pid);
            partitionProducingTaskClusterMap.put(pid, tc);
          }
        }
      }
      for (TaskId dTid : ts.getDependencies()) {
        TaskCluster dTC = getTaskCluster(dTid);
        dTC.getDependentTaskClusters().add(tc);
        tcDependencyTaskClusters.add(dTC);
      }
    }
  }
  return taskClusters;
}
@Override
public Collection<ResourceLocation> getTextures() {
  Set<ResourceLocation> deps = Sets.newHashSet();
  if (base != null) deps.addAll(base.getTextures());
  for (Pair<IModel, IModelState> pair : parts.values()) {
    deps.addAll(pair.getLeft().getTextures());
  }
  return deps;
}
private BlockchainImpl createBlockchain(Genesis genesis) {
  IndexedBlockStore blockStore = new IndexedBlockStore();
  blockStore.init(new HashMapDB(), new HashMapDB());

  Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

  ProgramInvokeFactoryImpl programInvokeFactory = new ProgramInvokeFactoryImpl();
  EthereumListenerAdapter listener = new EthereumListenerAdapter();

  BlockchainImpl blockchain = new BlockchainImpl(blockStore, repository);
  blockchain.setParentHeaderValidator(new DependentBlockHeaderRuleAdapter());
  blockchain.setProgramInvokeFactory(programInvokeFactory);
  programInvokeFactory.setBlockchain(blockchain);
  blockchain.byTest = true;

  PendingStateImpl pendingState = new PendingStateImpl(listener, blockchain);
  pendingState.init();
  pendingState.setBlockchain(blockchain);
  blockchain.setPendingState(pendingState);

  Repository track = repository.startTracking();
  for (ByteArrayWrapper key : genesis.getPremine().keySet()) {
    track.createAccount(key.getData());
    track.addBalance(key.getData(), genesis.getPremine().get(key).getBalance());
  }
  for (Pair<byte[], BigInteger> acc : initialBallances) {
    track.createAccount(acc.getLeft());
    track.addBalance(acc.getLeft(), acc.getRight());
  }
  track.commit();

  blockStore.saveBlock(genesis, genesis.getCumulativeDifficulty(), true);
  blockchain.setBestBlock(genesis);
  blockchain.setTotalDifficulty(genesis.getCumulativeDifficulty());
  return blockchain;
}
/**
 * Export the model (ABox) in a legacy format, such as GAF or GPAD.
 *
 * @param model
 * @param curieHandler
 * @param useModuleReasoner
 * @param format format name or null for default
 * @return modelContent
 * @throws IOException
 * @throws OWLOntologyCreationException
 */
public String exportModelLegacy(
    ModelContainer model, CurieHandler curieHandler, boolean useModuleReasoner, String format)
    throws IOException, OWLOntologyCreationException {
  final OWLOntology aBox = model.getAboxOntology();
  OWLReasoner r;
  if (useModuleReasoner) {
    r = model.getModuleReasoner();
  } else {
    r = model.getReasoner();
  }
  LegoAllIndividualToGeneAnnotationTranslator translator =
      new LegoAllIndividualToGeneAnnotationTranslator(
          new OWLGraphWrapper(model.getTboxOntology()), curieHandler, r, ecoMapper);
  Pair<GafDocument, BioentityDocument> pair =
      translator.translate(model.getModelId().toString(), aBox, null);
  ByteArrayOutputStream outputStream = null;
  try {
    outputStream = new ByteArrayOutputStream();
    if (format == null || "gaf".equalsIgnoreCase(format)) {
      // GAF
      GafWriter writer = new GafWriter();
      try {
        writer.setStream(new PrintStream(outputStream));
        GafDocument gafdoc = pair.getLeft();
        writer.write(gafdoc);
      } finally {
        writer.close();
      }
    } else if ("gpad".equalsIgnoreCase(format)) {
      // GPAD version 1.2
      GpadWriter writer = new GpadWriter(new PrintWriter(outputStream), 1.2);
      writer.write(pair.getLeft());
    } else {
      throw new IOException("Unknown legacy format: " + format);
    }
    return outputStream.toString();
  } finally {
    IOUtils.closeQuietly(outputStream);
  }
}
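// Hypothetical usage sketch of exportModelLegacy(...); the model and curieHandler arguments are
// assumed to be provided by the surrounding application code.
void exportModelLegacyExample(ModelContainer model, CurieHandler curieHandler)
    throws IOException, OWLOntologyCreationException {
  String gaf = exportModelLegacy(model, curieHandler, false, "gaf");   // GAF export
  String gpad = exportModelLegacy(model, curieHandler, false, "gpad"); // GPAD 1.2 export
}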
@SuppressWarnings("unchecked") public <TE extends TileEntity> TE getTinkerTE(Class<TE> clazz) { for (Pair<BlockPos, IBlockState> pair : tinkerStationBlocks) { TileEntity te = this.world.getTileEntity(pair.getLeft()); if (te != null && clazz.isAssignableFrom(te.getClass())) { return (TE) te; } } return null; }
/* Converts a relative URL into the URL that is usable by the Lunex RequestProcessor. */
private String resolveUrl(final Document document) {
  try {
    String resourceSpecificUri = null;
    switch (snapType) {
      case Create:
        resourceSpecificUri = ServiceURIInfo.CR_URI_LIST.get(resource);
        break;
      case Read:
        resourceSpecificUri = ServiceURIInfo.RR_URI_LIST.get(resource);
        break;
      case Delete:
        resourceSpecificUri = ServiceURIInfo.DR_URI_LIST.get(resource);
        break;
      case Update:
        resourceSpecificUri = ServiceURIInfo.UR_URI_LIST.get(resource);
        break;
    }
    if (queryParams != null) {
      CharSequence source, target;
      for (Pair<String, ExpressionProperty> paramPair : queryParams) {
        source =
            new StringBuilder()
                .append(OPENTAG)
                .append(paramPair.getLeft())
                .append(CLOSETAG)
                .toString();
        // Evaluate first, then convert to a string, so a null result falls back to the
        // empty string instead of causing a NullPointerException.
        Object evaluated = paramPair.getRight().eval(document);
        target = evaluated == null ? StringUtils.EMPTY : evaluated.toString();
        resourceSpecificUri = resourceSpecificUri.replace(source, target);
      }
    }
    return new StringBuilder()
        .append(HTTP)
        .append(COLON)
        .append(DOUBLE_SLASH)
        .append(IPAddress)
        .append(resourceSpecificUri)
        .toString();
  } catch (Exception e) {
    String msg =
        String.format(
            INVALID_URI,
            new StringBuilder()
                .append(HTTP)
                .append(COLON)
                .append(DOUBLE_SLASH)
                .append(IPAddress)
                .toString());
    throw new ExecutionException(e, msg).withReason(msg).withResolution(INVALID_URI_RESOLUTION);
  }
}
@Override
public Map<Long, String> validate(List<K> listOfLoadedObjects) throws IOException {
  Map<Long, String> invalidRows = new HashMap<Long, String>();
  if (listOfLoadedObjects != null) {
    for (K loadedObject : listOfLoadedObjects) {
      Pair<Boolean, String> result = loadedObject.validateAndComplete();
      if (!result.getLeft()) {
        invalidRows.put(loadedObject.getSourceRowNumber(), result.getRight());
      }
    }
  }
  return invalidRows;
}