/**
 * Marks every file request registered for {@code surl} as failed after a TURL retrieval
 * error, then notifies the remote request machinery that the remote file request is done.
 *
 * @param surl the SURL whose TURL retrieval failed
 * @param reason human-readable failure reason to record on each file request
 * @param remoteRequestId id of the remote (SRM) request
 * @param remoteFileId id of the remote file request
 */
public void turlRetrievalFailed(
    String surl, String reason, String remoteRequestId, String remoteFileId) {
  synchronized (remoteSurlToFileReqIds) {
    Collection<Long> fileRequestSet = remoteSurlToFileReqIds.get(surl);
    if (fileRequestSet == null || fileRequestSet.isEmpty()) {
      // Fixed log text: this method handles a retrieval *failure*, not "turlArrived".
      LOG.error("turlRetrievalFailed for unknown SURL = " + surl);
      return;
    }
    // Iterate over a copy: the loop body removes entries from the multimap, and
    // iterating the live get() view while removing would throw
    // ConcurrentModificationException.
    for (long id : new ArrayList<Long>(fileRequestSet)) {
      CopyFileRequest cfr = getFileRequest(id);
      try {
        String type = isSourceSrm() && !isSourceLocal() ? "source" : "destination";
        String error = "retrieval of " + type + " TURL failed with error " + reason;
        LOG.error(error);
        cfr.setState(State.FAILED, error);
      } catch (IllegalStateTransition ist) {
        LOG.error("Illegal State Transition : " + ist.getMessage());
      }
      cfr.saveJob();
      remoteSurlToFileReqIds.remove(surl, id);
    }
  }
  remoteFileRequestDone(surl, remoteRequestId, remoteFileId);
}
/**
 * Get a task editor extension for a specific task attribute. The attribute's media type is
 * parsed and its {@code markup}/{@code base-markup} parameters are matched against the
 * registered extensions; if nothing matches (or the media type is missing/unparsable), the
 * repository-level extension is returned instead.
 *
 * @param taskRepository the repository the attribute belongs to
 * @param taskAttribute the attribute whose media type selects the extension
 * @return the extension, or null if there is none
 * @see #getTaskEditorExtension(TaskRepository);
 * @since 3.11
 */
public static AbstractTaskEditorExtension getTaskEditorExtension(
    TaskRepository taskRepository, TaskAttribute taskAttribute) {
  init();
  String input = taskAttribute.getMetaData().getMediaType();
  if (input != null) {
    try {
      MediaType media = MediaType.parse(input);
      Multimap<String, String> parameters = media.parameters();
      if (parameters.containsKey(MARKUP_KEY)) {
        // containsKey guarantees at least one value, so next() is safe here.
        Iterator<String> iter = parameters.get(MARKUP_KEY).iterator();
        String markup = iter.next();
        // base-markup is optional; default to the empty string when absent.
        Iterator<String> baseMarkupIterator = parameters.get(BASE_MARKUP_KEY).iterator();
        String baseMarkup = (baseMarkupIterator.hasNext() ? baseMarkupIterator.next() : ""); // $NON-NLS-1$
        SortedSet<RegisteredTaskEditorExtension> extensions = getTaskEditorExtensions();
        // First registered extension matching either the markup or its base markup wins.
        for (RegisteredTaskEditorExtension extension : extensions) {
          if (markup.equals(extension.getName()) || baseMarkup.equals(extension.getName())) {
            return extension.getExtension();
          }
        }
      }
    } catch (IllegalArgumentException e) {
      // Malformed media type: log and fall through to the repository-level extension.
      StatusHandler.log(
          new Status(
              IStatus.ERROR,
              TasksUiPlugin.ID_PLUGIN,
              String.format(
                  "Unable to parse markup type for attribute %s", taskAttribute.toString()),
              e)); //$NON-NLS-1$
    }
  }
  return getTaskEditorExtension(taskRepository);
}
/**
 * Classifies every brand once all content has been seen: a brand with a single series (and
 * possibly a single episode) may really be a top-level series or a stand-alone episode,
 * depending on whether the titles coincide.
 */
@Override
public void finish() {
  for (String brandUri : brandUriToSeriesUrisMap.keySet()) {
    brandUriToTypeMap.put(brandUri, classifyBrand(brandUri));
  }
}

/** Determines the {@link BrandType} for one brand URI from its series/episode structure. */
private BrandType classifyBrand(String brandUri) {
  if (brandUriToSeriesUrisMap.get(brandUri).size() != 1) {
    // Multiple series: this is a genuine brand/series/episode hierarchy.
    return BrandType.BRAND_SERIES_EPISODE;
  }
  String seriesUri = Iterables.getOnlyElement(brandUriToSeriesUrisMap.get(brandUri));
  if (seriesUriToEpisodeUrisMap.get(seriesUri).size() == 1) {
    String episodeUri = Iterables.getOnlyElement(seriesUriToEpisodeUrisMap.get(seriesUri));
    if (contentUriToTitleMap.get(brandUri).equals(contentUriToTitleMap.get(seriesUri))) {
      // Brand and series share a title; a matching episode title means it is
      // really a single stand-alone episode.
      return contentUriToTitleMap.get(brandUri).equals(contentUriToTitleMap.get(episodeUri))
          ? BrandType.STAND_ALONE_EPISODE
          : BrandType.TOP_LEVEL_SERIES;
    }
    return BrandType.BRAND_SERIES_EPISODE;
  }
  // One series, many episodes: a title match collapses the brand into a series.
  return contentUriToTitleMap.get(brandUri).equals(contentUriToTitleMap.get(seriesUri))
      ? BrandType.TOP_LEVEL_SERIES
      : BrandType.BRAND_SERIES_EPISODE;
}
/**
 * Verifies, through the wrapped proof checker, that {@code pSuccessors} is a valid set of
 * abstract successors of {@code pElement}. If {@code pCfaEdge} is non-null only that edge is
 * checked; otherwise every CFA edge leaving the element's location is checked.
 *
 * @param pElement the ARG state whose successors are verified
 * @param pCfaEdge a single edge to restrict the check to, or null for all leaving edges
 * @param pSuccessors the claimed successors (asserted to equal the element's children)
 * @param wrappedProofChecker checker applied to the wrapped (inner) states
 * @return true if the wrapped checker accepts the successors for every relevant edge
 */
boolean areAbstractSuccessors(
    AbstractState pElement,
    CFAEdge pCfaEdge,
    Collection<? extends AbstractState> pSuccessors,
    ProofChecker wrappedProofChecker)
    throws CPATransferException, InterruptedException {
  ARGState element = (ARGState) pElement;
  assert Iterables.elementsEqual(element.getChildren(), pSuccessors);
  AbstractState wrappedState = element.getWrappedState();
  // Group each successor's wrapped state by the CFA edge leading to it, so we can
  // hand the wrapped checker one edge's successors at a time.
  Multimap<CFAEdge, AbstractState> wrappedSuccessors = HashMultimap.create();
  for (AbstractState absElement : pSuccessors) {
    ARGState successorElem = (ARGState) absElement;
    wrappedSuccessors.put(element.getEdgeToChild(successorElem), successorElem.getWrappedState());
  }
  if (pCfaEdge != null) {
    return wrappedProofChecker.areAbstractSuccessors(
        wrappedState, pCfaEdge, wrappedSuccessors.get(pCfaEdge));
  }
  // No specific edge given: every edge leaving this location must check out.
  CFANode loc = AbstractStates.extractLocation(element);
  for (CFAEdge edge : leavingEdges(loc)) {
    if (!wrappedProofChecker.areAbstractSuccessors(
        wrappedState, edge, wrappedSuccessors.get(edge))) {
      return false;
    }
  }
  return true;
}
/**
 * Serves a dynamic Cypher-backed request: runs the configured query with the merged request
 * parameters, converts the result to a graph, applies any requested aspects and projections,
 * and returns the graph.
 *
 * @param context the incoming request context supplying query/path parameters
 * @return a 200 response wrapping the resulting graph, with the configured cache control
 */
@AddCuries
@Override
public Response apply(ContainerRequestContext context) {
  logger.fine("Serving dynamic request");
  Multimap<String, Object> paramMap = MultivaluedMapUtils.merge(context.getUriInfo());
  paramMap = resolveCuries(paramMap);
  try (Transaction tx = graphDb.beginTx()) {
    // Fixed: the original assigned System.currentTimeMillis() twice in a row (dead store).
    long start = System.currentTimeMillis();
    Result result = cypherUtil.execute(config.getQuery(), paramMap);
    logger.fine((System.currentTimeMillis() - start) + " to execute query");
    start = System.currentTimeMillis();
    TinkerGraph graph = TinkerGraphUtil.resultToGraph(result);
    logger.fine((System.currentTimeMillis() - start) + " to convert to graph");
    start = System.currentTimeMillis();
    // Invoke each aspect whose flag parameter was supplied as "true".
    for (String key : aspectMap.keySet()) {
      if ("true".equals(getFirst(paramMap.get(key), "false"))) {
        aspectMap.get(key).invoke(graph);
      }
    }
    if (paramMap.containsKey("project")) {
      @SuppressWarnings("unchecked")
      Collection<String> projection = (Collection<String>) (Collection<?>) paramMap.get("project");
      TinkerGraphUtil.project(graph, projection);
    }
    ArrayPropertyTransformer.transform(graph);
    tx.success();
    return Response.ok(graph).cacheControl(config.getCacheControl()).build();
  }
}
/**
 * Positions (or lazily creates) the actor(s) for a grid job that has just started, moving
 * each one from a fixed off-screen start position to its slot's lattice position, tweened
 * when {@code tweenChanges} is enabled.
 *
 * @param job the grid job that started; jobs without a node are logged and skipped
 */
private void applyStart(GridJob job) {
  String fullJobId = job.getFullJobId();
  Collection<JobActor> actors = jobActorMap.get(fullJobId);
  if (actors.isEmpty()) {
    // First sighting of this job: create its actor lazily and re-read the view.
    JobActor jobActor = createJobActor(job);
    addJobActor(fullJobId, jobActor);
    actors = jobActorMap.get(fullJobId);
  }
  if (job.getNode() == null) {
    log.warn("No node for job being started: {}", fullJobId);
    return;
  }
  log.debug("Starting job {} on {}", fullJobId, job.getNode().getShortName());
  String nodeName = job.getNode().getShortName();
  if (actors.size() > 1) {
    log.warn("More than one actor for job being started: " + fullJobId);
  }
  JobActor jobActor = actors.iterator().next();
  int i = 0;
  // The job may occupy several slots on the node; the first slot reuses the existing
  // actor and each additional slot gets a clone.
  GridJob[] nodeJobs = job.getNode().getSlots();
  for (int s = 0; s < nodeJobs.length; s++) {
    GridJob nodeJob = nodeJobs[s];
    if (nodeJob == null) continue;
    if (!nodeJob.getFullJobId().equals(fullJobId)) continue;
    if (i > 0) {
      jobActor = cloneJobActor(fullJobId);
    }
    PVector endPos = getLatticePos(nodeName, s + "");
    // TODO: random outside location in direction of vector from 0,0,0
    PVector startPos = new PVector(1000, 1000, 1000);
    jobActor.pos = startPos;
    if (tweenChanges) {
      // scale duration to the distance that needs to be traveled
      float distance = jobActor.pos.dist(endPos);
      float duration = (DURATION_JOB_START * distance / DISTANCE_JOB_START) * 0.6f;
      Tween tween =
          new Tween("start_job_" + fullJobId + "#" + i, getTweenDuration(duration))
              .addPVector(jobActor.pos, endPos)
              .call(jobActor, "jobStarted")
              .setEasing(Tween.SINE_OUT)
              .noAutoUpdate();
      jobActor.tweens.add(tween);
    } else {
      // No tweening: jump straight to the lattice position.
      jobActor.pos.set(endPos);
    }
    i++;
  }
}
/** Verifies that getAllEntities reports every generated entity, grouped by folder kind. */
public void testGetAllEntities() throws IOException {
  File appDir = generateCouchApp();
  // Populate the couch app with one or two entities of each kind.
  couchAppUtil.generateFilter(appDir, "my_filter");
  couchAppUtil.generateFilter(appDir, "my_second_filter");
  couchAppUtil.generateList(appDir, "my_list");
  couchAppUtil.generateShow(appDir, "my_show");
  couchAppUtil.generateUpdate(appDir, "my_update");
  couchAppUtil.generateView(appDir, "my_view");

  Multimap<String, String> entities = couchAppUtil.getAllEntities(appDir);

  Collection<String> filters = entities.get(ICouchAppUtil.FOLDER_FILTERS);
  assertEquals(2, filters.size());
  assertTrue(filters.contains("my_filter"));
  assertTrue(filters.contains("my_second_filter"));

  Collection<String> lists = entities.get(ICouchAppUtil.FOLDER_LISTS);
  assertEquals(1, lists.size());
  assertTrue(lists.contains("my_list"));

  Collection<String> shows = entities.get(ICouchAppUtil.FOLDER_SHOWS);
  assertEquals(1, shows.size());
  assertTrue(shows.contains("my_show"));

  Collection<String> updates = entities.get(ICouchAppUtil.FOLDER_UPDATES);
  assertEquals(1, updates.size());
  assertTrue(updates.contains("my_update"));

  // Views include the generated view plus the app's built-in "recent-items" view.
  Collection<String> views = entities.get(ICouchAppUtil.FOLDER_VIEWS);
  assertEquals(2, views.size());
  assertTrue(views.contains("my_view"));
  assertTrue(views.contains("recent-items"));
}
/**
 * Returns all child identifiers mapped to the primary identifier of the given accession.
 *
 * @param accession accession to resolve to a primary identifier first
 * @return the child identifiers for the primary identifier (possibly empty)
 * @throws IOException if the identifier lookup fails
 * @throws MissingLocationException if the accession cannot be located
 */
public Collection<String> getAllIdentifiers(String accession)
    throws IOException, MissingLocationException {
  String primary = getPrimaryIdentifier(accession);
  // isEmpty() instead of size() == 0 (same behavior, idiomatic).
  if (childMap.get(primary).isEmpty()) {
    // NOTE(review): this looks like leftover debug output on the no-children path;
    // consider replacing with a proper logger call or removing it.
    System.out.println(primary);
  }
  return childMap.get(primary);
}
/**
 * Creates a request builder for the given HTTP verb and URL, applying optional basic auth,
 * Accept/Content-Type headers taken from {@code options}, and the remaining options as
 * query parameters.
 *
 * <p>Note: the Accept and Content-Type entries are consumed (removed) from {@code options}.
 *
 * @param verb HTTP method name (GET, POST, PUT, DELETE, HEAD, OPTIONS)
 * @param url target URL
 * @param username basic-auth principal, or null to skip authentication
 * @param password basic-auth password, or null to skip authentication
 * @param options header overrides plus query parameters; mutated by this method
 * @return the configured builder
 * @throws IllegalArgumentException for an unsupported verb
 */
private BoundRequestBuilder getBuilderWithHeaderAndQuery(
    final String verb,
    final String url,
    @Nullable final String username,
    @Nullable final String password,
    final Multimap<String, String> options) {
  final BoundRequestBuilder builder;
  switch (verb) {
    case "GET":
      builder = httpClient.prepareGet(url);
      break;
    case "POST":
      builder = httpClient.preparePost(url);
      break;
    case "PUT":
      builder = httpClient.preparePut(url);
      break;
    case "DELETE":
      builder = httpClient.prepareDelete(url);
      break;
    case "HEAD":
      builder = httpClient.prepareHead(url);
      break;
    case "OPTIONS":
      builder = httpClient.prepareOptions(url);
      break;
    default:
      throw new IllegalArgumentException("Unrecognized verb: " + verb);
  }
  if (username != null && password != null) {
    final Realm realm = new RealmBuilder().setPrincipal(username).setPassword(password).build();
    builder.setRealm(realm);
  }
  // Accept: join all supplied values, defaulting to JSON when none were given.
  final Collection<String> acceptHeaders = options.removeAll(HTTP_HEADER_ACCEPT);
  final String acceptHeader =
      acceptHeaders.isEmpty() ? ACCEPT_JSON : CSV_JOINER.join(acceptHeaders);
  builder.addHeader(HTTP_HEADER_ACCEPT, acceptHeader);
  // Content-Type: single value, defaulting to JSON when none was given.
  String contentTypeHeader = getUniqueValue(options, HTTP_HEADER_CONTENT_TYPE);
  if (contentTypeHeader == null) {
    contentTypeHeader = CONTENT_TYPE_JSON;
  } else {
    options.removeAll(HTTP_HEADER_CONTENT_TYPE);
  }
  builder.addHeader(HTTP_HEADER_CONTENT_TYPE, contentTypeHeader);
  builder.setBodyEncoding("UTF-8");
  // Everything left in the multimap becomes query parameters.
  for (final String key : options.keySet()) {
    if (options.get(key) != null) {
      for (final String value : options.get(key)) {
        builder.addQueryParam(key, value);
      }
    }
  }
  return builder;
}
@Test
public void testPrefixAndDelimiterUrlEncodingQueryString() {
  // Prefix and delimiter values containing '/' must survive query-parameter building.
  ListBucketOptions options = new ListBucketOptions();
  options.withPrefix("/test").delimiter("/");
  Multimap<String, String> params = options.buildQueryParameters();
  assertEquals(params.size(), 2);
  assertEquals(params.get("prefix"), ImmutableList.of("/test"));
  assertEquals(params.get("delimiter"), ImmutableList.of("/"));
}
@Test
public void testTwoOptionQueryString() {
  // Setting two options must yield exactly two query parameters.
  ListBucketOptions options = new ListBucketOptions();
  options.withPrefix("test").maxResults(1);
  Multimap<String, String> params = options.buildQueryParameters();
  assertEquals(params.size(), 2);
  assertEquals(params.get("prefix"), ImmutableList.of("test"));
  assertEquals(params.get("max-keys"), ImmutableList.of("1"));
}
/**
 * Collects all missing-range causes recorded for a field: both coverage gaps and missing
 * number patterns.
 *
 * @param id the field path/id to look up
 * @return a new mutable collection of the combined causes (possibly empty)
 */
public Collection<MissingRange> getRangeCheckCausesByFieldPath(String id) {
  // Seed the result with the gaps, then append the missing number patterns.
  Collection<MissingRange> causes = new ArrayList<MissingRange>(gapsByFieldId.get(id));
  causes.addAll(missingNumberPatternsByFieldId.get(id));
  return causes;
}
@Test
public void testTwoOptionQueryString() {
  // Setting two options must yield exactly two query parameters.
  ListContainerOptions options = new ListContainerOptions();
  options.withPrefix("test").maxResults(1);
  Multimap<String, String> params = options.buildQueryParameters();
  assertEquals(params.size(), 2);
  assertEquals(params.get("prefix"), Collections.singletonList("test"));
  assertEquals(params.get("limit"), Collections.singletonList("1"));
}
@Test
public void testPrefixAndPathUrlEncodingQueryString() {
  // Prefix and path values containing '/' must survive query-parameter building.
  ListContainerOptions options = new ListContainerOptions();
  options.withPrefix("/cloudfiles/test").underPath("/");
  Multimap<String, String> params = options.buildQueryParameters();
  assertEquals(params.size(), 2);
  assertEquals(params.get("prefix"), Collections.singletonList("/cloudfiles/test"));
  assertEquals(params.get("path"), Collections.singletonList("/"));
}
/**
 * Decorates a noun with its recorded properties (e.g. adjectives), applying each property's
 * surface form in turn.
 *
 * @param noun the noun to decorate
 * @return the noun with all properties applied, or the noun unchanged when none exist
 */
private String addProperties(String noun) {
  // Nothing recorded at all, or nothing for this noun: return it untouched.
  if (entityProperties.isEmpty() || entityProperties.get(noun).isEmpty()) {
    return noun;
  }
  String decorated = noun;
  for (SumoProcessEntityProperty property : entityProperties.get(noun)) {
    decorated = property.getSurfaceFormForNoun(decorated, kb);
  }
  return decorated;
}
/**
 * Dispatches a custom-payload packet to every handler registered for its channel: the
 * universal handlers plus the side-specific (server or client) handlers.
 *
 * @param packet the incoming packet; its channel selects the handlers
 * @param network the network manager the packet arrived on
 * @param player the receiving player; an EntityPlayerMP means server side
 */
private void handlePacket(Packet250CustomPayload packet, NetworkManager network, Player player) {
  String channel = packet.channel;
  // Server-side players get the server handlers, everyone else the client handlers.
  Iterable<IPacketHandler> sideHandlers =
      player instanceof EntityPlayerMP
          ? serverPacketHandlers.get(channel)
          : clientPacketHandlers.get(channel);
  for (IPacketHandler handler :
      Iterables.concat(universalPacketHandlers.get(channel), sideHandlers)) {
    handler.onPacketData(network, packet, player);
  }
}
@Test
public void whenCreateMultimap_thenCreated() {
  final Multimap<String, String> multimap = ArrayListMultimap.create();
  // A multimap keeps every value added under the same key.
  multimap.put("pet", "cat");
  multimap.put("pet", "dog");
  multimap.put("fruit", "apple");
  multimap.put("fruit", "banana");
  assertThat(multimap.get("pet"), containsInAnyOrder("cat", "dog"));
  assertThat(multimap.get("fruit"), containsInAnyOrder("apple", "banana"));
}
@Test public void testExampleCode() { try { RpmSpecParser parser = RpmSpecParser.createParser("src/test/resources/specs/p4bugzilla.spec"); Multimap<String, String> properties = parser.parse(); System.out.printf("RPM name: %s %n", properties.get("name")); System.out.printf( "RPM version: %s-%s %n", properties.get("version"), properties.get("release")); } catch (FileNotFoundException e) { // ... } }
/**
 * Analyses a text: tokenizes it, looks the unique words up in the word list, guesses the
 * dominant language (the one with the most matched words), and collects known/unknown words
 * and sentences into a {@link TextAnalysis}.
 *
 * @param text the raw text to analyse
 * @return the populated analysis
 * @throws ModelException if the word-list query fails
 */
public TextAnalysis analyse(String text) throws ModelException {
  String[] words = semanticService.getWords(text);
  semanticService.lowercaseWords(words);
  List<String> uniqueWords = Strings.asList(semanticService.getUniqueWords(words));
  WordListPerspectiveQuery query = new WordListPerspectiveQuery().withWords(uniqueWords);
  List<WordListPerspective> list = modelService.list(query);
  List<String> unknownWords = Lists.newArrayList();
  Set<String> knownWords = new HashSet<String>();
  Multimap<String, String> wordsByLanguage = HashMultimap.create();
  for (WordListPerspective perspective : list) {
    String word = perspective.getText().toLowerCase();
    knownWords.add(word);
    if (perspective.getLanguage() != null) {
      wordsByLanguage.put(perspective.getLanguage(), word);
    }
  }
  // Pick the language with the most matched words. Iterate the distinct keys
  // (keySet) rather than keys(): keys() is a Multiset that repeats each language
  // once per word, which made the original argmax loop needlessly quadratic.
  String language = null;
  for (String lang : wordsByLanguage.keySet()) {
    if (language == null
        || (wordsByLanguage.get(lang).size() > wordsByLanguage.get(language).size())) {
      language = lang;
    }
  }
  for (String word : uniqueWords) {
    if (!knownWords.contains(word)) {
      unknownWords.add(word);
    }
  }
  // NOTE(review): sentence detection is hard-wired to English; confirm whether the
  // detected language should drive the locale instead.
  Locale possibleLocale = Locale.ENGLISH;
  String[] sentences = semanticService.getSentences(text, possibleLocale);
  TextAnalysis analysis = new TextAnalysis();
  analysis.setLanguage(language);
  analysis.setSentences(Strings.asList(sentences));
  analysis.setWordsByLanguage(wordsByLanguage.asMap());
  analysis.setUniqueWords(uniqueWords);
  analysis.setKnownWords(list);
  analysis.setUnknownWords(unknownWords);
  return analysis;
}
/**
 * Writes one CSV line per output record that has both an article title and URL; company
 * fields fall back to "n/a". The write is synchronized so concurrent callers do not
 * interleave lines.
 *
 * @param outputRecord the scraped record keyed by field name
 * @throws IOException if appending to the writer fails
 */
@Override
public void processOutput(Multimap<String, String> outputRecord) throws IOException {
  String articleTitle = csvField(outputRecord.get("articleTitle"), null);
  String articleUrl = csvField(outputRecord.get("articleUrl"), null);
  // Records without both title and URL are silently skipped.
  if (articleTitle == null || articleUrl == null) {
    return;
  }
  String companyName = csvField(outputRecord.get("companyName"), "n/a");
  String companyWebsite = csvField(outputRecord.get("companyWebsite"), "n/a");
  synchronized (this) {
    writer.append(
        companyName + "," + companyWebsite + "," + articleTitle + "," + articleUrl + "\n");
  }
}
/**
 * Rebuilds the lookup maps from scratch: indexes every reaction by its identifier and links
 * each participant metabolite (reactants and products) back to the reactions it occurs in.
 */
public void rebuildMaps() {
  participantMap.clear();
  reactionMap.clear();
  for (MetabolicReaction reaction : this) {
    reactionMap.put(reaction.getIdentifier(), reaction);
    // Link both sides of the reaction to their molecules.
    for (MetabolicParticipant reactant : reaction.getReactants()) {
      participantMap.get(reactant.getMolecule().getIdentifier()).add(reaction);
    }
    for (MetabolicParticipant product : reaction.getProducts()) {
      participantMap.get(product.getMolecule().getIdentifier()).add(reaction);
    }
  }
}
/**
 * Removes a reaction, first unlinking it from every metabolite it references in the
 * participant index.
 *
 * @param rxn the reaction to remove
 * @return whether the underlying collection changed
 */
@Override
public boolean remove(MetabolicReaction rxn) {
  // remove links to metabolites
  for (MetabolicParticipant p : rxn.getParticipants()) {
    Metabolite m = p.getMolecule();
    participantMap.get(m.getIdentifier()).remove(rxn);
    // Drop the key entirely once its last reaction is gone, so lookups do not
    // return lingering empty collections.
    if (participantMap.get(m.getIdentifier()).isEmpty()) {
      participantMap.removeAll(m.getIdentifier());
    }
  }
  reactionMap.remove(rxn.getIdentifier(), rxn);
  return super.remove(rxn);
}
@Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { Path relativePath = workingDir.relativize(file); final HashCode fileHash = hashPath(file, hashFunction.newHasher()); if (!seen.get(relativePath).contains(fileHash)) { seen.get(relativePath).add(fileHash); // TODO(bazel-team): Change to a symlink when the AOSP merge code supports symlinks. Files.copy(file, newRoot.resolve(relativePath)); // Files.createSymbolicLink(newRoot.resolve(workingDir.relativize(file)), file); } else { LOGGER.warning(String.format("Duplicated file %s [%s]", relativePath, file)); } return super.visitFile(file, attrs); }
@Override public AbstractArrayDefinition createDefinition( @Nullable IDefinitionScope definitionScope, String fieldName, BitBuffer input) throws CTFException { IDefinition lenDef = null; if (definitionScope != null) { lenDef = definitionScope.lookupDefinition(getLengthName()); } if (lenDef == null) { throw new CTFException("Sequence length field not found"); // $NON-NLS-1$ } if (!(lenDef instanceof IntegerDefinition)) { throw new CTFException("Sequence length field not integer"); // $NON-NLS-1$ } IntegerDefinition lengthDefinition = (IntegerDefinition) lenDef; if (lengthDefinition.getDeclaration().isSigned()) { throw new CTFException("Sequence length must not be signed"); // $NON-NLS-1$ } long length = lengthDefinition.getValue(); if ((length > Integer.MAX_VALUE) || (!input.canRead((int) length * fElemType.getMaximumSize()))) { throw new CTFException("Sequence length too long " + length); // $NON-NLS-1$ } if (isAlignedBytes()) { // Don't create "useless" definitions byte[] data = new byte[(int) length]; input.get(data); return new ByteArrayDefinition(this, definitionScope, fieldName, data); } Collection<String> collection = fPaths.get(fieldName); while (collection.size() < length) { fPaths.put(fieldName, fieldName + '[' + collection.size() + ']'); } List<String> paths = (List<String>) fPaths.get(fieldName); Builder<Definition> definitions = new ImmutableList.Builder<>(); for (int i = 0; i < length; i++) { /* We should not have inserted any null values */ String elemName = checkNotNull(paths.get(i)); definitions.add(fElemType.createDefinition(definitionScope, elemName, input)); } List<Definition> list = checkNotNull(definitions.build()); return new ArrayDefinition(this, definitionScope, fieldName, list); }
/**
 * Returns the checks registered for the given AST node's token type, selecting the
 * comment-aware or ordinary registry depending on the AST state.
 *
 * @param ast the node to notify for
 * @param astState state of AST.
 * @return the registered checks, or null when none are registered for the token
 */
private Collection<Check> getListOfChecks(DetailAST ast, AstState astState) {
  final String tokenName = Utils.getTokenName(ast.getType());
  Collection<Check> visitors = null;
  if (astState == AstState.WITH_COMMENTS) {
    if (tokenToCommentChecks.containsKey(tokenName)) {
      visitors = tokenToCommentChecks.get(tokenName);
    }
  } else if (tokenToOrdinaryChecks.containsKey(tokenName)) {
    visitors = tokenToOrdinaryChecks.get(tokenName);
  }
  return visitors;
}
/**
 * Loads the current user's class map and builds the class tree: one child node per class
 * key, with one grandchild per distinct class type under that key.
 */
@Override
protected void prepare() {
  super.prepare();
  try {
    classMap = infoService.getMarkClassMap(currentUser);
    root = new DefaultTreeNode("Root", null);
    log.info("Start-------------------------Create Tree------------------------------Start");
    for (String key : classMap.keySet()) {
      TreeNode node = new DefaultTreeNode(key, root);
      // Set.add returns false for duplicates, so each distinct type yields exactly
      // one child node (replaces the original contains-then-add and the needless
      // copy of the collection into an array).
      Set<ClassType> seenTypes = new HashSet<>();
      for (Tclass tclass : classMap.get(key)) {
        if (seenTypes.add(tclass.getType())) {
          // The DefaultTreeNode constructor attaches the node to its parent;
          // the instance itself does not need to be kept (original had an
          // unused local here).
          new DefaultTreeNode(tclass.getType().getLabel(), node);
        }
      }
    }
    log.info("End-------------------------Create Tree------------------------------End");
  } catch (Exception ex) {
    log.error("教师的授课班级加载出错", ex);
  }
  items = new ArrayList<>();
  classesForChoose = new ArrayList<>();
}
/**
 * Appends the stack-trace argument to every call of each affected function. Calls made from
 * main/config start a fresh trace (just the callee's name); calls from other functions
 * extend the caller's stack-trace variable with this call's source position.
 *
 * @param calls map from function to all calls of that function
 * @param affectedFuncs functions that received an extra stack-trace parameter
 */
private void passStacktraceParams(
    final Multimap<ImFunction, ImFunctionCall> calls, Set<ImFunction> affectedFuncs) {
  // pass the stacktrace parameter at all calls
  for (ImFunction f : affectedFuncs) {
    for (ImFunctionCall call : calls.get(f)) {
      ImFunction caller = call.getNearestFunc();
      ImExpr stExpr;
      if (isMainOrConfig(caller)) {
        // Entry points start a new trace consisting of just the callee's name.
        stExpr = str(" " + f.getName());
      } else {
        ImVar stackTraceVar = getStackTraceVar(caller);
        WPos source = call.attrTrace().attrSource();
        String callPos;
        if (source.getFile().startsWith("<")) {
          // Synthetic/compiler-generated positions carry no useful location.
          callPos = "";
        } else {
          callPos = "\n " + source.printShort();
        }
        // Prepend this call's position to the caller's accumulated stack trace.
        stExpr =
            JassIm.ImOperatorCall(
                WurstOperator.PLUS,
                JassIm.ImExprs(str(callPos), JassIm.ImVarAccess(stackTraceVar)));
      }
      call.getArguments().add(stExpr);
    }
  }
}
/**
 * Get a list of all trips that pass through a stop during a single ServiceDate. Useful when
 * creating complete stop timetables for a single day.
 *
 * @param stop Stop object to perform the search for
 * @param serviceDate Return all departures for the specified date
 * @return one StopTimesInPattern per pattern passing through the stop
 */
public List<StopTimesInPattern> getStopTimesForStop(Stop stop, ServiceDate serviceDate) {
  List<StopTimesInPattern> ret = new ArrayList<>();
  // Prefer the real-time snapshot when one is available.
  TimetableSnapshot snapshot = null;
  if (graph.timetableSnapshotSource != null) {
    snapshot = graph.timetableSnapshotSource.getTimetableSnapshot();
  }
  Collection<TripPattern> patterns = patternsForStop.get(stop);
  for (TripPattern pattern : patterns) {
    StopTimesInPattern stopTimes = new StopTimesInPattern(pattern);
    // Use the real-time timetable when a snapshot exists, otherwise the scheduled one.
    Timetable tt;
    if (snapshot != null) {
      tt = snapshot.resolve(pattern, serviceDate);
    } else {
      tt = pattern.scheduledTimetable;
    }
    ServiceDay sd =
        new ServiceDay(graph, serviceDate, calendarService, pattern.route.getAgency().getId());
    // Collect times at every occurrence of the stop within the pattern
    // (a pattern may visit the same stop more than once).
    int sidx = 0;
    for (Stop currStop : pattern.stopPattern.stops) {
      if (currStop == stop) {
        for (TripTimes t : tt.tripTimes) {
          // Skip trips not running on this service day.
          if (!sd.serviceRunning(t.serviceCode)) continue;
          stopTimes.times.add(new TripTimeShort(t, sidx, stop, sd));
        }
      }
      sidx++;
    }
    ret.add(stopTimes);
  }
  return ret;
}
/**
 * Dynamically generate the set of Routes passing though a Stop on demand, by collecting the
 * route of every trip pattern that visits the stop.
 *
 * @param stop the stop to look up
 * @return the (possibly empty) set of routes serving the stop
 */
public Set<Route> routesForStop(Stop stop) {
  Set<Route> result = Sets.newHashSet();
  for (TripPattern pattern : patternsForStop.get(stop)) {
    result.add(pattern.route);
  }
  return result;
}
/**
 * Updates all super-projects subscribed to any of the given updated branches by refreshing
 * their gitlinks, while guarding against circular subscriptions.
 *
 * @param db review database used to look up submodule subscriptions
 * @param updatedBranches branches whose submodules just changed
 * @throws SubmoduleException if subscription records cannot be read
 */
protected void updateSuperProjects(ReviewDb db, Set<Branch.NameKey> updatedBranches)
    throws SubmoduleException {
  try {
    // These (repo/branch) will be updated later with all the given
    // individual submodule subscriptions
    Multimap<Branch.NameKey, SubmoduleSubscription> targets = HashMultimap.create();
    for (Branch.NameKey updatedBranch : updatedBranches) {
      for (SubmoduleSubscription sub : db.submoduleSubscriptions().bySubmodule(updatedBranch)) {
        targets.put(sub.getSuperProject(), sub);
      }
    }
    // Mark the source branches as already handled so a superproject that is itself
    // one of the updated branches trips the circular-subscription check below.
    updatedSubscribers.addAll(updatedBranches);
    // Update subscribers.
    for (Branch.NameKey dest : targets.keySet()) {
      try {
        if (!updatedSubscribers.add(dest)) {
          log.error("Possible circular subscription involving " + dest);
        } else {
          updateGitlinks(db, dest, targets.get(dest));
        }
      } catch (SubmoduleException e) {
        // A failure on one superproject should not stop the remaining updates.
        log.warn("Cannot update gitlinks for " + dest, e);
      }
    }
  } catch (OrmException e) {
    logAndThrowSubmoduleException("Cannot read subscription records", e);
  }
}