public static void registerObjectsExportedDefinitions(
    @NotNull VirtualFile key,
    @NotNull final JsonSchemaExportedDefinitions definitionsObject,
    @NotNull final JsonSchemaObject object) {
  // Nothing to register for schemas without a usable id.
  String id = object.getId();
  if (StringUtil.isEmptyOrSpaces(id)) {
    return;
  }
  // A trailing '#' is an empty fragment; strip it from the id proper.
  if (id.endsWith("#")) {
    id = id.substring(0, id.length() - 1);
  }

  // All exported entries for this schema, keyed by JSON pointer
  // ("" denotes the schema object itself).
  final HashMap<String, JsonSchemaObject> exported = new HashMap<>();
  exported.put("", object);

  final Map<String, JsonSchemaObject> definitions = object.getDefinitions();
  if (definitions != null && !definitions.isEmpty()) {
    for (Map.Entry<String, JsonSchemaObject> entry : definitions.entrySet()) {
      String pointer = entry.getKey();
      if (pointer.startsWith("/")) {
        pointer = pointer.substring(1);
      }
      exported.put("#/definitions/" + pointer, entry.getValue());
    }
  }

  final Map<String, JsonSchemaObject> properties = object.getProperties();
  if (properties != null && !properties.isEmpty()) {
    for (Map.Entry<String, JsonSchemaObject> entry : properties.entrySet()) {
      String pointer = entry.getKey();
      if (pointer.startsWith("/")) {
        pointer = pointer.substring(1);
      }
      exported.put("#/properties/" + pointer, entry.getValue());
    }
  }

  definitionsObject.register(key, id, exported);
}
public List<Map<IndexEntry, Revision>> createCombinations( Map<IndexEntry, Collection<Revision>> moduleBranches) { if (moduleBranches.keySet().size() == 0) return new ArrayList<Map<IndexEntry, Revision>>(); // Get an entry: List<Map<IndexEntry, Revision>> thisLevel = new ArrayList<Map<IndexEntry, Revision>>(); IndexEntry e = moduleBranches.keySet().iterator().next(); for (Revision b : moduleBranches.remove(e)) { Map<IndexEntry, Revision> result = new HashMap<IndexEntry, Revision>(); result.put(e, b); thisLevel.add(result); } List<Map<IndexEntry, Revision>> children = createCombinations(moduleBranches); if (children.size() == 0) return thisLevel; // Merge the two together List<Map<IndexEntry, Revision>> result = new ArrayList<Map<IndexEntry, Revision>>(); for (Map<IndexEntry, Revision> thisLevelEntry : thisLevel) { for (Map<IndexEntry, Revision> childLevelEntry : children) { HashMap<IndexEntry, Revision> r = new HashMap<IndexEntry, Revision>(); r.putAll(thisLevelEntry); r.putAll(childLevelEntry); result.add(r); } } return result; }
// Reads the data passed in via the launching Intent and initializes the
// selection state and the action-bar/title UI accordingly.
private void parseIntent() {
  Intent intent = getIntent();
  receiveUserAdds = intent.getStringArrayExtra(RECEIVE_ADDRESS);
  // Serializable extras: previously selected users and phone/email contacts.
  // Unchecked casts -- the sender is expected to have put HashMaps.
  HashMap<Long, String> tmpUserMap =
      (HashMap<Long, String>) intent.getSerializableExtra(RECEIVE_SELECTUSERCIRCLE_NAME);
  HashMap<String, String> tmpPhoneEmailMap =
      (HashMap<String, String>) intent.getSerializableExtra(RECEIVE_SELECTPHONEEMAIL_NAME);
  if (tmpUserMap != null) {
    // Replace (not merge) the current user selection.
    mSelectedUserCircleNameMap.clear();
    mSelectedUserCircleNameMap.putAll(tmpUserMap);
  }
  if (tmpPhoneEmailMap != null) {
    // Replace (not merge) the current phone/email selection.
    mSelectedPhoneEmailNameMap.clear();
    mSelectedPhoneEmailNameMap.putAll(tmpPhoneEmailMap);
  }
  // A valid circle id (> 0) means we are inviting into a circle;
  // otherwise this screen acts as a plain picker.
  mInviteId = intent.getLongExtra(CircleUtils.CIRCLE_ID, -1);
  if (mInviteId > 0) {
    overrideRightTextActionBtn(R.string.qiupu_invite, inviteClickListener);
  } else {
    overrideRightTextActionBtn(R.string.label_ok, pickClickListener);
  }
  // Title depends on where the picker was launched from.
  if (intent.getIntExtra(PICK_FROM, -1) == PICK_FROM_COMPOSE) {
    setHeadTitle(R.string.string_select_user);
  } else {
    setHeadTitle(R.string.invite_people_title);
  }
}
Map<String, Object> getValues(
    Individual ind,
    OntModel displayOntModel,
    OntModel assertionModel,
    Map<String, Object> baseValues) {
  if (ind == null) {
    return Collections.emptyMap();
  }

  // Collect the ValueFactories that the display ontology declares for
  // this individual via DisplayVocabulary.REQUIRES_VALUES.
  Set<ValueFactory> factories = new HashSet<ValueFactory>();
  displayOntModel.enterCriticalSection(Model.READ);
  StmtIterator it = ind.listProperties(DisplayVocabulary.REQUIRES_VALUES);
  try {
    while (it.hasNext()) {
      RDFNode factoryNode = it.nextStatement().getObject();
      factories.addAll(getValueFactory(factoryNode, displayOntModel));
    }
  } finally {
    it.close();
    displayOntModel.leaveCriticalSection();
  }

  // Start from the caller-supplied base values and let each factory
  // layer its own values on top (later factories may see earlier ones).
  HashMap<String, Object> values = new HashMap<String, Object>(baseValues);
  for (ValueFactory factory : factories) {
    values.putAll(factory.getValues(assertionModel, values));
  }
  return values;
}
public static int getDistance(String phrase, String rel, List<DependencyTriple> dtl) {
  // Seed the frontier with each token of the phrase, mapped to itself.
  HashMap<String, String> frontier = new HashMap<String, String>();
  for (String token : phrase.split(" ")) {
    frontier.put(token, token);
  }

  // Breadth-first expansion through the dependency triples, at most 5 hops.
  for (int depth = 0; depth < 5; depth++) {
    if (checkRelation(frontier, rel, dtl)) {
      return depth;
    }
    // Next frontier: all parents and children of the current terms.
    HashMap<String, String> next = new HashMap<String, String>();
    for (Map.Entry<String, String> entry : frontier.entrySet()) {
      next.putAll(getParents(entry.getKey(), entry.getValue(), dtl));
      next.putAll(getChildren(entry.getKey(), entry.getValue(), dtl));
    }
    if (next.isEmpty()) {
      return -1; // no further neighbors: relation unreachable
    }
    frontier = next;
  }
  return -1; // not found within 5 hops
}
public HashMap<String, byte[]> digestDependencies(List<File> dependencies) throws IOException { Stopwatch stopwatch = Stopwatch.createStarted(); HashMap<String, byte[]> digest = new HashMap<String, byte[]>(); // scan dependencies backwards to properly deal with duplicate type definitions for (int i = dependencies.size() - 1; i >= 0; i--) { File file = dependencies.get(i); if (file.isFile()) { digest.putAll(digestJar(file)); } else if (file.isDirectory()) { digest.putAll(digestDirectory(file)); } else { // happens with reactor dependencies with empty source folders continue; } } log.debug( "Analyzed {} classpath dependencies ({} ms)", dependencies.size(), stopwatch.elapsed(TimeUnit.MILLISECONDS)); return digest; }
@Override
public Double invoke(HashMap<DDVariable, Integer> varValues) {
  // Evaluate the DAG under the caller's assignment extended with the
  // restricted variable values (restrictions win on key conflicts).
  HashMap<DDVariable, Integer> assignment = new HashMap<DDVariable, Integer>(varValues);
  assignment.putAll(restrictVarValues);
  return dag.getValue(assignment);
}
/** * Updates the entity with the values you previously set using the setter methods, and any * additional specified arguments. The specified arguments take precedent over the values that * were set using the setter methods. * * @param args The arguments to update. */ public void update(Map<String, Object> args) { // Merge cached setters and live args together before updating. HashMap<String, Object> mergedArgs = new HashMap<String, Object>(); mergedArgs.putAll(toUpdate); mergedArgs.putAll(args); service.post(actionPath("edit"), mergedArgs); toUpdate.clear(); invalidate(); }
/** {@inheritDoc} */ @Override public void update(Map<String, Object> args) { // Merge cached setters and live args together before updating. HashMap<String, Object> mergedArgs = new HashMap<String, Object>(); mergedArgs.putAll(toUpdate); mergedArgs.putAll(args); service.post(path + "/settings", mergedArgs); toUpdate.clear(); invalidate(); }
@Override
public Map<String, String[]> getParameterMap() {
  // Overlay the overrides on top of the wrapped request's parameters.
  final HashMap<String, String[]> merged = new HashMap<>(fRequest.getParameterMap());
  merged.putAll(fParamOverrides);
  return merged;
}
public Map<String, Object> getAllJPQLParameter() {
  // Union of all parameter maps; later maps override earlier ones on
  // key clashes (datafile < dataset < sample < jpql).
  HashMap<String, Object> all = new HashMap<String, Object>(datafileParameter);
  all.putAll(datasetParameter);
  all.putAll(sampleParameter);
  all.putAll(jpqlParameter);
  return all;
}
/**
 * Returns URIs of all super-properties of the given property.
 *
 * @param propertyUri the property whose super-properties are requested
 * @param recursive if false, only direct super-properties are returned
 * @return when not recursive, the cached direct map (NOTE: may be null if
 *     the cache has no entry -- preserved behavior); otherwise a fresh map
 *     merging direct and indirect super-properties, never null
 */
public HashMap<String, Label> getSuperProperties(String propertyUri, boolean recursive) {
  HashMap<String, Label> direct = ontCache.getDirectSuperProperties().get(propertyUri);
  if (!recursive) {
    return direct;
  }
  HashMap<String, Label> merged = new HashMap<String, Label>();
  if (direct != null) {
    merged.putAll(direct);
  }
  HashMap<String, Label> indirect = ontCache.getIndirectSuperProperties().get(propertyUri);
  if (indirect != null) {
    merged.putAll(indirect);
  }
  return merged;
}
/**
 * Merges all bookkeeping collections of the given PegGraph into this one.
 * Pure aggregation: each map/set below is extended with the corresponding
 * entries from {@code pg}; for the maps, entries with equal keys are
 * overwritten by {@code pg}'s (standard putAll semantics).
 */
protected void buildMaps(PegGraph pg) {
  exceHandlers.addAll(pg.getExceHandlers());
  startToThread.putAll(pg.getStartToThread());
  startToAllocNodes.putAll(pg.getStartToAllocNodes());
  startToBeginNodes.putAll(pg.getStartToBeginNodes());
  waitingNodes.putAll(pg.getWaitingNodes());
  notifyAll.putAll(pg.getNotifyAll());
  canNotBeCompacted.addAll(pg.getCanNotBeCompacted());
  synch.addAll(pg.getSynch());
  threadNameToStart.putAll(pg.getThreadNameToStart());
  specialJoin.addAll(pg.getSpecialJoin());
  joinStmtToThread.putAll(pg.getJoinStmtToThread());
  threadAllocSites.addAll(pg.getThreadAllocSites());
  allocNodeToThread.putAll(pg.getAllocNodeToThread());
}
/**
 * Submit a request to the server which expects a list of execution items in
 * the response, and return a single QueuedItemResult parsed from the
 * response (the first item of the list).
 *
 * @param tempxml xml temp file (or null)
 * @param otherparams parameters for the request
 * @param requestPath the request path
 * @return a single QueuedItemResult
 * @throws com.dtolabs.rundeck.core.dispatcher.CentralDispatcherException if an error occurs
 */
private QueuedItemResult submitExecutionRequest(
    final File tempxml, final HashMap<String, String> otherparams, final String requestPath)
    throws CentralDispatcherException {
  final HashMap<String, String> params = new HashMap<String, String>();
  if (null != otherparams) {
    params.putAll(otherparams);
  }

  final WebserviceResponse response;
  try {
    response = serverService.makeRundeckRequest(requestPath, params, tempxml, null);
  } catch (MalformedURLException e) {
    throw new CentralDispatcherServerRequestException("Failed to make request", e);
  }
  validateResponse(response);

  final ArrayList<QueuedItem> items = parseExecutionListResult(response);
  if (null == items || items.isEmpty()) {
    return QueuedItemResultImpl.failed("Server response contained no success information.");
  }
  final QueuedItem first = items.iterator().next();
  return QueuedItemResultImpl.successful(
      "Succeeded queueing " + first.getName(), first.getId(), first.getUrl(), first.getName());
}
/**
 * Applies the styles registered for the given tag, and then for the
 * element's "class" attribute, to {@code props}. Values already present in
 * {@code props} take precedence over the style defaults.
 *
 * @param tag the element's tag name (matched case-insensitively)
 * @param props the element's properties; updated in place
 */
public void applyStyle(String tag, HashMap props) {
  mergeDefaults((HashMap) tagMap.get(tag.toLowerCase()), props);
  String className = (String) props.get("class");
  if (className == null) {
    return;
  }
  HashMap classStyle = (HashMap) classMap.get(className.toLowerCase());
  if (classStyle == null) {
    return;
  }
  // Consume the class attribute so it does not cascade further.
  props.remove("class");
  mergeDefaults(classStyle, props);
}

/**
 * Merges {@code defaults} under {@code props}: keys already present in
 * {@code props} keep their values, missing keys are filled in from
 * {@code defaults}. A null defaults map is a no-op.
 */
private static void mergeDefaults(HashMap defaults, HashMap props) {
  if (defaults == null) {
    return;
  }
  HashMap merged = new HashMap(defaults);
  merged.putAll(props);
  props.putAll(merged);
}
protected HashMap<String, Object> buildConfigParams(HostVO host) {
  // Start from the host details and layer the standard agent parameters
  // on top. Capacity hint: details plus the handful of fixed keys below.
  HashMap<String, Object> params = new HashMap<String, Object>(host.getDetails().size() + 5);
  params.putAll(host.getDetails());
  params.put("guid", host.getGuid());
  params.put("zone", Long.toString(host.getDataCenterId()));
  if (host.getPodId() != null) {
    params.put("pod", Long.toString(host.getPodId()));
  }
  if (host.getClusterId() != null) {
    params.put("cluster", Long.toString(host.getClusterId()));
    // Prefer the cluster's own guid; fall back to the host's "pool" detail.
    ClusterVO cluster = _clusterDao.findById(host.getClusterId());
    String poolGuid = cluster.getGuid() == null ? host.getDetail("pool") : cluster.getGuid();
    if (poolGuid != null && !poolGuid.isEmpty()) {
      params.put("pool", poolGuid);
    }
  }
  params.put("ipaddress", host.getPrivateIpAddress());
  params.put("secondary.storage.vm", "false");
  params.put(
      "max.template.iso.size", _configDao.getValue(Config.MaxTemplateAndIsoSize.toString()));
  params.put("migratewait", _configDao.getValue(Config.MigrateWait.toString()));
  return params;
}
/**
 * Reduces a ranking to at most 15 entries by repeatedly evicting the
 * entry with the lowest value (ties broken by the lexicographically
 * smaller key), then clips keys longer than 40 characters to their first
 * 40 characters. The input map is not modified.
 */
public static HashMap<String, Integer> cutRanked(HashMap<String, Integer> content) {
  // Work on a copy so the caller's map is untouched.
  HashMap<String, Integer> ranked = new HashMap<>();
  ranked.putAll(content);

  // Evict the lowest-ranked entry until at most 15 remain.
  while (ranked.size() > 15) {
    String lowestKey = (String) ranked.keySet().toArray()[0];
    int lowestValue = ranked.get(lowestKey);
    for (String candidate : ranked.keySet()) {
      int value = ranked.get(candidate);
      if (value < lowestValue || (value == lowestValue && candidate.compareTo(lowestKey) < 0)) {
        lowestKey = candidate;
        lowestValue = value;
      }
    }
    ranked.remove(lowestKey);
  }

  // Clip overly long keys to their first 40 characters.
  for (String key : new ArrayList<>(ranked.keySet())) {
    if (key != null && key.length() > 40) {
      int value = ranked.remove(key);
      ranked.put(key.substring(0, 40), value);
    }
  }
  return ranked;
}
/**
 * Builds the analytics parameter map for this event
 * ("PROFILE_ADDRESS_BOOK_PAGE_EXIT"). Each optional counter is added only
 * when non-null; the superclass's common parameters are merged in last and
 * therefore override any same-named keys. (Obfuscated source: field
 * semantics inferred from the key names -- verify against the schema.)
 */
public final Map<String, Object> a() {
  HashMap localHashMap = new HashMap();
  localHashMap.put("event_name", "PROFILE_ADDRESS_BOOK_PAGE_EXIT");
  if (exitEvent != null) {
    localHashMap.put("exit_event", exitEvent);
  }
  if (nonSnapchatterCount != null) {
    localHashMap.put("non_snapchatter_count", nonSnapchatterCount);
  }
  if (nonSnapchatterInviteCount != null) {
    localHashMap.put("non_snapchatter_invite_count", nonSnapchatterInviteCount);
  }
  if (nonSnapchatterInviteInSearchCount != null) {
    localHashMap.put("non_snapchatter_invite_in_search_count", nonSnapchatterInviteInSearchCount);
  }
  if (snapchatterAddCount != null) {
    localHashMap.put("snapchatter_add_count", snapchatterAddCount);
  }
  if (snapchatterAddInSearchCount != null) {
    localHashMap.put("snapchatter_add_in_search_count", snapchatterAddInSearchCount);
  }
  if (snapchatterCount != null) {
    localHashMap.put("snapchatter_count", snapchatterCount);
  }
  // Common event parameters from the superclass; merged last so they win.
  localHashMap.putAll(super.a());
  return localHashMap;
}
// Sort HashMap by value and return top 1000 private static HashMap sortByValues(HashMap map) { List list = new LinkedList(map.entrySet()); // Defined Custom Comparator here Collections.sort( list, new Comparator() { public int compare(Object o1, Object o2) { return ((Comparable) ((Map.Entry) (o2)).getValue()) .compareTo(((Map.Entry) (o1)).getValue()); } }); // Here I am copying the sorted list in HashMap // using LinkedHashMap to preserve the insertion order HashMap sortedHashMap = new LinkedHashMap(); for (Iterator it = list.iterator(); it.hasNext(); ) { Map.Entry entry = (Map.Entry) it.next(); sortedHashMap.put(entry.getKey(), entry.getValue()); } HashMap newresult = new LinkedHashMap<>(); Iterator atr = sortedHashMap.keySet().iterator(); int id = 0; if (sortedHashMap.size() < 1000) newresult.putAll(sortedHashMap); else { while (atr.hasNext() && id < 1000) { id++; String next = atr.next().toString(); newresult.put(next, sortedHashMap.get(next)); } } return newresult; }
@Override
public ASTNode transform(org.kframework.kil.MapBuiltin node) throws TransformerException {
  assert node.isLHSView() : "unsupported map " + node;

  // Translate every (key, value) entry of the KIL map builtin.
  HashMap<Term, Term> entries = new HashMap<Term, Term>(node.elements().size());
  for (Map.Entry<org.kframework.kil.Term, org.kframework.kil.Term> entry :
      node.elements().entrySet()) {
    entries.put((Term) entry.getKey().accept(this), (Term) entry.getValue().accept(this));
  }

  if (!node.hasViewBase()) {
    return new BuiltinMap(entries);
  }

  Term base = (Term) node.viewBase().accept(this);
  if (base instanceof MapUpdate) {
    MapUpdate mapUpdate = (MapUpdate) base;
    /* TODO(AndreiS): check key uniqueness */
    entries.putAll(mapUpdate.updateMap());
    return new MapUpdate(mapUpdate.map(), mapUpdate.removeSet(), entries);
  }
  /* base instanceof Variable */
  return new BuiltinMap(entries, (Variable) base);
}
/**
 * Downloads the captcha image and saves it to the phone's internal storage.
 * After the download completes, a broadcast is sent to the "One-Click
 * Sign-in" main application so that it shows the captcha input dialog.
 *
 * @param captchaUrl URL of the captcha image
 * @param ua user agent to use for the request
 * @param cookies session cookies; updated in place with cookies from the response
 * @param siteName name of the site being signed in to
 * @param user the user signing in
 * @param reason reason text passed along to the captcha dialog
 * @return true if the captcha was fetched, saved and the dialog broadcast sent
 */
private static boolean downloadCaptchaPic(String captchaUrl, String ua,
    HashMap<String, String> cookies, String siteName, String user, String reason) {
  Response res;
  boolean isSucceed = false;
  for (int i = 0; i < RETRY_TIMES; i++) {
    try {
      res = Jsoup.connect(captchaUrl).cookies(cookies).userAgent(ua).timeout(TIME_OUT)
          .ignoreContentType(true).referrer(captchaUrl).method(Method.GET).execute();
      // Keep the session alive: merge response cookies back into ours.
      cookies.putAll(res.cookies());
      try {
        deleteCaptchaFile(); // delete any leftover captcha image
        saveCaptchaToFile(res.bodyAsBytes()); // save the captcha image to a file
        // Broadcast to the "One-Click Sign-in" main app so it shows the captcha dialog.
        sendShowCaptchaDialogBC(siteName, user, reason);
        isSucceed = true;
        // Block this sign-in thread on a lock; it is released when the user
        // presses OK or Cancel in the captcha input dialog.
        pauseThread();
        break; // stop retrying
      } catch (Exception e) {
        // Saving the captcha to a file failed.
        isSucceed = false;
        e.printStackTrace();
      }
    } catch (IOException e) {
      // Fetching the captcha failed.
      isSucceed = false;
      e.printStackTrace();
    }
  }
  return isSucceed;
}
/** * Draw a complex graph to an image file in the requested format. * * @param baseFilePathNoExt the img base file name, no file extension * @param drawBlackAndWhite whether to draw in grayscale only * @param formats a list of img formats to write * @param cg the complex graph to draw * @param molInfoForChains info mapping chain IDs (like "A") to their macromolecule (MOL_ID in PDB * file, e.g., "1"). Give an empty one if you dont know * @return a list of file names that were written to disk, (as a map of formats to file names) */ public static HashMap<DrawTools.IMAGEFORMAT, String> drawComplexGraph( String baseFilePathNoExt, Boolean drawBlackAndWhite, DrawTools.IMAGEFORMAT[] formats, ComplexGraph cg, Map<String, String> molInfoForChains) { DrawResult drawRes = ComplexGraph.drawChainLevelComplexGraphG2D(drawBlackAndWhite, cg, molInfoForChains); // System.out.println("drawProteinGraph: Basefilepath is '" + baseFilePathNoExt + "'."); String svgFilePath = baseFilePathNoExt + ".svg"; HashMap<DrawTools.IMAGEFORMAT, String> resultFilesByFormat = new HashMap<DrawTools.IMAGEFORMAT, String>(); try { DrawTools.writeG2dToSVGFile(svgFilePath, drawRes); resultFilesByFormat.put(DrawTools.IMAGEFORMAT.SVG, svgFilePath); resultFilesByFormat.putAll( DrawTools.convertSVGFileToOtherFormats(svgFilePath, baseFilePathNoExt, drawRes, formats)); } catch (IOException ex) { DP.getInstance().e("Could not write protein graph file : '" + ex.getMessage() + "'."); } if (!Settings.getBoolean("plcc_B_silent")) { StringBuilder sb = new StringBuilder(); sb.append(" Output complex graph files: "); for (DrawTools.IMAGEFORMAT format : resultFilesByFormat.keySet()) { String ffile = new File(resultFilesByFormat.get(format)).getName(); sb.append("(").append(format.toString()).append(" => ").append(ffile).append(") "); } System.out.println(sb.toString()); } return resultFilesByFormat; }
/**
 * Submit a request to the server which expects an execution id in response,
 * and return a single QueuedItemResult parsed from the response.
 *
 * @param tempxml xml temp file (or null)
 * @param otherparams parameters for the request
 * @param requestPath the request path
 * @return a single QueuedItemResult
 * @throws com.dtolabs.rundeck.core.dispatcher.CentralDispatcherException if an error occurs
 */
private QueuedItemResult submitRunRequest(
    final File tempxml, final HashMap<String, String> otherparams, final String requestPath)
    throws CentralDispatcherException {
  final HashMap<String, String> params = new HashMap<String, String>();
  if (null != otherparams) {
    params.putAll(otherparams);
  }

  final WebserviceResponse response;
  try {
    response = serverService.makeRundeckRequest(requestPath, params, tempxml, null);
  } catch (MalformedURLException e) {
    throw new CentralDispatcherServerRequestException("Failed to make request", e);
  }
  validateResponse(response);

  // Success requires both an execution element and its id attribute.
  final Document resultDoc = response.getResultDoc();
  final Node idNode = resultDoc.selectSingleNode("/result/execution/@id");
  if (null == resultDoc.selectSingleNode("/result/execution") || null == idNode) {
    return QueuedItemResultImpl.failed("Server response contained no success information.");
  }
  final String succeededId = idNode.getStringValue();
  final String name = "adhoc";
  final String url = makeAbsoluteURL(createExecutionURL(succeededId));
  logger.info("\t[" + succeededId + "] <" + url + ">");
  return QueuedItemResultImpl.successful("Succeeded queueing " + name, succeededId, url, name);
}
// Registers a human-readable display label for every known option number,
// e.g. 11 -> "URI PATH (11)"; used when rendering/logging options.
static {
  OPTIONS.putAll(
      ImmutableMap.<Integer, String>builder()
          .put(IF_MATCH, "IF MATCH (" + IF_MATCH + ")")
          .put(URI_HOST, "URI HOST (" + URI_HOST + ")")
          .put(ETAG, "ETAG (" + ETAG + ")")
          .put(IF_NONE_MATCH, "IF NONE MATCH (" + IF_NONE_MATCH + ")")
          .put(OBSERVE, "OBSERVE (" + OBSERVE + ")")
          .put(URI_PORT, "URI PORT (" + URI_PORT + ")")
          .put(LOCATION_PATH, "LOCATION PATH (" + LOCATION_PATH + ")")
          .put(URI_PATH, "URI PATH (" + URI_PATH + ")")
          .put(CONTENT_FORMAT, "CONTENT FORMAT (" + CONTENT_FORMAT + ")")
          .put(MAX_AGE, "MAX AGE (" + MAX_AGE + ")")
          .put(URI_QUERY, "URI QUERY (" + URI_QUERY + ")")
          .put(ACCEPT, "ACCEPT (" + ACCEPT + ")")
          .put(LOCATION_QUERY, "LOCATION QUERY (" + LOCATION_QUERY + ")")
          .put(BLOCK_2, "BLOCK 2 (" + BLOCK_2 + ")")
          .put(BLOCK_1, "BLOCK 1 (" + BLOCK_1 + ")")
          .put(SIZE_2, "SIZE 2 (" + SIZE_2 + ")")
          .put(PROXY_URI, "PROXY URI (" + PROXY_URI + ")")
          .put(PROXY_SCHEME, "PROXY SCHEME (" + PROXY_SCHEME + ")")
          .put(SIZE_1, "SIZE 1 (" + SIZE_1 + ")")
          .put(ENDPOINT_ID_1, "ENDPOINT ID 1 (" + ENDPOINT_ID_1 + ")")
          .put(ENDPOINT_ID_2, "ENDPOINT ID 2 (" + ENDPOINT_ID_2 + ")")
          .build());
}
@Override
protected void createSearchControl(
    Composite searchComposite, HashMap<String, Control> bindedControls) {
  // Fix the search area's height to 65px within the parent's form layout.
  searchComposite.setLayout(new FormLayout());
  FormData searchAreaData = (FormData) searchComposite.getLayoutData();
  searchAreaData.bottom = new FormAttachment(0, 65);
  searchComposite.setLayoutData(searchAreaData);

  // Child composite that fills the whole search area.
  Composite mainSearch = new Composite(searchComposite, SWT.NONE);
  FormData fillParent = new FormData();
  fillParent.left = new FormAttachment(0, 0);
  fillParent.top = new FormAttachment(0, 0);
  fillParent.right = new FormAttachment(100, 0);
  fillParent.bottom = new FormAttachment(100, 0);
  mainSearch.setLayoutData(fillParent);

  // Build the actual search controls from the XML UI model and expose
  // their bindings to the caller.
  String modelFilePath = "com/hhh/platform/ops/ui/run/LogRunViewSearcher.xml";
  InputStream xmlis = getClass().getClassLoader().getResourceAsStream(modelFilePath);
  XMLModelControlsCreator controlsCreator = new XMLModelControlsCreator(xmlis);
  controlsCreator.createContents(mainSearch);
  bindedControls.putAll(controlsCreator.getBindedControls());
}
/** * Service method for backtracking with serialization-based saving of {@link ConstraintNetwork}s. * This method backs up {@link ConstraintNetwork}s before branching. */ private HashMap<ConstraintSolver, byte[]> backupCNs(MultiConstraintSolver conSol) { // Here we want to save the CNs ByteArrayOutputStream bos = null; ObjectOutputStream oos = null; HashMap<ConstraintSolver, byte[]> currentLevel = new HashMap<ConstraintSolver, byte[]>(); try { bos = new ByteArrayOutputStream(); oos = new ObjectOutputStream(bos); for (ConstraintSolver cs : conSol.getConstraintSolvers()) { logger.finest("Backing up CN of " + cs.getClass().getSimpleName()); ConstraintNetwork cn = cs.getConstraintNetwork(); oos.writeObject(cn); byte[] backup = bos.toByteArray(); currentLevel.put(cs, backup); if (cs instanceof MultiConstraintSolver) { // System.out.println("RECURSIVE on " + cs.getClass().getSimpleName()); HashMap<ConstraintSolver, byte[]> lower = backupCNs((MultiConstraintSolver) cs); currentLevel.putAll(lower); } } return currentLevel; } catch (NotSerializableException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } return null; }
/**
 * Creates the defined indexes on a partitioned region.
 *
 * @param indexes receives the successfully created indexes
 * @param region the partitioned region to create the indexes on
 * @param icds the definitions of the indexes to create
 * @param exceptionsMap receives the per-index creation exceptions
 * @return true if a creation failure occurred that the caller must handle;
 *     false on success
 */
private boolean createDefinedIndexesForPR(
    HashSet<Index> indexes,
    PartitionedRegion region,
    HashSet<IndexCreationData> icds,
    HashMap<String, Exception> exceptionsMap) {
  try {
    // 'region' is already typed PartitionedRegion; no cast needed.
    indexes.addAll(region.createIndexes(false, icds));
  } catch (IndexCreationException e1) {
    // NOTE(review): unlike the other failure paths this does not return
    // true -- preserved as-is, but verify that is intentional.
    logger.info(
        LocalizedMessage.create(
            LocalizedStrings
                .DefaultQueryService_EXCEPTION_WHILE_CREATING_INDEX_ON_PR_DEFAULT_QUERY_PROCESSOR),
        e1);
  } catch (CacheException | ForceReattemptException e1) {
    logger.info(
        LocalizedMessage.create(
            LocalizedStrings
                .DefaultQueryService_EXCEPTION_WHILE_CREATING_INDEX_ON_PR_DEFAULT_QUERY_PROCESSOR),
        e1);
    return true;
  } catch (MultiIndexCreationException e) {
    // Collect the per-index failures for the caller.
    exceptionsMap.putAll(e.getExceptionsMap());
    return true;
  }
  return false;
}
@Override
public int read(
    String table, String key, Set<String> fields, HashMap<String, ByteIterator> result) {
  logger.debug("readkey: " + key + " from table: " + table);
  GetItemRequest request = new GetItemRequest(table, createPrimaryKey(key));
  request.setAttributesToGet(fields);
  request.setConsistentRead(consistentRead);

  final GetItemResult res;
  try {
    res = dynamoDB.getItem(request);
  } catch (AmazonServiceException ex) {
    logger.error(ex.getMessage());
    return SERVER_ERROR;
  } catch (AmazonClientException ex) {
    logger.error(ex.getMessage());
    return CLIENT_ERROR;
  }

  // A missing item is not an error: return OK with an untouched result map.
  if (res.getItem() != null) {
    result.putAll(extractResult(res.getItem()));
    logger.debug("Result: " + res.toString());
  }
  return OK;
}
@Override public void authRequest(Uri url, HashMap<String, String> doneSoFar) { if (mProgressDialog.isShowing()) { // should always be showing here mProgressDialog.dismiss(); } // add our list of completed uploads to "completed" // and remove them from our toSend list. ArrayList<Long> workingSet = new ArrayList<Long>(); Collections.addAll(workingSet, mInstancesToSend); if (doneSoFar != null) { Set<String> uploadedInstances = doneSoFar.keySet(); Iterator<String> itr = uploadedInstances.iterator(); while (itr.hasNext()) { Long removeMe = Long.valueOf(itr.next()); boolean removed = workingSet.remove(removeMe); if (removed) { Log.i(t, removeMe + " was already sent, removing from queue before restarting task"); } } mUploadedInstances.putAll(doneSoFar); } // and reconstruct the pending set of instances to send Long[] updatedToSend = new Long[workingSet.size()]; for (int i = 0; i < workingSet.size(); ++i) { updatedToSend[i] = workingSet.get(i); } mInstancesToSend = updatedToSend; mUrl = url.toString(); showDialog(AUTH_DIALOG); }
/**
 * Method to add the specified Map's values under their keys here.
 * Delegates to the backing map, marks this collection dirty, and -- when
 * owned and outside an active transaction -- pushes the change through as a
 * nontransactional update immediately.
 *
 * @param m The map
 */
public void putAll(java.util.Map m) {
  delegate.putAll(m);
  makeDirty();
  // Outside a transaction, persist the change right away.
  if (ownerOP != null && !ownerOP.getExecutionContext().getTransaction().isActive()) {
    ownerOP.getExecutionContext().processNontransactionalUpdate();
  }
}