/** * @description: Lzw encodes the supplied input * @method encode * @param {string} s * @param {function} callback */ public static List<Integer> encode(String input) { // Build the dictionary. int dictSize = 256; Map<String, Integer> dictionary = new HashMap<String, Integer>(); for (int i = 0; i < 256; i++) { dictionary.put("" + (char) i, i); } String w = ""; List<Integer> result = new ArrayList<Integer>(); for (char c : input.toCharArray()) { String wc = w + c; if (dictionary.containsKey(wc)) { w = wc; } else { result.add(dictionary.get(w)); // Add wc to the dictionary. dictionary.put(wc, dictSize++); w = "" + c; } } // Output the code for w. if (!w.equals("")) { result.add(dictionary.get(w)); } return result; }
/*
 * This method builds up a multi dimensional hash structure from the parameters so that nested
 * keys such as "book.author.name" can be addressed like params['author'].name
 *
 * This also allows data binding to occur for only a subset of the properties in the parameter map
 */
private void processNestedKeys(
    HttpServletRequest request, Map requestMap, String key, String nestedKey, Map nestedLevel) {
  final int nestedIndex = nestedKey.indexOf('.');
  if (nestedIndex > -1) {
    // We have at least one sub-key, so extract the first element
    // of the nested key as the prefix. In other words, if we have
    // 'nestedKey' == "a.b.c", the prefix is "a".
    final String nestedPrefix = nestedKey.substring(0, nestedIndex);
    // Let's see if we already have a value in the current map
    // for the prefix.
    Object prefixValue = nestedLevel.get(nestedPrefix);
    if (prefixValue == null) {
      // No value. So, since there is at least one sub-key,
      // we create a sub-map for this prefix.
      prefixValue = new GrailsParameterMap(new HashMap(), request);
      nestedLevel.put(nestedPrefix, prefixValue);
    }
    // If the value against the prefix is a map, then we store
    // the sub-keys in that map. (A non-map value means the prefix is
    // already bound to a scalar parameter and is left untouched.)
    if (prefixValue instanceof Map) {
      Map nestedMap = (Map) prefixValue;
      if (nestedIndex < nestedKey.length() - 1) {
        final String remainderOfKey = nestedKey.substring(nestedIndex + 1, nestedKey.length());
        // Bind the flat parameter value under the remaining key, then
        // recurse when the remainder itself is still dotted (e.g. "b.c").
        nestedMap.put(remainderOfKey, getParameterValue(requestMap, key));
        if (remainderOfKey.indexOf('.') > -1) {
          processNestedKeys(request, requestMap, key, remainderOfKey, nestedMap);
        }
      }
    }
  }
}
/** Verifies that a three-entry map is exposed as a collection of size 3. */
public void testAsCollectionWithMap() {
  String[][] entries = {{"A", "abc"}, {"B", "def"}, {"C", "xyz"}};
  Map sample = new HashMap();
  for (String[] entry : entries) {
    sample.put(entry[0], entry[1]);
  }
  assertAsCollection(sample, 3);
}
/** * @description: Decodes the supplied lzw encoded string * @method decode * @param {string} s * @param {function} callback */ public static String decode(List<Integer> encoded) { // Build the dictionary. int dictSize = 256; Map<Integer, String> dictionary = new HashMap<Integer, String>(); for (int i = 0; i < 256; i++) { dictionary.put(i, "" + (char) i); } String w = "" + (char) (int) encoded.remove(0); StringBuffer result = new StringBuffer(w); for (int k : encoded) { String entry; if (dictionary.containsKey(k)) { entry = dictionary.get(k); } else if (k == dictSize) { entry = w + w.charAt(0); } else { throw new IllegalArgumentException("Bad compressed k: " + k); } result.append(entry); // Add w+entry[0] to the dictionary. dictionary.put(dictSize++, w + entry.charAt(0)); w = entry; } return result.toString(); }
@Override public DDF loadFromJDBC(JDBCDataSourceDescriptor dataSource) throws DDFException { SparkDDFManager sparkDDFManager = (SparkDDFManager) mDDFManager; HiveContext sqlContext = sparkDDFManager.getHiveContext(); JDBCDataSourceCredentials cred = (JDBCDataSourceCredentials) dataSource.getDataSourceCredentials(); String fullURL = dataSource.getDataSourceUri().getUri().toString(); if (cred.getUsername() != null && !cred.getUsername().equals("")) { fullURL += String.format("?user=%s&password=%s", cred.getUsername(), cred.getPassword()); } Map<String, String> options = new HashMap<String, String>(); options.put("url", fullURL); options.put("dbtable", dataSource.getDbTable()); DataFrame df = sqlContext.load("jdbc", options); DDF ddf = sparkDDFManager.newDDF( sparkDDFManager, df, new Class<?>[] {DataFrame.class}, null, SparkUtils.schemaFromDataFrame(df)); // TODO? ddf.getRepresentationHandler().get(RDD.class, Row.class); ddf.getMetaDataHandler().setDataSourceDescriptor(dataSource); return ddf; }
/**
 * Returns a paged, optionally sorted and filtered JSON listing of orders
 * for the admin console.
 *
 * @param page 1-based page number supplied by the grid widget
 * @param limit page size
 * @param sort optional JSON array of sort descriptors; only the first
 *     descriptor's "direction" and "property" entries are applied
 * @param query optional free-text search term passed to the repository
 * @param req used to extract column filters via the decorator map
 * @return serialized model containing "orders" and the total "count"
 * @throws Exception propagated from deserialization or repository access
 */
@SuppressWarnings("unchecked")
@ResponseBody
@RequestMapping(value = "/admin/orders/list.ajax", method = RequestMethod.GET)
public ResponseEntity<byte[]> list(
    @RequestParam(value = "page") int page,
    @RequestParam(value = "limit") int limit,
    @RequestParam(value = "sort", required = false) String sort,
    @RequestParam(value = "query", required = false) String query,
    HttpServletRequest req)
    throws Exception {
  PageRequest request;
  if (StringUtils.hasText(sort)) {
    // The grid sends sort as JSON, e.g. [{"property":"...","direction":"ASC"}];
    // only the first descriptor is honoured.
    List<Map<String, String>> sortParams =
        (List<Map<String, String>>) jsonUtils.deserialize(sort);
    Map<String, String> sortProperties = sortParams.get(0);
    String direction = sortProperties.get("direction");
    String property = sortProperties.get("property");
    // Page numbers are 1-based in the UI but 0-based in PageRequest.
    request = new PageRequest(page - 1, limit, ("ASC".equals(direction) ? ASC : DESC), property);
  } else {
    request = new PageRequest(page - 1, limit);
  }
  List<Filter> filters = FilterUtils.extractFilters(req, filterDecoratorMap);
  List<Order> orders = orderRepository.pageByOrderId(request, query, filters);
  Map<String, Object> model = new HashMap<String, Object>();
  model.put("orders", orders);
  model.put("count", orderRepository.count(query, filters));
  // Discounts are excluded from serialization — presumably to keep the
  // listing payload small; confirm against the grid's column set.
  String[] excludes = new String[] {"orders.orderDiscounts"};
  return responseEntityUtils.buildResponse(model, excludes);
}
/**
 * Persists updated additional instructions against an order and returns
 * the saved order as JSON; failures are reported as {success:false}.
 */
@SuppressWarnings("unchecked")
@ResponseBody
@RequestMapping(value = "/order/updateAdditionalInstructions.ajax", method = RequestMethod.POST)
public ResponseEntity<byte[]> updateAdditionalInstructions(
    @RequestParam(value = "orderId") String orderId,
    @RequestParam(value = "additionalInstructions") String additionalInstructions)
    throws Exception {
  if (LOGGER.isDebugEnabled()) {
    LOGGER.debug("Updating additional instructions for orderId: " + orderId);
  }
  Map<String, Object> payload = new HashMap<String, Object>();
  try {
    // Load, mutate and persist; saveOrder returns the stored copy.
    Order updated = orderRepository.findByOrderId(orderId);
    updated.setAdditionalInstructions(additionalInstructions);
    updated = orderRepository.saveOrder(updated);
    payload.put("success", true);
    payload.put("order", updated);
  } catch (Exception ex) {
    LOGGER.error("", ex);
    payload.put("success", false);
    payload.put("message", ex.getMessage());
  }
  return buildOrderResponse(payload);
}
/**
 * Returns the current session's order (if any) as JSON, recalculating its
 * costs and refreshing the checkout-eligibility flags stored on the session.
 *
 * @param request used to obtain/create the HTTP session holding
 *     "orderid" and "restaurantid"
 * @return serialized model with "success" and (possibly null) "order"
 * @throws Exception never thrown directly; failures are caught and reported
 *     in the model
 */
@SuppressWarnings("unchecked")
@ResponseBody
@RequestMapping(value = "/order/getOrder.ajax", method = RequestMethod.POST)
public ResponseEntity<byte[]> getOrder(HttpServletRequest request) throws Exception {
  Map<String, Object> model = new HashMap<String, Object>();
  try {
    HttpSession session = request.getSession(true);
    String orderId = (String) session.getAttribute("orderid");
    String restaurantId = (String) session.getAttribute("restaurantid");
    Order order = null;
    if (orderId != null) {
      order = orderRepository.findByOrderId(orderId);
      // If the current order has no items but is linked to another restaurant,
      // re-point it at the session's restaurant now.
      if (order.getOrderItems().size() == 0 && !order.getRestaurantId().equals(restaurantId)) {
        order.setRestaurant(restaurantRepository.findByRestaurantId(restaurantId));
      }
      order.updateCosts();
      // Cache checkout/payment eligibility on the session for the UI.
      session.setAttribute("cancheckout", order.getCanCheckout());
      session.setAttribute("cansubmitpayment", order.getCanSubmitPayment());
    }
    model.put("success", true);
    // NOTE(review): "order" is null when the session has no orderid; the
    // client appears to treat that as "no active order" — confirm.
    model.put("order", order);
  } catch (Exception ex) {
    LOGGER.error("", ex);
    model.put("success", false);
    model.put("message", ex.getMessage());
  }
  return buildOrderResponse(model);
}
protected Map<String, String> createInitialParameters(ASTNode s) throws Exception { Map<String, String> parameters = new LinkedHashMap<String, String>(); // Print offset information: parameters.put("start", Integer.toString(s.sourceStart())); parameters.put("end", Integer.toString(s.sourceEnd())); // Print modifiers: if (s instanceof Declaration) { Declaration declaration = (Declaration) s; StringBuilder buf = new StringBuilder(); if (declaration.isAbstract()) { buf.append(",abstract"); } if (declaration.isFinal()) { buf.append(",final"); } if (declaration.isPrivate()) { buf.append(",private"); } if (declaration.isProtected()) { buf.append(",protected"); } if (declaration.isPublic()) { buf.append(",public"); } if (declaration.isStatic()) { buf.append(",static"); } String modifiers = buf.toString(); parameters.put("modifiers", modifiers.length() > 0 ? modifiers.substring(1) : modifiers); } return parameters; }
/**
 * Emits an opening &lt;ArrayVariableReference&gt; tag carrying the array
 * type and variable name; children are visited by the default traversal.
 */
public boolean visit(ArrayVariableReference s) throws Exception {
  Map<String, String> attrs = createInitialParameters(s);
  attrs.put("type", ArrayVariableReference.getArrayType(s.getArrayType()));
  attrs.put("name", s.getName());
  xmlWriter.startTag("ArrayVariableReference", attrs);
  // true -> continue descending into child nodes.
  return true;
}
/**
 * Emits a &lt;LambdaFunctionDeclaration&gt; element with its arguments,
 * optional lexical variables and body, then returns false because the
 * children were already traversed explicitly here.
 */
public boolean visit(LambdaFunctionDeclaration s) throws Exception {
  Map<String, String> attrs = createInitialParameters(s);
  attrs.put("isReference", Boolean.toString(s.isReference()));
  if (s.isStatic()) {
    // Only emitted when true, keeping the XML compact.
    attrs.put("isStatic", Boolean.toString(s.isStatic()));
  }
  xmlWriter.startTag("LambdaFunctionDeclaration", attrs);

  xmlWriter.startTag("Arguments", new HashMap<String, String>());
  for (FormalParameter formal : s.getArguments()) {
    formal.traverse(this);
  }
  xmlWriter.endTag("Arguments");

  Collection<? extends Expression> lexicalVars = s.getLexicalVars();
  if (lexicalVars != null) {
    xmlWriter.startTag("LexicalVars", new HashMap<String, String>());
    for (Expression lexical : lexicalVars) {
      lexical.traverse(this);
    }
    xmlWriter.endTag("LexicalVars");
  }

  s.getBody().traverse(this);
  return false;
}
/**
 * Processes a single trace entry: constructor callbacks associate the newly
 * constructed object with the thread's current transaction; every other
 * callback begins a task on the owning thread and advances the global end
 * time.
 *
 * NOTE(review): object_tasks uses dummy_task as a sentinel meaning "touched
 * by more than one task" — once two distinct tasks claim the same object id
 * the association is deliberately invalidated.
 */
private void outputEntry(TraceEntry te) {
  ThreadData td = te.getThread();
  if (td == null) return; // entry not attributable to a thread
  // System.err.println("TRACE: " + te);
  BdynCallback cb = bdyn_factory.getCallback(te.getEntryLocation());
  if (cb == null) return; // location has no registered callback
  if (cb.getCallbackType() == CallbackType.CONSTRUCTOR) {
    OutputTask ot = td.getCurrentTransaction();
    if (ot == null) return;
    // Optionally ignore associations made from the main task.
    if (ot.isMainTask() && !BdynFactory.getOptions().useMainTask()) return;
    int i0 = te.getObject1();
    if (i0 != 0) {
      // System.err.println("ASSOC TASK " + i0 + " " + te.getObject2() + " " +
      // ot.getTaskRoot().getDisplayName());
      OutputTask ot1 = object_tasks.get(i0);
      // First claimant wins; a second distinct claimant poisons the slot.
      if (ot1 == null) object_tasks.put(i0, ot);
      else if (ot1 != dummy_task && ot1 != ot) object_tasks.put(i0, dummy_task);
    }
    return;
  }
  td.beginTask(te);
  // Track the latest timestamp observed so far.
  end_time = Math.max(end_time, te.getTime());
}
/**
 * Deletes the users identified by the comma-separated "ids" request
 * parameter and writes a JSON status object ({"msg":"ok"} or
 * {"msg":"error"}) to the response as UTF-8.
 *
 * @param request must carry an "ids" parameter such as "1,2,3"
 * @param response receives the JSON result
 */
@RequestMapping("/user_del")
public void delUser(HttpServletRequest request, HttpServletResponse response) {
  Map<String, Object> resMap = new HashMap<String, Object>();
  String ids = request.getParameter("ids");
  logger.debug("ids:" + ids);
  // NOTE(review): isNoneBlank on a single argument behaves like isNotBlank;
  // the latter states the intent more clearly.
  if (StringUtils.isNoneBlank(ids)) {
    // split(..., -1) keeps trailing empty tokens; such tokens make parseInt
    // throw NumberFormatException, matching the previous behaviour for bad
    // input. split never returns null, so no null-check is needed.
    String[] idTokens = ids.split(",", -1);
    if (idTokens.length > 0) {
      // Box directly into the target array instead of going through an
      // intermediate List and a manual copy loop; Integer.valueOf replaces
      // the deprecated new Integer(String) constructor.
      Integer[] idArray = new Integer[idTokens.length];
      for (int i = 0; i < idTokens.length; i++) {
        idArray[i] = Integer.valueOf(idTokens[i]);
      }
      userService.delUserById(idArray);
      resMap.put("msg", "ok");
    } else {
      resMap.put("msg", "error");
    }
  }
  try {
    response.setCharacterEncoding("utf-8");
    response.setHeader("Content-type", "text/html;charset=UTF-8");
    PrintWriter out = response.getWriter();
    Gson gson = new Gson();
    logger.debug("json输出:" + gson.toJson(resMap));
    out.println(gson.toJson(resMap));
    out.flush();
    out.close();
  } catch (IOException e) {
    // Log instead of dumping to stderr so the failure reaches the app log.
    logger.error("Failed to write delete-user response", e);
  }
}
/**
 * Sets — or, when {@code value} is null, removes — an entry in the
 * two-level transient-property table keyed by {@code key1} then
 * {@code key2}. Inner maps left empty by a removal are pruned so the outer
 * table never retains dead levels. All access is serialized on
 * {@code transient_properties}.
 */
public void setTransientProperty(Object key1, Object key2, Object value) {
  synchronized (transient_properties) {
    Map<Object, Object> inner = (Map<Object, Object>) transient_properties.get(key1);
    if (value == null) {
      // Removal: nothing to do when the first level does not exist.
      if (inner == null) {
        return;
      }
      inner.remove(key2);
      if (inner.isEmpty()) {
        transient_properties.remove(key1);
      }
    } else {
      // Insertion: lazily create the first level before storing.
      if (inner == null) {
        inner = new HashMap<Object, Object>();
        transient_properties.put(key1, inner);
      }
      inner.put(key2, value);
    }
  }
}
/**
 * Renders the table's frozen rows into their own tbody section. Each row is
 * exposed to the page through the table's "var" (and optional row-index
 * var) request-scope attributes before being encoded.
 *
 * <p>Fix: the row index was declared but never advanced, so every frozen
 * row was encoded with index 0 (and rowIndexVar always saw 0).
 *
 * @param context current faces context
 * @param table the table whose frozen rows are rendered
 * @throws IOException if writing to the response fails
 */
protected void encodeFrozenRows(FacesContext context, DataTable table) throws IOException {
  Collection<?> frozenRows = table.getFrozenRows();
  if (frozenRows == null || frozenRows.isEmpty()) {
    return;
  }
  ResponseWriter writer = context.getResponseWriter();
  String clientId = table.getClientId(context);
  String var = table.getVar();
  String rowIndexVar = table.getRowIndexVar();
  Map<String, Object> requestMap = context.getExternalContext().getRequestMap();
  writer.startElement("tbody", null);
  writer.writeAttribute("class", DataTable.DATA_CLASS, null);
  int index = 0;
  for (Iterator<? extends Object> it = frozenRows.iterator(); it.hasNext(); ) {
    requestMap.put(var, it.next());
    if (rowIndexVar != null) {
      requestMap.put(rowIndexVar, index);
    }
    encodeRow(context, table, clientId, index, rowIndexVar);
    index++; // advance so each frozen row gets its own index
  }
  writer.endElement("tbody");
}
/**
 * Resolves (and caches per location) the source containers for the plug-in
 * installed at the given location. Workspace models that match the location
 * contribute their project's source containers; otherwise external models
 * and archive containers are consulted.
 *
 * @param location file-system location of the plug-in
 * @param id plug-in identifier used to look up its model entry
 * @return the resolved containers (also stored in fSourceContainerMap)
 * @throws CoreException propagated from container resolution
 */
ISourceContainer[] getSourceContainers(String location, String id) throws CoreException {
  // Fast path: previously resolved locations are served from the cache.
  ISourceContainer[] containers = (ISourceContainer[]) fSourceContainerMap.get(location);
  if (containers != null) {
    return containers;
  }
  ArrayList result = new ArrayList();
  ModelEntry entry = MonitorRegistry.findEntry(id);
  boolean match = false;
  IMonitorModelBase[] models = entry.getWorkspaceModels();
  for (int i = 0; i < models.length; i++) {
    if (isPerfectMatch(models[i], new Path(location))) {
      IResource resource = models[i].getUnderlyingResource();
      // if the plug-in matches a workspace model,
      // add the project and any libraries not coming via a container
      // to the list of source containers, in that order
      if (resource != null) {
        addProjectSourceContainers(resource.getProject(), result);
      }
      match = true;
      break;
    }
  }
  if (!match) {
    File file = new File(location);
    if (file.isFile()) {
      // in case of linked plug-in projects that map to an external JARd plug-in,
      // use source container that maps to the library in the linked project.
      ISourceContainer container = getArchiveSourceContainer(location);
      if (container != null) {
        containers = new ISourceContainer[] {container};
        fSourceContainerMap.put(location, containers);
        return containers;
      }
    }
    models = entry.getExternalModels();
    for (int i = 0; i < models.length; i++) {
      if (isPerfectMatch(models[i], new Path(location))) {
        // try all source zips found in the source code locations
        IClasspathEntry[] entries = MDEClasspathContainer.getExternalEntries(models[i]);
        for (int j = 0; j < entries.length; j++) {
          IRuntimeClasspathEntry rte = convertClasspathEntry(entries[j]);
          if (rte != null) result.add(rte);
        }
        break;
      }
    }
  }
  // Convert the accumulated runtime entries to containers and cache them.
  IRuntimeClasspathEntry[] entries =
      (IRuntimeClasspathEntry[]) result.toArray(new IRuntimeClasspathEntry[result.size()]);
  containers = JavaRuntime.getSourceContainers(entries);
  fSourceContainerMap.put(location, containers);
  return containers;
}
/**
 * Regression test for JIRA-165 under the ASM optimizer: repeatedly
 * evaluates one compiled expression while alternating the receiver between
 * a subclass and a base-class instance, ensuring the optimized accessor is
 * not wrongly specialized to a single concrete type.
 */
public void testJIRA165b() {
  OptimizerFactory.setDefaultOptimizer("ASM");
  A b = new B();
  A a = new A();
  ParserContext context = new ParserContext();
  Object expression = MVEL.compileExpression("a.bar(value)", context);
  for (int i = 0; i < 100; i++) {
    System.out.println("i: " + i);
    System.out.flush();
    // Alternate receivers each iteration: subclass first, then base class.
    for (A receiver : new A[] {b, a}) {
      Map<String, Object> variables = new HashMap<String, Object>();
      variables.put("a", receiver);
      variables.put("value", 123);
      executeExpression(expression, variables);
    }
  }
}
/** Emits an opening &lt;PHPDocTag&gt; tag carrying the tag kind and raw value. */
public boolean visit(PHPDocTag s) throws Exception {
  Map<String, String> attrs = createInitialParameters(s);
  attrs.put("tagKind", PHPDocTag.getTagKind(s.getTagKind()));
  attrs.put("value", s.getValue());
  xmlWriter.startTag("PHPDocTag", attrs);
  // true -> continue descending into child nodes.
  return true;
}
/**
 * Registers filter-value decorators: order-status values are prefixed with
 * "ORDER STATUS " and every value has its spaces normalized to underscores
 * before being handed to the repository filters.
 */
public OrderController() {
  filterDecoratorMap.put(
      "orderStatus",
      new FilterValueDecorator() {
        @Override
        public String[] decorateValues(String[] values) {
          String[] decorated = new String[values.length];
          for (int i = 0; i < values.length; i++) {
            decorated[i] = ("ORDER STATUS " + values[i]).replaceAll(" ", "_");
          }
          return decorated;
        }
      });
  filterDecoratorMap.put(
      "orderNotificationStatus",
      new FilterValueDecorator() {
        @Override
        public String[] decorateValues(String[] values) {
          String[] decorated = new String[values.length];
          for (int i = 0; i < values.length; i++) {
            decorated[i] = values[i].replaceAll(" ", "_");
          }
          return decorated;
        }
      });
}
/** Emits an opening &lt;Scalar&gt; tag carrying the scalar's type and raw value. */
public boolean visit(Scalar s) throws Exception {
  Map<String, String> attrs = createInitialParameters(s);
  attrs.put("type", s.getType());
  attrs.put("value", s.getValue());
  xmlWriter.startTag("Scalar", attrs);
  // true -> continue descending into child nodes.
  return true;
}
/**
 * Empties an order (items and discounts), re-associates it with the given
 * restaurant, and returns the persisted result as JSON.
 */
@SuppressWarnings("unchecked")
@ResponseBody
@RequestMapping(value = "/order/clearOrder.ajax", method = RequestMethod.POST)
public ResponseEntity<byte[]> clearOrder(
    @RequestParam(value = "orderId") String orderId,
    @RequestParam(value = "restaurantId") String restaurantId)
    throws Exception {
  if (LOGGER.isDebugEnabled()) {
    LOGGER.debug("Clearing order for orderId: " + orderId);
  }
  Map<String, Object> payload = new HashMap<String, Object>();
  try {
    Order target = orderRepository.findByOrderId(orderId);
    target.setRestaurant(restaurantRepository.findByRestaurantId(restaurantId));
    // Drop everything attached to the order before saving it back.
    target.getOrderItems().clear();
    target.getOrderDiscounts().clear();
    target = orderRepository.saveOrder(target);
    payload.put("success", true);
    payload.put("order", target);
  } catch (Exception ex) {
    LOGGER.error("", ex);
    payload.put("success", false);
    payload.put("message", ex.getMessage());
  }
  return buildOrderResponse(payload);
}
/**
 * Recursively assigns a level (path length from {@code root}) to every
 * vertex reachable from {@code root}, recording results in
 * {@code vertexLevelMap}.
 *
 * @param vertexLevelMap map of vertex -> MutableInteger level; created when null
 * @param digraph graph to walk via its outgoing arcs
 * @param root start vertex; assigned level 0 if not yet present in the map
 * @param longest when true each vertex keeps the longest path length found;
 *     when false, the shortest
 * @return the populated vertexLevelMap
 *
 * NOTE(review): on a cyclic graph with longest=true this recursion can loop
 * indefinitely — presumably callers only pass DAGs; verify.
 */
public static Map computeLevels(
    Map vertexLevelMap, DigraphIteration digraph, Object root, boolean longest) {
  if (vertexLevelMap == null) vertexLevelMap = new HashMap();
  MutableInteger rootLevel = (MutableInteger) vertexLevelMap.get(root);
  if (rootLevel == null) {
    rootLevel = new MutableInteger(0);
    vertexLevelMap.put(root, rootLevel);
  }
  for (ArcIterator i = digraph.outgoingIterator(root); i.hasNext(); ) {
    i.next();
    Object child = i.getDestination();
    int childLevelCandidate = rootLevel.intValue() + 1;
    MutableInteger childLevel = (MutableInteger) vertexLevelMap.get(child);
    if (childLevel == null) {
      // First visit: record the candidate level and descend.
      childLevel = new MutableInteger(childLevelCandidate);
      vertexLevelMap.put(child, childLevel);
      computeLevels(vertexLevelMap, digraph, child, longest);
    } else if ((longest && childLevel.intValue() < childLevelCandidate)
        || (!longest && childLevel.intValue() > childLevelCandidate)) {
      // A better (longer/shorter) path was found: update and re-propagate
      // the improved level through the child's descendants.
      childLevel.setValue(childLevelCandidate);
      computeLevels(vertexLevelMap, digraph, child, longest);
    }
  }
  return vertexLevelMap;
}
/** {@inheritDoc} */
public List<Map<String, String>> getExportGroups(final String language) {
  final List<DataGroup> dataGroups = dataDescriptorResolver.getGroups();
  // Sorted case-insensitively by label, then by name. NOTE(review):
  // because this is a TreeSet, two groups whose label AND name compare
  // equal are treated as duplicates and only one survives — confirm that
  // is intended.
  final Set<Map<String, String>> out =
      new TreeSet<Map<String, String>>(
          new Comparator<Map<String, String>>() {
            @Override
            public int compare(final Map<String, String> o1, final Map<String, String> o2) {
              int comp = o1.get("label").compareToIgnoreCase(o2.get("label"));
              if (comp == 0) {
                comp = o1.get("name").compareToIgnoreCase(o2.get("name"));
              }
              return comp;
            }
          });
  for (final DataGroup dataGroup : dataGroups) {
    // Expose only unqualified groups or ones the current user may manage.
    if (StringUtils.isBlank(dataGroup.getQualifier())
        || federationFacade.isManageable(dataGroup.getQualifier(), ShopDTO.class)) {
      final Map<String, String> grp = new HashMap<String, String>();
      grp.put("name", dataGroup.getName());
      // Resolve the display label in the requested language, falling back
      // to the raw group name when no translation exists.
      final I18NModel model =
          new FailoverStringI18NModel(dataGroup.getDisplayName(), dataGroup.getName());
      grp.put("label", model.getValue(language));
      out.add(grp);
    }
  }
  return new ArrayList<Map<String, String>>(out);
}
/** Extends the inherited test configuration with a fixed timezone and entity. */
@Override
protected Map<String, String> getTestConfig() {
  Map<String, String> testConfig = super.getTestConfig();
  testConfig.put(Config.TIMEZONE, "PDT");
  testConfig.put(Config.ENTITY, "Account");
  return testConfig;
}
/**
 * Converts an AccountCodeObjectKeyFilter to a Map (not including the root).
 *
 * <p>Fix: removed the unused local {@code Object[] list}.
 *
 * @param in the filter to convert to the Map; null returns {@code record} unchanged
 * @param record use this Map instead of creating a new one (Optional)
 * @return Map the output Map (never null)
 */
protected static Map getMap(AccountCodeObjectKeyFilter in, Map record) {
  if (record == null) record = new HashMap();
  if (in == null) return record;
  if (in._fetch) record.put("Fetch", Boolean.TRUE);

  // Each sub-filter is converted independently; null means "not set" and
  // the corresponding key is simply omitted from the output.
  Map map =
      DataHelper.filterToMap(
          in.AcgTrackingId,
          in.AcgTrackingIdSort,
          in.AcgTrackingIdSortAscending,
          in.AcgTrackingIdFetch);
  if (map != null) record.put("AcgTrackingId", map);

  map =
      DataHelper.filterToMap(
          in.AcgTrackingIdServ,
          in.AcgTrackingIdServSort,
          in.AcgTrackingIdServSortAscending,
          in.AcgTrackingIdServFetch);
  if (map != null) record.put("AcgTrackingIdServ", map);

  map =
      DataHelper.filterToMap(
          in.AccountCode,
          in.AccountCodeSort,
          in.AccountCodeSortAscending,
          in.AccountCodeFetch,
          in.AccountCodeCaseInsensitive);
  if (map != null) record.put("AccountCode", map);

  return record;
}
/**
 * Runs the server-side outbound handler chain for the given payload and
 * writes the result to {@code os}.
 *
 * <p>Seeds the HTTP transport properties with a 200 status and empty
 * headers before invoking the chain; if the chain signals a protocol fault,
 * the direction is reversed and the outbound fault handlers run instead.
 */
public void invokeServerOutbound(Source source, OutputStream os) {
  Map<String, DataHandler> attachments = new HashMap<String, DataHandler>();
  Map<String, Object> httpProperties = new HashMap<String, Object>();
  httpProperties.put(HTTP_RESPONSE_CODE, Integer.valueOf(200));
  httpProperties.put(HTTP_RESPONSE_HEADERS, new HashMap<String, List<String>>());
  prepare(httpProperties, /*request=*/ false);
  if (!invokeOutbound(source, attachments)) {
    if (getProtocolException() != null) {
      reverseDirection();
      invokeOutboundFaultHandlers();
    }
    // NOTE(review): runtime exceptions are currently swallowed here (see
    // the disabled block below); confirm whether they should close the
    // server and be rethrown as on the inbound path.
    /* else if (getRuntimeException() != null) {
      closeServer();
      finish(response.getOutputStream());
      throw getRuntimeException();
    }*/
  }
  // XXX
  closeServer();
  finish(os);
}
/**
 * Runs the client-side inbound handler chain against an HTTP response.
 *
 * <p>Transfers the response code and headers into the transport properties,
 * invokes the inbound handlers on the connection's input stream, and routes
 * protocol faults through the inbound fault handlers (after reversing
 * direction). Runtime exceptions close the client and are rethrown.
 *
 * @param httpConnection the already-executed HTTP request
 * @return the stream produced by finishing the handler chain
 * @throws IOException if reading the HTTP response fails
 */
public InputStream invokeClientInbound(HttpURLConnection httpConnection) throws IOException {
  // XXX fill this in...
  Map<String, DataHandler> attachments = new HashMap<String, DataHandler>();
  Map<String, Object> httpProperties = new HashMap<String, Object>();
  httpProperties.put(HTTP_RESPONSE_CODE, Integer.valueOf(httpConnection.getResponseCode()));
  httpProperties.put(HTTP_RESPONSE_HEADERS, httpConnection.getHeaderFields());
  prepare(httpProperties, /*request=*/ false);
  if (!invokeInbound(httpConnection.getInputStream(), attachments)) {
    if (getProtocolException() != null) {
      // Protocol fault: run the fault handlers in the reversed direction;
      // if they themselves fail, surface that failure.
      reverseDirection();
      invokeInboundFaultHandlers();
      if (getRuntimeException() != null) throw getRuntimeException();
    } else if (getRuntimeException() != null) {
      closeClient();
      throw getRuntimeException();
    }
  }
  // XXX
  closeClient();
  return finish();
}
static {
  // Entry-point classes that map to a dedicated internal command name
  // rather than a user-supplied main class.
  String[][] mappings = {
    {"org.apache.spark.repl.Main", "spark-shell"},
    {"org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver", "spark-internal"},
    {"org.apache.spark.sql.hive.thriftserver.HiveThriftServer2", "spark-internal"},
  };
  for (String[] mapping : mappings) {
    specialClasses.put(mapping[0], mapping[1]);
  }
}
/**
 * Builds a name -> LoggerChannel map covering every registered plugin.
 * A plugin with exactly one channel is keyed by its plugin name alone;
 * with several channels, each is keyed as "&lt;plugin name&gt;.&lt;channel name&gt;".
 */
protected Map getChannelMap(ConsoleInput ci) {
  Map channel_map = new HashMap();
  for (PluginInterface pi : ci.azureus_core.getPluginManager().getPluginInterfaces()) {
    LoggerChannel[] logs = pi.getLogger().getChannels();
    if (logs.length == 1) {
      channel_map.put(pi.getPluginName(), logs[0]);
    } else {
      // Zero channels simply contributes nothing; multiple channels are
      // disambiguated with the channel name suffix.
      for (LoggerChannel log : logs) {
        channel_map.put(pi.getPluginName() + "." + log.getName(), log);
      }
    }
  }
  return (channel_map);
}
@Test public void shouldMapMapElements() { // given MapperFactory mapperFactory = new DefaultMapperFactory.Builder().build(); mapperFactory .classMap(BasicPerson.class, BasicPersonDto.class) .field("namePartsMap[\"first\"]", "firstName") .field("namePartsMap['second']", "lastName") .register(); BasicPerson bp = new BasicPerson(); Map<String, String> nameParamsMap = new HashMap<String, String>(); nameParamsMap.put("first", "Jan"); nameParamsMap.put("second", "Kowalski"); bp.setNamePartsMap(nameParamsMap); // when MapperFacade mapperFacade = mapperFactory.getMapperFacade(); BasicPersonDto result = mapperFacade.map(bp, BasicPersonDto.class); // then assertThat(result.getFirstName()).isEqualTo("Jan"); assertThat(result.getLastName()).isEqualTo("Kowalski"); assertThat(result.getBirthDate()).isNull(); assertThat(result.getCurrentAge()).isEqualTo(0); assertThat(result.getFullName()).isNull(); }