/**
 * Write the contents of the JSONObject as JSON text to a writer. For compactness, no whitespace
 * is added.
 *
 * <p>Warning: This method assumes that the data structure is acyclical.
 *
 * @param writer the destination for the JSON text
 * @return The writer.
 * @throws JSONException if the underlying writer fails or a value cannot be serialized
 */
@SuppressWarnings("unchecked")
public Writer write(Writer writer) throws JSONException {
  try {
    writer.write('{');
    boolean needsComma = false;
    // Emit each key/value pair, comma-separating all pairs after the first.
    for (Iterator it = keys(); it.hasNext(); ) {
      if (needsComma) {
        writer.write(',');
      }
      Object key = it.next();
      writer.write(quote(key.toString()));
      writer.write(':');
      Object value = this.map.get(key);
      // Nested containers serialize themselves; everything else goes through valueToString.
      if (value instanceof JSONObject) {
        ((JSONObject) value).write(writer);
      } else if (value instanceof JSONArray) {
        ((JSONArray) value).write(writer);
      } else {
        writer.write(valueToString(value));
      }
      needsComma = true;
    }
    writer.write('}');
    return writer;
  } catch (IOException exception) {
    throw new JSONException(exception);
  }
}
@RequestMapping("/json/console/app/(*:appId)/(~:appVersion)/builder/actions") public void getBuilderDataActionList( ModelMap map, Writer writer, @RequestParam("appId") String appId, @RequestParam(required = false) String appVersion, HttpServletRequest request) throws Exception { appService.getAppDefinition(appId, appVersion); JSONObject jsonObject = new JSONObject(); // get available binders DataListAction[] actions = dataListService.getAvailableActions(); Collection<Object> collection = new ArrayList<Object>(); for (DataListAction action : actions) { Plugin p = (Plugin) action; HashMap hm = new HashMap(); hm.put("name", p.getName()); hm.put("label", action.getLinkLabel()); hm.put("className", action.getClassName()); if (action instanceof PropertyEditable) { String propertyOptions = ((PropertyEditable) action).getPropertyOptions(); if (propertyOptions != null && !propertyOptions.isEmpty()) { hm.put("propertyOptions", propertyOptions); } } hm.put("type", "text"); collection.add(hm); } jsonObject.accumulate("actions", collection); jsonObject.write(writer); }
/**
 * Write the contents of the JSONObject as JSON text to a writer. For compactness, no whitespace
 * is added.
 *
 * <p>Warning: This method assumes that the data structure is acyclical.
 *
 * @param writer the destination for the JSON text
 * @return The writer.
 * @throws JSONException if the underlying writer fails or a value cannot be serialized
 */
public Writer write(Writer writer) throws JSONException {
  try {
    writer.write('{');
    // Separator is empty for the first pair, "," for every subsequent one.
    String separator = "";
    Iterator entries = this.keys();
    while (entries.hasNext()) {
      writer.write(separator);
      Object name = entries.next();
      writer.write(quote(name.toString()));
      writer.write(':');
      Object entry = this.map.get(name);
      // Containers recurse into their own write(); scalars go through valueToString.
      if (entry instanceof JSONObject) {
        ((JSONObject) entry).write(writer);
      } else if (entry instanceof JSONArray) {
        ((JSONArray) entry).write(writer);
      } else {
        writer.write(valueToString(entry));
      }
      separator = ",";
    }
    writer.write('}');
    return writer;
  } catch (IOException exception) {
    throw new JSONException(exception);
  }
}
/**
 * Write the contents of the JSONArray as JSON text to a writer. For compactness, no whitespace is
 * added.
 *
 * <p>Warning: This method assumes that the data structure is acyclical.
 *
 * @param writer the destination for the JSON text
 * @return The writer.
 * @throws JSONException if the underlying writer fails or a value cannot be serialized
 */
public Writer write(Writer writer) throws JSONException {
  try {
    writer.write('[');
    final int count = length();
    for (int index = 0; index < count; index++) {
      // Every element after the first is preceded by a comma.
      if (index > 0) {
        writer.write(',');
      }
      Object element = this.myArrayList.get(index);
      if (element instanceof JSONObject) {
        ((JSONObject) element).write(writer);
      } else if (element instanceof JSONArray) {
        ((JSONArray) element).write(writer);
      } else {
        writer.write(JSONObject.valueToString(element));
      }
    }
    writer.write(']');
    return writer;
  } catch (IOException e) {
    throw new JSONException(e);
  }
}
/**
 * Serializes a single JSON value to the writer, dispatching on the runtime type of
 * {@code value}. The order of the instanceof checks is significant: containers are tested
 * before scalars, and Map/Collection/array are wrapped into JSON containers before any
 * scalar handling could see them.
 *
 * @param writer destination for the JSON text
 * @param value the value to serialize; null (or a value equal to null) becomes the literal "null"
 * @param indentFactor spaces to add per indent level when pretty-printing
 * @param indent current indent depth, forwarded to nested containers
 * @return the writer, for chaining
 * @throws JSONException if a JSONString implementation fails
 * @throws IOException if the writer fails
 */
static final Writer writeValue(Writer writer, Object value, int indentFactor, int indent) throws JSONException, IOException { if (value == null || value.equals(null)) { writer.write("null"); } else if (value instanceof JSONObject) { ((JSONObject) value).write(writer, indentFactor, indent); } else if (value instanceof JSONArray) { ((JSONArray) value).write(writer, indentFactor, indent); } else if (value instanceof Map) { new JSONObject((Map) value).write(writer, indentFactor, indent); } else if (value instanceof Collection) { new JSONArray((Collection) value).write(writer, indentFactor, indent); } else if (value.getClass().isArray()) { new JSONArray(value).write(writer, indentFactor, indent); } else if (value instanceof Number) { writer.write(numberToString((Number) value)); } else if (value instanceof Boolean) { writer.write(value.toString()); } else if (value instanceof JSONString) { Object o; try { o = ((JSONString) value).toJSONString(); } catch (Exception e) { throw new JSONException(e); } /* a JSONString may return null; fall back to quoting its toString() */ writer.write(o != null ? o.toString() : quote(value.toString())); } else { /* any other object is emitted as a quoted string */ quote(value.toString(), writer); } return writer; }
/**
 * JSP tag body: when JSON output is requested, looks up the contact group identified by
 * {@code id} in the current mailbox and writes a JSON object of the form
 * {@code {"Result": [email, ...]}} to the page output, listing each member's "email"
 * attribute (members without one are skipped). Does nothing when {@code json} is false.
 *
 * @throws JspException wrapping any JSON or service failure
 * @throws IOException if writing to the page output fails
 */
public void doTag() throws JspException, IOException {
  JspContext pageContext = getJspContext();
  try {
    ZMailbox mailbox = getMailbox();
    ZContact contactGroup = mailbox.getContact(id);
    if (json) {
      JSONArray emails = new JSONArray();
      Map<String, ZContact> members = contactGroup.getMembers();
      for (ZContact member : members.values()) {
        Map<String, String> memberAttrs = member.getAttrs();
        String email = memberAttrs.get("email");
        if (email != null) {
          emails.put(email);
        }
      }
      JSONObject result = new JSONObject();
      result.put("Result", emails);
      result.write(pageContext.getOut());
    }
  } catch (JSONException e) {
    throw new JspTagException(e);
  } catch (ServiceException e) {
    throw new JspTagException(e);
  }
}
/**
 * Write Terrain to a JSON file. The output object contains the terrain dimensions, the
 * sunlight vector, the altitude grid (row-major over width then depth), the tree positions
 * (x/z only), and each road's width and spine control points.
 *
 * @param terrain the terrain to serialize
 * @param file destination file (overwritten)
 * @throws IOException if the file cannot be written
 */
public static void save(Terrain terrain, File file) throws IOException {
  JSONObject json = new JSONObject();
  Dimension size = terrain.size();
  json.put("width", size.width);
  json.put("depth", size.height);
  JSONArray jsonSun = new JSONArray();
  float[] sunlight = terrain.getSunlight();
  jsonSun.put(sunlight[0]);
  jsonSun.put(sunlight[1]);
  jsonSun.put(sunlight[2]);
  json.put("sunlight", jsonSun);
  JSONArray altitude = new JSONArray();
  for (int i = 0; i < size.width; i++) {
    for (int j = 0; j < size.height; j++) {
      altitude.put(terrain.getGridAltitude(i, j));
    }
  }
  json.put("altitude", altitude);
  JSONArray trees = new JSONArray();
  for (Tree t : terrain.trees()) {
    JSONObject j = new JSONObject();
    double[] position = t.getPosition();
    // Only the horizontal plane is persisted; y is derived from the altitude grid on load.
    j.put("x", position[0]);
    j.put("z", position[2]);
    trees.put(j);
  }
  json.put("trees", trees);
  JSONArray roads = new JSONArray();
  for (Road r : terrain.roads()) {
    JSONObject j = new JSONObject();
    j.put("width", r.width());
    JSONArray spine = new JSONArray();
    int n = r.size();
    // NOTE(review): iterating i <= n*3 assumes r.controlPoint(n*3) is valid (3 control points
    // per segment plus a closing point) — confirm against Road's indexing contract.
    for (int i = 0; i <= n * 3; i++) {
      double[] p = r.controlPoint(i);
      spine.put(p[0]);
      spine.put(p[1]);
    }
    j.put("spine", spine);
    roads.put(j);
  }
  json.put("roads", roads);
  // Close the writer even if serialization throws, so the file handle is never leaked.
  FileWriter out = new FileWriter(file);
  try {
    json.write(out);
  } finally {
    out.close();
  }
}
/*
 * Private Helper Method to send a true message to the client.
 * Sets "status": true on the object and writes it to the response; any JSON or I/O
 * failure is logged to stderr and otherwise ignored (best-effort response).
 * @param object A JSONObject type to be sent to the client
 * @param response An HttpServletResponse to be sent to the client
 */
private void putTrue(JSONObject object, HttpServletResponse response) {
  try {
    object.put("status", true);
    object.write(response.getWriter());
  } catch (JSONException e) {
    e.printStackTrace();
  } catch (IOException e) {
    e.printStackTrace();
  }
}
/*
 * Private Helper Method to send an error message to the client.
 * Sets "status": "error" and "error": message on the object and writes it to the response;
 * any JSON or I/O failure is logged to stderr and otherwise ignored (best-effort response).
 * @param object A JSONObject type to be sent to the client
 * @param message A String that contains the message to be sent to the client
 * @param response An HttpServletResponse to be sent to the client
 */
private void putError(JSONObject object, String message, HttpServletResponse response) {
  try {
    object.put("status", "error");
    object.put("error", message);
    object.write(response.getWriter());
  } catch (JSONException e) {
    e.printStackTrace();
  } catch (IOException e) {
    e.printStackTrace();
  }
}
/**
 * Writes the given JSON object to the HTTP response, with the content type chosen by
 * {@code isResponseTextHtml} (text/html vs application/json, both UTF-8). The full response
 * is also echoed to stdout; any failure while writing is logged and swallowed.
 *
 * @param response the servlet response to write to
 * @param isResponseTextHtml true to send text/html, false for application/json
 * @param json the payload to emit
 */
protected static void output(
    HttpServletResponse response, boolean isResponseTextHtml, JSONObject json) {
  final String contentType =
      isResponseTextHtml ? "text/html; charset=UTF-8" : "application/json; charset=UTF-8";
  response.setContentType(contentType);
  System.out.println("******************* RESPONSE\n: " + json.toString());
  try {
    json.write(response.getWriter());
  } catch (Exception e) {
    logger.error("", e);
  }
}
/**
 * Persists a datalist definition posted from the builder. Loads the existing definition for
 * the given app/version, replaces its name/description/JSON from the submitted payload, and
 * writes {"success": <boolean>} to the response writer.
 *
 * @param writer destination for the JSON result
 * @param appId application id from the URL
 * @param version optional application version from the URL
 * @param id datalist definition id from the URL
 * @param json the submitted datalist JSON payload
 * @throws Exception propagated from persistence or JSON serialization
 */
@RequestMapping( value = "/console/app/(*:appId)/(~:version)/datalist/builderSave/(*:id)", method = RequestMethod.POST) public void save( Writer writer, @RequestParam("appId") String appId, @RequestParam(value = "version", required = false) String version, @RequestParam("id") String id, @RequestParam("json") String json) throws Exception { AppDefinition appDef = appService.getAppDefinition(appId, version); DatalistDefinition datalist = datalistDefinitionDao.loadById(id, appDef); DataList dlist = dataListService.fromJson(json); datalist.setName(dlist.getName()); // NOTE(review): description is populated from getName(), not a description accessor —
// this looks like a copy-paste slip; confirm intent before changing.
datalist.setDescription(dlist.getName()); // Merge the submitted JSON with the stored JSON's processed properties before saving.
datalist.setJson(PropertyUtil.propertiesJsonStoreProcessing(datalist.getJson(), json)); boolean success = datalistDefinitionDao.update(datalist); JSONObject jsonObject = new JSONObject(); jsonObject.accumulate("success", success); jsonObject.write(writer); }
/**
 * Writes, as a JSON object {@code {"keys": [...]}} to the cache writer, the string form of
 * every stored key whose geometry interacts with the given bounding box: boxed keys whose
 * boxes intersect it, located keys whose points lie inside it, and the first placemark key
 * when the placemark bounding box intersects it. Finally asks the writer to cache the result.
 *
 * @param cw destination cache writer
 * @param bb the query bounding box
 */
public void writeRegionKeys(DS.CacheWriter cw, BoundingBox bb) {
  JSONObject payload = new JSONObject();
  JSONArray keyArray = new JSONArray();
  payload.put("keys", keyArray);
  // Keys indexed by bounding box: require both the outer box and the pair's box to intersect.
  for (Map.Entry<BoundingBox, List<Pair<BoundingBox, Key>>> boxEntry : boxList.entrySet()) {
    if (!bb.isIntersecting(boxEntry.getKey())) {
      continue;
    }
    for (Pair<BoundingBox, Key> boxedKey : boxEntry.getValue()) {
      if (bb.isIntersecting(boxedKey.getFirst())) {
        keyArray.put(KeyFactory.keyToString(boxedKey.getSecond()));
      }
    }
  }
  // Keys indexed by point location.
  for (Pair<GeoPt, Key> locatedKey : locationList) {
    if (bb.isInside(locatedKey.getFirst())) {
      keyArray.put(KeyFactory.keyToString(locatedKey.getSecond()));
    }
  }
  // Single placemark key, included when its bounding box overlaps the query box.
  if (firstPlacemarkKey != null && bb.isIntersecting(placemarkBoundingBox)) {
    keyArray.put(KeyFactory.keyToString(firstPlacemarkKey));
  }
  payload.write(cw);
  cw.cache();
}
/**
 * Drives the LLAP package build: validates options and configuration, sizes the daemon from
 * the CLI options, localizes tez/hive/aux/HBase jars and config files into the staging
 * directory, and finally emits a config.json consumed by the Slider python fragments.
 *
 * <p>The statement order matters throughout: configs must be loaded before sizing checks,
 * sizing before serialization, and jars/configs localized before config.json is written.
 *
 * @param args raw command-line arguments
 * @throws Exception on missing configuration, missing HIVE_HOME, or any copy failure
 */
private void run(String[] args) throws Exception {
  LlapOptionsProcessor optionsProcessor = new LlapOptionsProcessor();
  LlapOptions options = optionsProcessor.processOptions(args);
  if (options == null) {
    // help
    return;
  }
  Path tmpDir = new Path(options.getDirectory());
  if (conf == null) {
    throw new Exception("Cannot load any configuration to run command");
  }
  FileSystem fs = FileSystem.get(conf);
  FileSystem lfs = FileSystem.getLocal(conf).getRawFileSystem();
  // needed so that the file is actually loaded into configuration.
  for (String f : NEEDED_CONFIGS) {
    conf.addResource(f);
    if (conf.getResource(f) == null) {
      throw new Exception("Unable to find required config file: " + f);
    }
  }
  for (String f : OPTIONAL_CONFIGS) {
    conf.addResource(f);
  }
  conf.reloadConfiguration();
  if (options.getName() != null) {
    // update service registry configs - caveat: this has nothing to do with the actual settings
    // as read by the AM
    // if needed, use --hiveconf llap.daemon.service.hosts=@llap0 to dynamically switch between
    // instances
    conf.set(ConfVars.LLAP_DAEMON_SERVICE_HOSTS.varname, "@" + options.getName());
  }
  // Sanity-check that cache and working memory (individually, and combined when direct
  // allocation is on) fit inside the requested container size.
  if (options.getSize() != -1) {
    if (options.getCache() != -1) {
      Preconditions.checkArgument(
          options.getCache() < options.getSize(),
          "Cache has to be smaller than the container sizing");
    }
    if (options.getXmx() != -1) {
      Preconditions.checkArgument(
          options.getXmx() < options.getSize(),
          "Working memory has to be smaller than the container sizing");
    }
    if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.LLAP_ALLOCATOR_DIRECT)) {
      Preconditions.checkArgument(
          options.getXmx() + options.getCache() < options.getSize(),
          "Working memory + cache has to be smaller than the containing sizing ");
    }
  }
  final long minAlloc = conf.getInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, -1);
  if (options.getSize() != -1) {
    // Options carry sizes in bytes; YARN container sizing is in MB.
    final long containerSize = options.getSize() / (1024 * 1024);
    Preconditions.checkArgument(
        containerSize >= minAlloc,
        "Container size should be greater than minimum allocation(%s)",
        minAlloc + "m");
    conf.setLong(ConfVars.LLAP_DAEMON_YARN_CONTAINER_MB.varname, containerSize);
  }
  if (options.getExecutors() != -1) {
    conf.setLong(ConfVars.LLAP_DAEMON_NUM_EXECUTORS.varname, options.getExecutors());
    // TODO: vcpu settings - possibly when DRFA works right
  }
  if (options.getCache() != -1) {
    conf.setLong(HiveConf.ConfVars.LLAP_IO_MEMORY_MAX_SIZE.varname, options.getCache());
  }
  if (options.getXmx() != -1) {
    // Needs more explanation here
    // Xmx is not the max heap value in JDK8
    // You need to subtract 50% of the survivor fraction from this, to get actual usable memory
    // before it goes into GC
    conf.setLong(
        ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB.varname,
        (long) (options.getXmx()) / (1024 * 1024));
  }
  // Any explicit --hiveconf style overrides win over everything above.
  for (Entry<Object, Object> props : options.getConfig().entrySet()) {
    conf.set((String) props.getKey(), (String) props.getValue());
  }
  URL logger = conf.getResource("llap-daemon-log4j2.properties");
  if (null == logger) {
    throw new Exception("Unable to find required config file: llap-daemon-log4j2.properties");
  }
  Path home = new Path(System.getenv("HIVE_HOME"));
  Path scripts = new Path(new Path(new Path(home, "scripts"), "llap"), "bin");
  if (!lfs.exists(home)) {
    throw new Exception("Unable to find HIVE_HOME:" + home);
  } else if (!lfs.exists(scripts)) {
    LOG.warn("Unable to find llap scripts:" + scripts);
  }
  // Localize the tez tarball into lib/ and unpack it in place.
  Path libDir = new Path(tmpDir, "lib");
  String tezLibs = conf.get("tez.lib.uris");
  if (tezLibs == null) {
    LOG.warn("Missing tez.lib.uris in tez-site.xml");
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Copying tez libs from " + tezLibs);
  }
  lfs.mkdirs(libDir);
  fs.copyToLocalFile(new Path(tezLibs), new Path(libDir, "tez.tar.gz"));
  CompressionUtils.unTar(new Path(libDir, "tez.tar.gz").toString(), libDir.toString(), true);
  lfs.delete(new Path(libDir, "tez.tar.gz"), false);
  lfs.copyFromLocalFile(new Path(Utilities.jarFinderGetJar(LlapInputFormat.class)), libDir);
  lfs.copyFromLocalFile(new Path(Utilities.jarFinderGetJar(HiveInputFormat.class)), libDir);
  // copy default aux classes (json/hbase)
  for (String className : DEFAULT_AUX_CLASSES) {
    localizeJarForClass(lfs, libDir, className, false);
  }
  if (options.getIsHBase()) {
    try {
      localizeJarForClass(lfs, libDir, HBASE_SERDE_CLASS, true);
      Job fakeJob = new Job(new JobConf()); // HBase API is convoluted.
      TableMapReduceUtil.addDependencyJars(fakeJob);
      Collection<String> hbaseJars = fakeJob.getConfiguration().getStringCollection("tmpjars");
      for (String jarPath : hbaseJars) {
        if (!jarPath.isEmpty()) {
          lfs.copyFromLocalFile(new Path(jarPath), libDir);
        }
      }
    } catch (Throwable t) {
      String err = "Failed to add HBase jars. Use --auxhbase=false to avoid localizing them";
      LOG.error(err);
      System.err.println(err);
      throw new RuntimeException(t);
    }
  }
  String auxJars = options.getAuxJars();
  if (auxJars != null && !auxJars.isEmpty()) {
    // TODO: transitive dependencies warning?
    String[] jarPaths = auxJars.split(",");
    for (String jarPath : jarPaths) {
      if (!jarPath.isEmpty()) {
        lfs.copyFromLocalFile(new Path(jarPath), libDir);
      }
    }
  }
  // Stage config files into conf/; optional configs may legitimately be absent.
  Path confPath = new Path(tmpDir, "conf");
  lfs.mkdirs(confPath);
  for (String f : NEEDED_CONFIGS) {
    copyConfig(options, lfs, confPath, f);
  }
  for (String f : OPTIONAL_CONFIGS) {
    try {
      copyConfig(options, lfs, confPath, f);
    } catch (Throwable t) {
      LOG.info("Error getting an optional config " + f + "; ignoring: " + t.getMessage());
    }
  }
  lfs.copyFromLocalFile(new Path(logger.toString()), confPath);
  // extract configs for processing by the python fragments in Slider
  JSONObject configs = new JSONObject();
  configs.put(
      ConfVars.LLAP_DAEMON_YARN_CONTAINER_MB.varname,
      HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_YARN_CONTAINER_MB));
  configs.put(
      HiveConf.ConfVars.LLAP_IO_MEMORY_MAX_SIZE.varname,
      HiveConf.getLongVar(conf, HiveConf.ConfVars.LLAP_IO_MEMORY_MAX_SIZE));
  configs.put(
      HiveConf.ConfVars.LLAP_ALLOCATOR_DIRECT.varname,
      HiveConf.getBoolVar(conf, HiveConf.ConfVars.LLAP_ALLOCATOR_DIRECT));
  configs.put(
      ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB.varname,
      HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB));
  configs.put(
      ConfVars.LLAP_DAEMON_VCPUS_PER_INSTANCE.varname,
      HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_VCPUS_PER_INSTANCE));
  configs.put(
      ConfVars.LLAP_DAEMON_NUM_EXECUTORS.varname,
      HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_NUM_EXECUTORS));
  configs.put(
      YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
      conf.getInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, -1));
  configs.put(
      YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES,
      conf.getInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES, -1));
  FSDataOutputStream os = lfs.create(new Path(tmpDir, "config.json"));
  OutputStreamWriter w = new OutputStreamWriter(os);
  configs.write(w);
  w.close();
  os.close();
  lfs.close();
  fs.close();
  if (LOG.isDebugEnabled()) {
    LOG.debug("Exiting successfully");
  }
}
private void _handleRequest( Params params, HttpServletRequest request, HttpServletResponse response, PrintWriter out) throws Exception { logger.debug("params={}", params.toString()); String g_country = params.trimParam("country"); double g_lat = params.getDoubleParam("lat", -999); double g_lng = params.getDoubleParam("lng", -999); Date g_begin = params.getDateParam("begin_time"); Date g_end = params.getDateParam("end_time"); String sample_rate = params.getParam("sample_rate"); DateRange drange = null; if (null == g_begin && null == g_end) { } else if (null == g_begin) { drange = DateRange.is(g_end); } else if (null == g_end) { drange = DateRange.is(g_begin); } else { drange = DateRange.between(g_begin, g_end); } HashMap<String, List<?>> map = new HashMap<String, List<?>>(4); String g_columns = params.getParam("fields", ""); String[] columns = g_columns.split(","); ArrayList<Long> idxList = new ArrayList<Long>(); map.put("index", idxList); ArrayList<ArrayList<?>> dataList = new ArrayList<ArrayList<?>>(); map.put("data", dataList); MongoAPI api = new MongoAPI(Console.mongo.getDB("weather1")); ArrayList<Station> stations = stationService.queryList(g_country, g_lat, g_lng); ArrayList<String> exp_cols = new ArrayList<String>(3 * columns.length); exp_cols.add("station"); for (int i = 0, lenCol = columns.length; i < lenCol; ++i) { String col = columns[i]; if ("date".equals(col)) { exp_cols.add(col); } else if ("time".equals(col)) { } else { exp_cols.add("min_" + col); exp_cols.add("max_" + col); exp_cols.add("avg_" + col); } } map.put("columns", exp_cols); if ("d".equals(sample_rate)) { // daily base for (Station s : stations) { String station = s.getId(); for (int year = drange.first.year.value, end = drange.last.year.value; year <= end; ++year) { Result<ArrayList<ArrayList<Object>>> result = api.queryDailyWeatherByStation(year, station, drange, columns); if (result.positive()) { for (int i = 0, lenData = result.data.size(); i < lenData; ++i) { ArrayList<Object> arr = 
result.data.get(i); String datetime = String.valueOf(arr.remove(arr.size() - 1)); DateTime dt = DateTime.valueOf(datetime); idxList.add(dt.timestamp); dataList.add(arr); } } } } } else if ("w".equals(sample_rate)) { // weekly base for (Station s : stations) { String station = s.getId(); for (int year = drange.first.year.value, end = drange.last.year.value; year <= end; ++year) { Result<ArrayList<ArrayList<Object>>> result = api.queryWeeklyWeatherByStation(year, station, drange, columns); if (result.positive()) { for (int i = 0, lenData = result.data.size(); i < lenData; ++i) { ArrayList<Object> arr = result.data.get(i); String datetime = String.valueOf(arr.remove(arr.size() - 1)); DateTime dt = DateTime.valueOf(datetime); idxList.add(dt.timestamp); dataList.add(arr); } } } } } else if ("m".equals(sample_rate)) { for (Station s : stations) { String station = s.getId(); for (int year = drange.first.year.value, end = drange.last.year.value; year <= end; ++year) { Result<ArrayList<ArrayList<Object>>> result = api.queryMonthlyWeatherByStation(year, station, drange, columns); if (result.positive()) { for (int i = 0, lenData = result.data.size(); i < lenData; ++i) { ArrayList<Object> arr = result.data.get(i); String datetime = String.valueOf(arr.remove(arr.size() - 1)); DateTime dt = DateTime.valueOf(datetime); idxList.add(dt.timestamp); dataList.add(arr); } } } } } else { for (Station s : stations) { String station = s.getId(); for (int year = drange.first.year.value, end = drange.last.year.value; year <= end; ++year) { Result<ArrayList<ArrayList<Object>>> result = api.queryDailyWeatherByStation(year, station, drange, columns); if (result.positive()) { for (int i = 0, lenData = result.data.size(); i < lenData; ++i) { ArrayList<Object> arr = result.data.get(i); String datetime = String.valueOf(arr.remove(arr.size() - 1)); DateTime dt = DateTime.valueOf(datetime); idxList.add(dt.timestamp); dataList.add(arr); } } } } // // raw base // map.put("columns", 
Arrays.asList(columns)); // // for (Station s: stations) { // String station = s.getId(); // for (int year = drange.first.year.value, end = drange.last.year.value; year <= end; // ++year) { // Result<ArrayList<ArrayList<Object>>> result = api.queryWeatherByStation(year, station, // drange, columns); // if (result.positive()) { // for (int i = 0, lenData = result.data.size(); i < lenData; ++i) { // ArrayList<Object> arr = result.data.get(i); // String datetime = String.valueOf(arr.remove(arr.size()-1)); // DateTime dt = DateTime.valueOf(datetime); // idxList.add(dt.timestamp); // dataList.add(arr); // } // } // } // } } JSONObject json = JsonUtil.getBasicJson(ErrorCode.ok()); String resStr = "{}"; if (!dataList.isEmpty()) { resStr = JsonUtil.makeJsonize(map).toString(); } JsonUtil.addField(json, "data", resStr); JsonUtil.addField(json, "version", VERSION); json.write(out); }
/**
 * Serializes the model tree as a GeoJSON FeatureCollection and writes it to {@code filePath}.
 * Each node becomes a Feature whose properties carry its level, document count, parent/child
 * links, location center and deviation, and its top words; the geometry is a rectangle of
 * {@code boundaryFactor} standard deviations around the center.
 *
 * @param filePath destination file for the JSON output
 * @param numTopWords number of highest-count words to include per node
 * @param boundaryFactor multiplier on the lat/long deviation used for the polygon bounds
 */
public void outputToJSON(String filePath, int numTopWords, double boundaryFactor) {
  System.out.println("Writing down the model...");
  JSONObject featureCollection = new JSONObject();
  featureCollection.put("type", "FeatureCollection");
  JSONArray features = new JSONArray();
  // Breadth-first walk over the node tree starting at the root.
  Queue<Node> nodeQueue = new LinkedList<>();
  nodeQueue.offer(root);
  while (!nodeQueue.isEmpty()) {
    Node currentNode = nodeQueue.poll();
    JSONObject feature = new JSONObject();
    feature.put("type", "Feature");
    // hashCode doubles as a stable-enough id for linking parents and children.
    feature.put("id", currentNode.hashCode());
    JSONObject properties = new JSONObject();
    properties.put("level", currentNode.level + 1);
    properties.put("num_documents", currentNode.numCustomers);
    if (currentNode.parent != null) {
      properties.put("parent", currentNode.parent.hashCode());
    }
    if (!currentNode.isLeaf()) {
      JSONArray children = new JSONArray();
      for (Node child : currentNode.children) {
        children.put(child.hashCode());
      }
      properties.put("children", children);
    }
    JSONArray center = new JSONArray();
    double longitude = currentNode.location.longitude;
    center.put(longitude);
    double latitude = currentNode.location.latitude;
    center.put(latitude);
    properties.put("center", center);
    JSONArray deviation = new JSONArray();
    double longitudeDeviation = Math.sqrt(currentNode.location.longitudeVariance);
    deviation.put(longitudeDeviation);
    double latitudeDeviation = Math.sqrt(currentNode.location.latitudeVariance);
    deviation.put(latitudeDeviation);
    properties.put("deviation", deviation);
    // Keep the numTopWords highest-count words using a min-heap keyed on count.
    JSONArray topWords = new JSONArray();
    PriorityQueue<Map.Entry<Integer, Integer>> wordMinHeap =
        new PriorityQueue<>(
            numTopWords,
            new Comparator<Map.Entry<Integer, Integer>>() {
              @Override
              public int compare(Map.Entry<Integer, Integer> o1, Map.Entry<Integer, Integer> o2) {
                // Integer.compare avoids the overflow risk of subtracting counts.
                return Integer.compare(o1.getValue(), o2.getValue());
              }
            });
    for (Map.Entry<Integer, Integer> entry : currentNode.wordCounts.entrySet()) {
      if (wordMinHeap.size() < numTopWords) {
        wordMinHeap.offer(entry);
      } else {
        if (entry.getValue() > wordMinHeap.peek().getValue()) {
          wordMinHeap.poll();
          wordMinHeap.offer(entry);
        }
      }
    }
    // Drain the heap (ascending by count) into the output array.
    while (!wordMinHeap.isEmpty()) {
      topWords.put(this.id2Word.get(wordMinHeap.poll().getKey()));
    }
    properties.put("top_words", topWords);
    feature.put("properties", properties);
    // Rectangle of boundaryFactor deviations around the center, closed back to the start.
    JSONObject geometry = new JSONObject();
    geometry.put("type", "Polygon");
    JSONArray coordinates = new JSONArray();
    JSONArray boundary = new JSONArray();
    double east = longitude + boundaryFactor * longitudeDeviation;
    double west = longitude - boundaryFactor * longitudeDeviation;
    double north = latitude + boundaryFactor * latitudeDeviation;
    double south = latitude - boundaryFactor * latitudeDeviation;
    boundary.put(new JSONArray(new double[] {west, north}));
    boundary.put(new JSONArray(new double[] {east, north}));
    boundary.put(new JSONArray(new double[] {east, south}));
    boundary.put(new JSONArray(new double[] {west, south}));
    boundary.put(new JSONArray(new double[] {west, north}));
    coordinates.put(boundary);
    geometry.put("coordinates", coordinates);
    feature.put("geometry", geometry);
    features.put(feature);
    for (Node child : currentNode.children) {
      nodeQueue.offer(child);
    }
  }
  featureCollection.put("features", features);
  // try-with-resources closes the writer even if serialization throws.
  try (PrintWriter writer = new PrintWriter(filePath)) {
    featureCollection.write(writer);
  } catch (FileNotFoundException e) {
    System.err.println("Model JSON cannot be created.");
  }
}
@RequestMapping( value = "/json/console/app/(*:appId)/(~:appVersion)/builder/binder/columns", method = RequestMethod.POST) public void getBuilderDataColumnList( ModelMap map, Writer writer, @RequestParam("appId") String appId, @RequestParam(required = false) String appVersion, @RequestParam String id, @RequestParam String binderId, HttpServletRequest request) throws Exception { AppDefinition appDef = appService.getAppDefinition(appId, appVersion); JSONObject jsonObject = new JSONObject(); // get data list DataList dataList = new DataList(); // parse JSON from request if available dataList = parseFromJsonParameter(map, dataList, id, request); // get binder from request DataListBinder binder = createDataListBinderFromRequestInternal(appDef, id, binderId, request); if (binder != null) { dataList.setBinder(binder); } DataListColumn[] sourceColumns = (binder != null) ? binder.getColumns() : new DataListColumn[0]; // sort columns by label List<DataListColumn> binderColumnList = Arrays.asList(sourceColumns); Collections.sort( binderColumnList, new Comparator<DataListColumn>() { public int compare(DataListColumn o1, DataListColumn o2) { return o1.getLabel().toLowerCase().compareTo(o2.getLabel().toLowerCase()); } }); Collection<String> columnNameList = new HashSet<String>(); DataListColumn[] targetColumns = dataList.getColumns(); if (targetColumns != null) { for (DataListColumn selectedColumn : targetColumns) { columnNameList.add(selectedColumn.getName()); } } for (Iterator i = binderColumnList.iterator(); i.hasNext(); ) { DataListColumn column = (DataListColumn) i.next(); if (columnNameList.contains(column.getName())) { i.remove(); } } sourceColumns = (DataListColumn[]) binderColumnList.toArray(new DataListColumn[0]); DataList sourceDataList = new DataList(); sourceDataList.setColumns(sourceColumns); Collection<Object> collection = new ArrayList<Object>(); for (DataListColumn sourceColumn : sourceColumns) { HashMap hm = new HashMap(); hm.put("name", 
sourceColumn.getName()); hm.put("label", sourceColumn.getLabel()); hm.put("sortable", true); hm.put("filterable", true); hm.put("type", sourceColumn.getType()); collection.add(hm); } jsonObject.accumulate("columns", collection); jsonObject.write(writer); }