/**
 * Annotates the job parameter JSON with the dimensions of the training data set.
 *
 * @return JsonObject whose "source" member carries num_cols and num_rows of the training frame
 */
@Override
public JsonObject toJSON() {
  JsonObject jo = super.toJSON();
  // FIX: also guard on jo.has("source") — Gson's getAsJsonObject returns null for a
  // missing member, which would NPE on addProperty. The used/ignored-columns
  // annotator already performs this same check.
  if (source != null && jo.has("source")) {
    JsonObject src = jo.getAsJsonObject("source"); // hoist the repeated lookup
    src.addProperty("num_cols", source.numCols());
    src.addProperty("num_rows", source.numRows());
  }
  return jo;
}
/**
 * Annotates the job parameter JSON with the dimensions of the validation data set.
 *
 * @return JsonObject whose "validation" member carries num_cols and num_rows of the
 *     validation frame
 */
@Override
public JsonObject toJSON() {
  JsonObject jo = super.toJSON();
  // FIX: also guard on jo.has("validation") — Gson's getAsJsonObject returns null for
  // a missing member, which would NPE on addProperty. The used/ignored-columns
  // annotator already performs the analogous check on "source".
  if (validation != null && jo.has("validation")) {
    JsonObject v = jo.getAsJsonObject("validation"); // hoist the repeated lookup
    v.addProperty("num_cols", validation.numCols());
    v.addProperty("num_rows", validation.numRows());
  }
  return jo;
}
private static void addFolder(FileSystem fs, Path p, JsonArray succeeded, JsonArray failed) { try { if (fs == null) return; for (FileStatus file : fs.listStatus(p)) { Path pfs = file.getPath(); if (file.isDir()) { addFolder(fs, pfs, succeeded, failed); } else { Key k = Key.make(pfs.toString()); long size = file.getLen(); Value val = null; if (pfs.getName().endsWith(Extensions.JSON)) { JsonParser parser = new JsonParser(); JsonObject json = parser.parse(new InputStreamReader(fs.open(pfs))).getAsJsonObject(); JsonElement v = json.get(Constants.VERSION); if (v == null) throw new RuntimeException("Missing version"); JsonElement type = json.get(Constants.TYPE); if (type == null) throw new RuntimeException("Missing type"); Class c = Class.forName(type.getAsString()); OldModel model = (OldModel) c.newInstance(); model.fromJson(json); } else if (pfs.getName().endsWith(Extensions.HEX)) { // Hex file? FSDataInputStream s = fs.open(pfs); int sz = (int) Math.min(1L << 20, size); // Read up to the 1st meg byte[] mem = MemoryManager.malloc1(sz); s.readFully(mem); // Convert to a ValueArray (hope it fits in 1Meg!) ValueArray ary = new ValueArray(k, 0).read(new AutoBuffer(mem)); val = new Value(k, ary, Value.HDFS); } else if (size >= 2 * ValueArray.CHUNK_SZ) { val = new Value( k, new ValueArray(k, size), Value.HDFS); // ValueArray byte wrapper over a large file } else { val = new Value(k, (int) size, Value.HDFS); // Plain Value val.setdsk(); } DKV.put(k, val); Log.info("PersistHdfs: DKV.put(" + k + ")"); JsonObject o = new JsonObject(); o.addProperty(Constants.KEY, k.toString()); o.addProperty(Constants.FILE, pfs.toString()); o.addProperty(Constants.VALUE_SIZE, file.getLen()); succeeded.add(o); } } } catch (Exception e) { Log.err(e); JsonObject o = new JsonObject(); o.addProperty(Constants.FILE, p.toString()); o.addProperty(Constants.ERROR, e.getMessage()); failed.add(o); } }
/**
 * Serializes the per-cluster statistics to JSON: the row count of each cluster
 * ("rows_per_cluster") and its squared error ("sqr_error_per_cluster").
 *
 * @return JsonObject with the two per-cluster arrays
 */
public JsonObject toJson() {
  JsonArray rowCounts = new JsonArray();
  for (int i = 0; i < _rows.length; i++) {
    rowCounts.add(new JsonPrimitive(_rows[i]));
  }
  JsonArray sqrErrors = new JsonArray();
  for (int i = 0; i < _dist.length; i++) {
    sqrErrors.add(new JsonPrimitive(_dist[i]));
  }
  JsonObject res = new JsonObject();
  res.add("rows_per_cluster", rowCounts);
  res.add("sqr_error_per_cluster", sqrErrors);
  return res;
}
/**
 * Imports the folder {@code p} from its file system into the K/V store,
 * recording per-file results in the given JSON arrays. A non-existent path is
 * reported once in {@code failed} instead of throwing.
 *
 * @param p folder to import
 * @param succeeded JSON array collecting per-file success records
 * @param failed JSON array collecting error records
 * @throws IOException if the file system for {@code p} cannot be obtained
 */
public static void addFolder(Path p, JsonArray succeeded, JsonArray failed) throws IOException {
  FileSystem fs = FileSystem.get(p.toUri(), PersistHdfs.CONF);
  if (fs.exists(p)) {
    addFolder(fs, p, succeeded, failed);
    return;
  }
  JsonObject err = new JsonObject();
  err.addProperty(Constants.FILE, p.toString());
  err.addProperty(Constants.ERROR, "Path does not exist!");
  failed.add(err);
}
/**
 * Annotates the job parameter JSON with the name of the response column.
 *
 * <p>If the response vector is not found directly in the source frame, its master
 * vector is looked up as a fallback (the response may be a derived/adapted vector).
 *
 * @return JsonObject whose "response" member carries the resolved column name, or the
 *     string "null" when the column cannot be resolved
 */
@Override
public JsonObject toJSON() {
  JsonObject jo = super.toJSON();
  // FIX: also guard on jo.has("response") — Gson's getAsJsonObject returns null for a
  // missing member, which would NPE on add. The used/ignored-columns annotator
  // performs the analogous check on "source".
  if (source != null && jo.has("response")) {
    int idx = source.find(response);
    if (idx == -1) {
      // Fall back to the response's master vector in case the response is a
      // transformed view over a column of the source frame.
      Vec vm = response.masterVec();
      if (vm != null) idx = source.find(vm);
    }
    jo.getAsJsonObject("response")
        .add("name", new JsonPrimitive(idx == -1 ? "null" : source._names[idx]));
  }
  return jo;
}
/**
 * Serializes this KMeans model to JSON: the build version, the concrete model type,
 * the model error, and the cluster centers as a 2-D array of coordinates.
 *
 * @return JsonObject describing this model
 */
@Override
public JsonObject toJson() {
  JsonObject res = new JsonObject();
  res.addProperty(Constants.VERSION, H2O.VERSION);
  res.addProperty(Constants.TYPE, KMeansModel.class.getName());
  res.addProperty(Constants.ERROR, _error);
  JsonArray centers = new JsonArray();
  for (double[] center : clusters()) {
    JsonArray coords = new JsonArray();
    for (double coord : center) {
      coords.add(new JsonPrimitive(coord));
    }
    centers.add(coords);
  }
  res.add(Constants.CLUSTERS, centers);
  return res;
}
/**
 * Annotates the job parameter JSON with the used and ignored columns.
 *
 * <p>For each of the used and ignored column lists the following rules apply: if the
 * list has more than 100 entries, only its length is reported (as "num_used_cols" /
 * "num_ignored_cols"); if it is non-empty with at most 100 entries, a dense
 * comma-separated list of column indices is reported; if it is empty, JSON null is
 * reported. A null list is skipped entirely.
 *
 * @return JsonObject annotated with used/ignored columns
 */
@Override
public JsonObject toJSON() {
  JsonObject jo = super.toJSON();
  if (!jo.has("source") || source == null) return jo;
  JsonObject src = jo.getAsJsonObject("source"); // hoist the repeated lookup
  HashMap<String, int[]> map = new HashMap<String, int[]>();
  map.put("used_cols", cols);
  map.put("ignored_cols", ignored_cols);
  // Iterate entries directly instead of keySet() + get() (one lookup, not two).
  for (java.util.Map.Entry<String, int[]> entry : map.entrySet()) {
    String key = entry.getKey();
    int[] val = entry.getValue();
    if (val == null) continue;
    if (val.length > 100) {
      src.addProperty("num_" + key, val.length);
    } else if (val.length > 0) {
      // Build "c0,c1,..." with a separator-first append — no string concatenation
      // inside append() and no trailing-comma substring needed.
      StringBuilder sb = new StringBuilder();
      for (int c : val) {
        if (sb.length() > 0) sb.append(',');
        sb.append(c);
      }
      src.addProperty(key, sb.toString());
    } else {
      src.add(key, JsonNull.INSTANCE);
    }
  }
  return jo;
}