static {
  InputStream resource = Boot._init.getResource2("/page.html");
  try {
    _htmlTemplate =
        new String(ByteStreams.toByteArray(resource)).replace("%cloud_name", H2O.NAME);
  } catch (NullPointerException e) {
    Log.err(e);
    Log.die("page.html not found in resources.");
  } catch (Exception e) {
    Log.err(e);
    Log.die(e.getMessage());
  } finally {
    Closeables.closeQuietly(resource);
  }
}
// Recursively import every file under p as an HdfsFileVec key; folders that
// cannot be listed are recorded in `failed`.
private static void addFolder2(
    FileSystem fs, Path p, ArrayList<String> keys, ArrayList<String> failed) {
  try {
    if (fs == null) return;
    Futures futures = new Futures();
    for (FileStatus file : fs.listStatus(p)) {
      Path pfs = file.getPath();
      if (file.isDir()) {
        addFolder2(fs, pfs, keys, failed);
      } else {
        if (pfs.getName().endsWith(Extensions.JSON)) {
          throw H2O.unimpl();
        } else if (pfs.getName().endsWith(Extensions.HEX)) { // Hex file?
          throw H2O.unimpl();
        } else {
          Key k = HdfsFileVec.make(file, futures);
          keys.add(k.toString());
          Log.info("PersistHdfs: DKV.put(" + k + ")");
        }
      }
    }
  } catch (Exception e) {
    Log.err(e);
    failed.add(p.toString());
  }
}
private static void addFolder(FileSystem fs, Path p, JsonArray succeeded, JsonArray failed) {
  try {
    if (fs == null) return;
    for (FileStatus file : fs.listStatus(p)) {
      Path pfs = file.getPath();
      if (file.isDir()) {
        addFolder(fs, pfs, succeeded, failed);
      } else {
        Key k = Key.make(pfs.toString());
        long size = file.getLen();
        Value val = null;
        if (pfs.getName().endsWith(Extensions.JSON)) {
          JsonParser parser = new JsonParser();
          JsonObject json = parser.parse(new InputStreamReader(fs.open(pfs))).getAsJsonObject();
          JsonElement v = json.get(Constants.VERSION);
          if (v == null) throw new RuntimeException("Missing version");
          JsonElement type = json.get(Constants.TYPE);
          if (type == null) throw new RuntimeException("Missing type");
          Class c = Class.forName(type.getAsString());
          OldModel model = (OldModel) c.newInstance();
          model.fromJson(json);
        } else if (pfs.getName().endsWith(Extensions.HEX)) { // Hex file?
          FSDataInputStream s = fs.open(pfs);
          int sz = (int) Math.min(1L << 20, size); // Read up to the 1st meg
          byte[] mem = MemoryManager.malloc1(sz);
          s.readFully(mem);
          // Convert to a ValueArray (hope it fits in 1Meg!)
          ValueArray ary = new ValueArray(k, 0).read(new AutoBuffer(mem));
          val = new Value(k, ary, Value.HDFS);
        } else if (size >= 2 * ValueArray.CHUNK_SZ) {
          // ValueArray byte wrapper over a large file
          val = new Value(k, new ValueArray(k, size), Value.HDFS);
        } else {
          val = new Value(k, (int) size, Value.HDFS); // Plain Value
          val.setdsk();
        }
        DKV.put(k, val);
        Log.info("PersistHdfs: DKV.put(" + k + ")");
        JsonObject o = new JsonObject();
        o.addProperty(Constants.KEY, k.toString());
        o.addProperty(Constants.FILE, pfs.toString());
        o.addProperty(Constants.VALUE_SIZE, file.getLen());
        succeeded.add(o);
      }
    }
  } catch (Exception e) {
    Log.err(e);
    JsonObject o = new JsonObject();
    o.addProperty(Constants.FILE, p.toString());
    o.addProperty(Constants.ERROR, e.getMessage());
    failed.add(o);
  }
}
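For context, a minimal sketch of how a caller might drive the importer above. The `Configuration`/`FileSystem`/`Path` setup is standard Hadoop and `JsonArray` is Gson, but the namenode URI, the `/datasets` path, and the `importFolderDemo` helper are hypothetical placeholders, and the method is assumed to live in the same class so the private `addFolder` call resolves.

```java
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.gson.JsonArray;

// Hypothetical driver for addFolder; URI and folder path are placeholders.
static void importFolderDemo() throws Exception {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(URI.create("hdfs://namenode:8020/"), conf);
  JsonArray succeeded = new JsonArray();
  JsonArray failed = new JsonArray();
  addFolder(fs, new Path("/datasets"), succeeded, failed);
  // Each element of `succeeded` records the key, file, and size;
  // `failed` records the path and error message for folders that blew up.
}
```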
private void cancel(final String msg, JobState resultingState) {
  if (resultingState == JobState.CANCELLED) {
    Log.info("Job " + self() + "(" + description + ") was cancelled.");
  } else {
    Log.err("Job " + self() + "(" + description + ") failed.");
    Log.err(msg);
  }
  exception = msg;
  state = resultingState;
  // Replace the finished job by a job handle
  replaceByJobHandle();
  DKV.write_barrier();
  final Job job = this;
  H2O.submitTask(
      new H2OCountedCompleter() {
        @Override
        public void compute2() {
          job.onCancelled();
        }
      });
}
public static byte[] unzipBytes(byte[] bs, Compression cmp) {
  InputStream is = null;
  int off = 0;
  try {
    switch (cmp) {
      case NONE: // No compression
        return bs;
      case ZIP: {
        ZipInputStream zis = new ZipInputStream(new ByteArrayInputStream(bs));
        ZipEntry ze = zis.getNextEntry(); // Get the *FIRST* entry
        // There is at least one entry in the zip file and it is not a directory.
        if (ze != null && !ze.isDirectory()) {
          is = zis;
          break;
        }
        zis.close();
        return bs; // Don't crash; ignore the file if it cannot be unzipped
      }
      case GZIP:
        is = new GZIPInputStream(new ByteArrayInputStream(bs));
        break;
      default:
        assert false : "cmp = " + cmp;
    }
    // If reading from a compressed stream, estimate we can read 2x uncompressed
    assert (is != null) : "is is NULL, cmp = " + cmp;
    bs = new byte[bs.length * 2];
    // Now read from the (possibly compressed) stream
    while (off < bs.length) {
      int len = is.read(bs, off, bs.length - off);
      if (len < 0) break;
      off += len;
      if (off == bs.length) { // Dataset is uncompressing a lot! Need more space...
        if (bs.length >= ValueArray.CHUNK_SZ) break; // Already got enough
        bs = Arrays.copyOf(bs, bs.length * 2);
      }
    }
  } catch (IOException ioe) { // Stop at any IO error
    Log.err(ioe);
  } finally {
    Utils.close(is);
  }
  return bs;
}
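As a usage note, the sketch below shows the round trip a caller would expect from `unzipBytes`. The GZIP plumbing is standard JDK; the `demoUnzipBytes` helper name and its placement (the same class as `unzipBytes`, so the static call and the `Compression` enum resolve unqualified) are assumptions for illustration.

```java
// Hypothetical helper, assumed to live in the same class as unzipBytes.
static void demoUnzipBytes() throws java.io.IOException {
  byte[] raw = "col1,col2\n1,2\n".getBytes();
  java.io.ByteArrayOutputStream bos = new java.io.ByteArrayOutputStream();
  try (java.util.zip.GZIPOutputStream gz = new java.util.zip.GZIPOutputStream(bos)) {
    gz.write(raw); // compress the payload
  }
  byte[] unpacked = unzipBytes(bos.toByteArray(), Compression.GZIP);
  // unzipBytes over-allocates (its first guess is 2x the compressed size),
  // so only the first raw.length bytes are meaningful here.
  for (int i = 0; i < raw.length; i++) assert unpacked[i] == raw[i];
}
```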
protected void err(Object tag, Object obj) { Log.err(obj); }
/**
 * Simple GLM wrapper to enable launching GLM from the command line.
 *
 * <p>Example input: java -jar target/h2o.jar -name=test -runMethod water.util.GLMRunner
 * -file=smalldata/logreg/prostate.csv -y=CAPSULE -family=binomial
 *
 * @param args command-line arguments, parsed into a {@link GLMArgs}
 * @throws InterruptedException
 */
public static void main(String[] args) throws InterruptedException {
  try {
    GLMArgs ARGS = new GLMArgs();
    new Arguments(args).extract(ARGS);
    System.out.println("==================<GLMRunner START>===================");
    ValueArray ary = Utils.loadAndParseKey(ARGS.file);
    int ycol;
    try {
      ycol = Integer.parseInt(ARGS.y);
    } catch (NumberFormatException e) {
      ycol = ary.getColumnIds(new String[] {ARGS.y})[0];
    }
    int ncols = ary.numCols();
    if (ycol < 0 || ycol >= ary.numCols()) {
      System.err.println("Invalid y column: " + ycol);
      H2O.exit(-1);
    }
    int[] xcols;
    if (ARGS.xs.equalsIgnoreCase("all")) {
      // Use all columns except the response as predictors.
      xcols = new int[ncols - 1];
      for (int i = 0; i < ycol; ++i) xcols[i] = i;
      for (int i = ycol; i < ncols - 1; ++i) xcols[i] = i + 1;
    } else {
      System.out.println("xs = " + ARGS.xs);
      String[] names = ARGS.xs.split(",");
      xcols = new int[names.length];
      try {
        for (int i = 0; i < names.length; ++i) xcols[i] = Integer.valueOf(names[i]);
      } catch (NumberFormatException e) {
        xcols = ary.getColumnIds(ARGS.xs.split(","));
      }
    }
    for (int x : xcols)
      if (x < 0) {
        System.err.println("Invalid predictor specification " + ARGS.xs);
        H2O.exit(-1);
      }
    GLMJob j =
        DGLM.startGLMJob(
            DGLM.getData(ary, xcols, ycol, null, true),
            new ADMMSolver(ARGS.lambda, ARGS._alpha),
            new GLMParams(Family.valueOf(ARGS.family)),
            null,
            ARGS.xval,
            true);
    System.out.print("[GLM] computing model...");
    int progress = 0;
    while (!j.isDone()) {
      int p = (int) (100 * j.progress());
      int dots = p - progress;
      progress = p;
      for (int i = 0; i < dots; ++i) System.out.print('.');
      Thread.sleep(250);
    }
    Log.debug(Sys.GENLM, "DONE.");
    GLMModel m = j.get();
    String[] colnames = ary.colNames();
    System.out.println("Intercept = " + m._beta[ncols - 1]);
    for (int i = 0; i < xcols.length; ++i) {
      // Index column names by the predictor's column id, not the loop index;
      // the two differ whenever the response is not the last column.
      System.out.println(colnames[xcols[i]] + " = " + m._beta[i]);
    }
  } catch (Throwable t) {
    Log.err(t);
  } finally {
    // We're done; shut down the cloud.
    Log.debug(Sys.GENLM, "==================<GLMRunner DONE>===================");
    UDPRebooted.suicide(UDPRebooted.T.shutdown, H2O.SELF);
  }
}
/**
 * Copy properties "of the same name" from one POJO to the other. If the fields are named
 * consistently (both sides have fields named "_foo" and/or "bar") this acts like Apache Commons
 * PojoUtils.copyProperties(). If one side has leading underscores and the other does not then
 * the names are conformed according to the field_naming parameter.
 *
 * @param dest Destination POJO
 * @param origin Origin POJO
 * @param field_naming Are the fields named consistently, or does one side have underscores?
 * @param skip_fields Array of origin or destination field names to skip
 * @param only_fields Array of origin or destination field names to include; ones not in this
 *     list will be skipped
 */
public static void copyProperties(
    Object dest,
    Object origin,
    FieldNaming field_naming,
    String[] skip_fields,
    String[] only_fields) {
  if (null == dest || null == origin) return;

  Field[] dest_fields = Weaver.getWovenFields(dest.getClass());
  Field[] orig_fields = Weaver.getWovenFields(origin.getClass());

  for (Field orig_field : orig_fields) {
    String origin_name = orig_field.getName();

    // Use short-circuit && so the contains() calls only run when the filter arrays exist;
    // with the non-short-circuit & a null skip_fields/only_fields would NPE.
    if (skip_fields != null && ArrayUtils.contains(skip_fields, origin_name)) continue;
    if (only_fields != null && !ArrayUtils.contains(only_fields, origin_name)) continue;

    String dest_name = null;
    if (field_naming == FieldNaming.CONSISTENT) {
      dest_name = origin_name;
    } else if (field_naming == FieldNaming.DEST_HAS_UNDERSCORES) {
      dest_name = "_" + origin_name;
    } else if (field_naming == FieldNaming.ORIGIN_HAS_UNDERSCORES) {
      dest_name = origin_name.substring(1);
    }

    if (skip_fields != null && ArrayUtils.contains(skip_fields, dest_name)) continue;
    if (only_fields != null && !ArrayUtils.contains(only_fields, dest_name)) continue;

    try {
      Field dest_field = null;
      for (Field fd : dest_fields) {
        if (fd.getName().equals(dest_name)) {
          dest_field = fd;
          break;
        }
      }

      if (dest_field != null) {
        dest_field.setAccessible(true);
        orig_field.setAccessible(true);
        // Log.info("PojoUtils.copyProperties, origin field: " + orig_field + "; destination field: " + dest_field);
        if (null == orig_field.get(origin)) {
          //
          // Assigning null to dest.
          //
          dest_field.set(dest, null);
        } else if (dest_field.getType().isArray()
            && orig_field.getType().isArray()
            && (dest_field.getType().getComponentType()
                != orig_field.getType().getComponentType())) {
          //
          // Assigning an array to another array.
          //
          // You can't use reflection to set an int[] from an Integer[], so the boxed/primitive
          // cases below convert element-wise.
          // TODO: other types of arrays...
          if (dest_field.getType().getComponentType() == double.class
              && orig_field.getType().getComponentType() == Double.class) {
            //
            // Assigning a Double[] to a double[]: unbox element-wise (a straight cast
            // would throw ClassCastException).
            //
            Double[] from = (Double[]) orig_field.get(origin);
            double[] copy = new double[from.length];
            for (int i = 0; i < from.length; i++) copy[i] = from[i];
            dest_field.set(dest, copy);
          } else if (dest_field.getType().getComponentType() == Double.class
              && orig_field.getType().getComponentType() == double.class) {
            //
            // Assigning a double[] to a Double[]: box element-wise.
            //
            double[] from = (double[]) orig_field.get(origin);
            Double[] copy = new Double[from.length];
            for (int i = 0; i < from.length; i++) copy[i] = from[i];
            dest_field.set(dest, copy);
          } else if (dest_field.getType().getComponentType() == int.class
              && orig_field.getType().getComponentType() == Integer.class) {
            //
            // Assigning an Integer[] to an int[]: unbox element-wise.
            //
            Integer[] from = (Integer[]) orig_field.get(origin);
            int[] copy = new int[from.length];
            for (int i = 0; i < from.length; i++) copy[i] = from[i];
            dest_field.set(dest, copy);
          } else if (dest_field.getType().getComponentType() == Integer.class
              && orig_field.getType().getComponentType() == int.class) {
            //
            // Assigning an int[] to an Integer[]: box element-wise.
            //
            int[] from = (int[]) orig_field.get(origin);
            Integer[] copy = new Integer[from.length];
            for (int i = 0; i < from.length; i++) copy[i] = from[i];
            dest_field.set(dest, copy);
          } else if (Schema.class.isAssignableFrom(dest_field.getType().getComponentType())
              && (Schema.getImplClass(
                      (Class<? extends Schema>) dest_field.getType().getComponentType()))
                  .isAssignableFrom(orig_field.getType().getComponentType())) {
            //
            // Assigning an array of impl fields to an array of schema fields, e.g. a
            // DeepLearningParameters[] into a DeepLearningParametersV2[]
            //
            Class dest_component_class = dest_field.getType().getComponentType();
            Schema[] translation =
                (Schema[])
                    Array.newInstance(
                        dest_component_class, Array.getLength(orig_field.get(origin)));
            int i = 0;
            int version = ((Schema) dest).getSchemaVersion();
            // Look up the schema for each element of the array; if not found fall back to
            // the schema for the base class.
            for (Iced impl : ((Iced[]) orig_field.get(origin))) {
              if (null == impl) {
                translation[i++] = null;
              } else {
                Schema s = null;
                try {
                  s = Schema.schema(version, impl);
                } catch (H2ONotFoundArgumentException e) {
                  s = ((Schema) dest_field.getType().getComponentType().newInstance());
                }
                translation[i++] = s.fillFromImpl(impl);
              }
            }
            dest_field.set(dest, translation);
          } else if (Schema.class.isAssignableFrom(orig_field.getType().getComponentType())
              && Iced.class.isAssignableFrom(dest_field.getType().getComponentType())) {
            //
            // Assigning an array of schema fields to an array of impl fields, e.g. a
            // DeepLearningParametersV2[] into a DeepLearningParameters[]
            //
            // We can't check against the actual impl class I, because we can't instantiate
            // the schema base classes to get the impl class from an instance:
            // dest_field.getType().getComponentType().isAssignableFrom(((Schema)f.getType().getComponentType().newInstance()).getImplClass())
            //
            Class dest_component_class = dest_field.getType().getComponentType();
            Iced[] translation =
                (Iced[])
                    Array.newInstance(
                        dest_component_class, Array.getLength(orig_field.get(origin)));
            int i = 0;
            for (Schema s : ((Schema[]) orig_field.get(origin))) {
              translation[i++] = s.createImpl();
            }
            dest_field.set(dest, translation);
          } else {
            throw H2O.fail(
                "Don't know how to cast an array of: "
                    + orig_field.getType().getComponentType()
                    + " to an array of: "
                    + dest_field.getType().getComponentType());
          }
          // end of array handling
        } else if (dest_field.getType() == Key.class
            && Keyed.class.isAssignableFrom(orig_field.getType())) {
          //
          // Assigning a Keyed (e.g., a Frame or Model) to a Key.
          //
          dest_field.set(dest, ((Keyed) orig_field.get(origin))._key);
        } else if (orig_field.getType() == Key.class
            && Keyed.class.isAssignableFrom(dest_field.getType())) {
          //
          // Assigning a Key (for e.g., a Frame or Model) to a Keyed (e.g., a Frame or Model).
          //
          Value v = DKV.get((Key) orig_field.get(origin));
          dest_field.set(dest, (null == v ? null : v.get()));
        } else if (KeyV3.class.isAssignableFrom(dest_field.getType())
            && Keyed.class.isAssignableFrom(orig_field.getType())) {
          //
          // Assigning a Keyed (e.g., a Frame or Model) to a KeyV3.
          //
          dest_field.set(
              dest,
              KeyV3.make(
                  ((Class<? extends KeyV3>) dest_field.getType()),
                  ((Keyed) orig_field.get(origin))._key));
        } else if (KeyV3.class.isAssignableFrom(orig_field.getType())
            && Keyed.class.isAssignableFrom(dest_field.getType())) {
          //
          // Assigning a KeyV3 (for e.g., a Frame or Model) to a Keyed (e.g., a Frame or Model).
          //
          KeyV3 k = (KeyV3) orig_field.get(origin);
          Value v = DKV.get(Key.make(k.name));
          dest_field.set(dest, (null == v ? null : v.get()));
        } else if (KeyV3.class.isAssignableFrom(dest_field.getType())
            && Key.class.isAssignableFrom(orig_field.getType())) {
          //
          // Assigning a Key to a KeyV3.
          //
          dest_field.set(
              dest,
              KeyV3.make(
                  ((Class<? extends KeyV3>) dest_field.getType()),
                  (Key) orig_field.get(origin)));
        } else if (KeyV3.class.isAssignableFrom(orig_field.getType())
            && Key.class.isAssignableFrom(dest_field.getType())) {
          //
          // Assigning a KeyV3 to a Key.
          //
          KeyV3 k = (KeyV3) orig_field.get(origin);
          dest_field.set(dest, (null == k.name ? null : Key.make(k.name)));
        } else if (dest_field.getType() == Pattern.class
            && String.class.isAssignableFrom(orig_field.getType())) {
          //
          // Assigning a String to a Pattern.
          //
          dest_field.set(dest, Pattern.compile((String) orig_field.get(origin)));
        } else if (orig_field.getType() == Pattern.class
            && String.class.isAssignableFrom(dest_field.getType())) {
          //
          // Assigning a Pattern to a String.
          //
          dest_field.set(dest, orig_field.get(origin).toString());
        } else if (dest_field.getType() == FrameV3.ColSpecifierV3.class
            && String.class.isAssignableFrom(orig_field.getType())) {
          //
          // Assigning a String to a ColSpecifier. Note that we currently support only the
          // colname, not a frame name too.
          //
          dest_field.set(dest, new FrameV3.ColSpecifierV3((String) orig_field.get(origin)));
        } else if (orig_field.getType() == FrameV3.ColSpecifierV3.class
            && String.class.isAssignableFrom(dest_field.getType())) {
          //
          // Assigning a ColSpecifierV3 to a String. The column_name gets copied.
          //
          dest_field.set(dest, ((FrameV3.ColSpecifierV3) orig_field.get(origin)).column_name);
        } else if (Enum.class.isAssignableFrom(dest_field.getType())
            && String.class.isAssignableFrom(orig_field.getType())) {
          //
          // Assigning a String into an enum field.
          //
          Class<Enum> dest_class = (Class<Enum>) dest_field.getType();
          dest_field.set(dest, Enum.valueOf(dest_class, (String) orig_field.get(origin)));
        } else if (Enum.class.isAssignableFrom(orig_field.getType())
            && String.class.isAssignableFrom(dest_field.getType())) {
          //
          // Assigning an enum field into a String.
          //
          Object o = orig_field.get(origin);
          dest_field.set(dest, (o == null ? null : o.toString()));
        } else if (Schema.class.isAssignableFrom(dest_field.getType())
            && Schema.getImplClass((Class<? extends Schema>) dest_field.getType())
                .isAssignableFrom(orig_field.getType())) {
          //
          // Assigning an impl field into a schema field, e.g. a DeepLearningParameters into a
          // DeepLearningParametersV2.
          //
          dest_field.set(
              dest,
              Schema.schema(/* ((Schema) dest).getSchemaVersion() TODO: remove HACK!! */ 3,
                      (Class<? extends Iced>) orig_field.get(origin).getClass())
                  .fillFromImpl((Iced) orig_field.get(origin)));
        } else if (Schema.class.isAssignableFrom(orig_field.getType())
            && Schema.getImplClass((Class<? extends Schema>) orig_field.getType())
                .isAssignableFrom(dest_field.getType())) {
          //
          // Assigning a schema field into an impl field, e.g. a DeepLearningParametersV2 into
          // a DeepLearningParameters.
          //
          Schema s = ((Schema) orig_field.get(origin));
          dest_field.set(dest, s.fillImpl(s.createImpl()));
        } else if ((Schema.class.isAssignableFrom(dest_field.getType())
            && Key.class.isAssignableFrom(orig_field.getType()))) {
          //
          // Assigning an impl field fetched via a Key into a schema field, e.g. a
          // DeepLearningParameters into a DeepLearningParametersV2.
          // Note that unlike the cases above we don't know the type of the impl class until
          // we fetch it in the body of the if.
          //
          Key origin_key = (Key) orig_field.get(origin);
          Value v = DKV.get(origin_key);
          if (null == v || null == v.get()) {
            dest_field.set(dest, null);
          } else {
            if (((Schema) dest_field.get(dest))
                .getImplClass()
                .isAssignableFrom(v.get().getClass())) {
              Schema s = ((Schema) dest_field.get(dest));
              dest_field.set(
                  dest,
                  Schema.schema(s.getSchemaVersion(), s.getImplClass()).fillFromImpl(v.get()));
            } else {
              Log.err(
                  "Can't fill Schema of type: "
                      + dest_field.getType()
                      + " with value of type: "
                      + v.getClass()
                      + " fetched from Key: "
                      + origin_key);
              dest_field.set(dest, null);
            }
          }
        } else if (Schema.class.isAssignableFrom(orig_field.getType())
            && Keyed.class.isAssignableFrom(dest_field.getType())) {
          //
          // Assigning a schema field into a Keyed field, e.g. a DeepLearningV2 into a
          // (DeepLearningParameters) key.
          //
          Schema s = ((Schema) orig_field.get(origin));
          dest_field.set(dest, ((Keyed) s.fillImpl(s.createImpl()))._key);
        } else {
          //
          // Normal case: not doing any type conversion.
          //
          dest_field.set(dest, orig_field.get(origin));
        }
      }
    } catch (IllegalAccessException e) {
      Log.err(
          "Illegal access exception trying to copy field: "
              + origin_name
              + " of class: "
              + origin.getClass()
              + " to field: "
              + dest_name
              + " of class: "
              + dest.getClass());
    } catch (InstantiationException e) {
      Log.err(
          "Instantiation exception trying to copy field: "
              + origin_name
              + " of class: "
              + origin.getClass()
              + " to field: "
              + dest_name
              + " of class: "
              + dest.getClass());
    }
  }
}
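To make the name-conformance behavior concrete, here is a minimal sketch of calling copyProperties. The `TrainParams`/`TrainParamsV3` classes are hypothetical stand-ins (real arguments are typically Weaver-woven Iced/Schema classes whose fields Weaver can enumerate), and the `PojoUtils.FieldNaming` qualification assumes the enum is nested in the same class as copyProperties.

```java
// A minimal sketch, assuming PojoUtils.copyProperties and its FieldNaming enum
// are visible as written; TrainParams/TrainParamsV3 are hypothetical POJOs.
public class CopyPropertiesDemo {
  static class TrainParams {
    public int _epochs = 10;
    public double _rate = 0.01;
  }

  static class TrainParamsV3 {
    public int epochs;
    public double rate;
  }

  public static void main(String[] args) {
    TrainParams impl = new TrainParams();
    TrainParamsV3 schema = new TrainParamsV3();
    // Origin fields carry leading underscores and the destination's do not, so
    // each origin name is conformed by stripping its underscore ("_epochs" -> "epochs").
    PojoUtils.copyProperties(
        schema, impl, PojoUtils.FieldNaming.ORIGIN_HAS_UNDERSCORES, null, null);
    assert schema.epochs == 10 && schema.rate == 0.01;
  }
}
```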