Example No. 1
  private static void addFolder2(
      FileSystem fs, Path p, ArrayList<String> keys, ArrayList<String> failed) {
    try {
      if (fs == null) return;

      Futures futures = new Futures();
      for (FileStatus file : fs.listStatus(p)) {
        Path pfs = file.getPath();
        if (file.isDir()) {
          addFolder2(fs, pfs, keys, failed);
        } else {
          long size = file.getLen();
          if (pfs.getName().endsWith(Extensions.JSON)) {
            throw H2O.unimpl(); // JSON import not implemented yet
          } else if (pfs.getName().endsWith(Extensions.HEX)) { // Hex file?
            throw H2O.unimpl(); // HEX import not implemented yet
          } else {
            Key k = HdfsFileVec.make(file, futures); // create a file-backed Vec key
            keys.add(k.toString());
            Log.info("PersistHdfs: DKV.put(" + k + ")");
          }
        }
      }
    } catch (Exception e) {
      Log.err(e);
      failed.add(p.toString());
    }
  }
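A minimal calling sketch for the method above, assuming a Hadoop FileSystem reachable at some HDFS URI; the importFolder helper, the URI handling, and the logging line are illustrative assumptions, not part of the original source (FileSystem, Path, and Configuration are the usual org.apache.hadoop classes).
  // Hypothetical caller: recursively import every file under an HDFS folder
  // and report how many files could not be imported.
  static void importFolder(String hdfsUri, String dir) throws IOException {
    ArrayList<String> keys = new ArrayList<String>();
    ArrayList<String> failed = new ArrayList<String>();
    FileSystem fs = FileSystem.get(URI.create(hdfsUri), new Configuration());
    addFolder2(fs, new Path(dir), keys, failed);
    Log.info("PersistHdfs: imported " + keys.size() + " files, " + failed.size() + " failed");
  }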
Example No. 2
 private static void ignoreAndWait(final Exception e, boolean printException) {
   H2O.ignore(e, "Hit HDFS reset problem, retrying...", printException);
   try {
     Thread.sleep(500); // brief back-off before the caller retries
   } catch (InterruptedException ie) {
     // Interrupted during the back-off; ignore and return to the caller.
   }
 }
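A hedged sketch of the retry pattern this helper supports; runWithRetries, the Runnable operation, and the attempt limit are illustrative assumptions rather than part of the original source.
 // Hypothetical retry loop: retry a flaky HDFS operation, backing off roughly
 // 500 ms between attempts via ignoreAndWait, printing the exception only on
 // the final attempt.
 private static boolean runWithRetries(Runnable hdfsOp, int maxAttempts) {
   for (int attempt = 1; attempt <= maxAttempts; attempt++) {
     try {
       hdfsOp.run();
       return true; // succeeded
     } catch (Exception e) {
       ignoreAndWait(e, attempt == maxAttempts);
     }
   }
   return false; // every attempt failed
 }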
Example No. 3
    public static Job run(final Key dest, final KMeansModel model, final ValueArray ary) {
      final ChunkProgressJob job = new ChunkProgressJob(ary.chunks(), dest);
      // Delete any prior result under dest and write-lock the key for this job.
      new ValueArray(dest, 0).delete_and_lock(job.self());
      final H2OCountedCompleter fjtask =
          new H2OCountedCompleter() {
            @Override
            public void compute2() {
              KMeansApply kms = new KMeansApply();
              kms._job = job;
              kms._arykey = ary._key;
              kms._cols = model.columnMapping(ary.colNames());
              kms._clusters = model._clusters;
              kms._normalized = model._normalized;
              kms.invoke(ary._key);

              // Describe the single output column: the cluster assigned to each row.
              Column c = new Column();
              c._name = Constants.RESPONSE;
              c._size = ROW_SIZE;
              c._scale = 1;
              c._min = 0;
              c._max = model._clusters.length;
              c._mean = Double.NaN;
              c._sigma = Double.NaN;
              c._domain = null;
              c._n = ary.numRows();
              // Publish the result array, release the lock, and finish the job.
              ValueArray res = new ValueArray(dest, ary.numRows(), c._size, new Column[] {c});
              res.unlock(job.self());
              job.remove();
              tryComplete();
            }

            @Override
            public boolean onExceptionalCompletion(Throwable ex, CountedCompleter caller) {
              job.onException(ex);
              return super.onExceptionalCompletion(ex, caller);
            }
          };
      // Register the task with the job, then hand it to H2O's fork/join pool.
      job.start(fjtask);
      H2O.submitTask(fjtask);
      return job;
    }
Example No. 4
 private void cancel(final String msg, JobState resultingState) {
   if (resultingState == JobState.CANCELLED) {
     Log.info("Job " + self() + "(" + description + ") was cancelled.");
   } else {
     Log.err("Job " + self() + "(" + description + ") failed.");
     Log.err(msg);
   }
   exception = msg;
   state = resultingState;
   // replace finished job by a job handle
   replaceByJobHandle();
   DKV.write_barrier();
   final Job job = this;
   // Run the onCancelled() hook asynchronously on the F/J pool.
   H2O.submitTask(
       new H2OCountedCompleter() {
         @Override
         public void compute2() {
           job.onCancelled();
         }
       });
 }
Example No. 5
 /**
  * Forks computation of this job.
  *
  * <p>The call does not block.
  *
  * @return always returns this job.
  */
 public Job fork() {
   init();
   H2OCountedCompleter task =
       new H2OCountedCompleter() {
         @Override
         public void compute2() {
           try {
             try {
               // Exec always waits till the end of computation
               Job.this.exec();
               Job.this.remove();
             } catch (Throwable t) {
               if (!(t instanceof ExpectedExceptionForDebug)) Log.err(t);
               Job.this.cancel(t);
             }
           } finally {
             tryComplete();
           }
         }
       };
   start(task);
   H2O.submitTask(task);
   return this;
 }
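A minimal usage sketch for fork(); the launch helper and the Job instance passed to it are hypothetical, and only the non-blocking contract documented above is relied on.
 // Hypothetical helper: start a job without blocking the calling thread.
 static Job launch(Job myJob) {
   Job handle = myJob.fork(); // returns immediately; exec() runs on the F/J pool
   assert handle == myJob;    // fork() always returns the job it was called on
   return handle;
 }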
Example No. 6
 /** Single-row scoring on a compatible ValueArray (when pushed through the mapping). */
 @Override
 protected double score0(ValueArray data, int row) {
   throw H2O.unimpl();
 }
Example No. 7
 /** Bulk scoring API on a compatible ValueArray (when pushed through the mapping). */
 @Override
 protected double score0(ValueArray data, AutoBuffer ab, int row_in_chunk) {
   throw H2O.unimpl();
 }
Example No. 8
 /**
  * Cross-validate this Job. Meant to be overridden by each concrete Job; the override is
  * expected to call genericCrossValidation.
  *
  * @param splits Frames containing the train/test splits
  * @param cv_preds Stores the predictions of each cross-validation run
  * @param offsets Array holding the starting row index of each cross-validation run
  * @param i Which cross-validation fold to perform
  */
 public void crossValidate(Frame[] splits, Frame[] cv_preds, long[] offsets, int i) {
   throw H2O.unimpl();
 }
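A hedged calling sketch only; how splits, cv_preds, and offsets are sized is an assumption here (the original source does not show it), and the runAllFolds helper is hypothetical.
 // Hypothetical driver: run every fold of an n-fold cross-validation in turn.
 static void runAllFolds(Job job, Frame[] splits, int nfolds) {
   Frame[] cv_preds = new Frame[nfolds]; // one prediction Frame per fold
   long[] offsets = new long[nfolds + 1]; // starting row index of each run
   for (int i = 0; i < nfolds; i++) {
     job.crossValidate(splits, cv_preds, offsets, i);
   }
 }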