/**
 * JPASupport instances a and b are equal if either <strong>a == b</strong> or a and b have the
 * same <strong>{@link #key key}</strong> and class.
 *
 * @param other the object to compare against
 * @return true if the equality condition above is verified
 */
@Override
public boolean equals(Object other) {
  final Object key = this._key();
  if (other == null) {
    return false;
  }
  if (this == other) {
    return true;
  }
  if (key == null) {
    return false;
  }
  if (play.db.Model.class.isAssignableFrom(other.getClass()) && key.getClass().isArray()) {
    Object otherKey = ((play.db.Model) other)._key();
    if (otherKey.getClass().isArray()) {
      return Arrays.deepEquals((Object[]) key, (Object[]) otherKey);
    }
    return false;
  }
  if (!this.getClass().isAssignableFrom(other.getClass())) {
    return false;
  }
  return key.equals(((play.db.Model) other)._key());
}
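// A minimal companion sketch (not part of the original class): when equals() falls back to
// Arrays.deepEquals on a composite key array, hashCode() should use Arrays.deepHashCode on the
// same key so that equal instances hash alike. The _key() accessor is assumed from the snippet
// above; everything else here is illustrative.
@Override
public int hashCode() {
  final Object key = this._key();
  if (key == null) {
    return 0;                                   // no key yet: constant hash
  }
  if (key.getClass().isArray()) {
    return Arrays.deepHashCode((Object[]) key); // composite key: deep hash matches deep equality
  }
  return key.hashCode();                        // simple key: delegate
}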
@Override
public boolean equals(Object other) {
  if (!(other instanceof SimilarityMatrix)) {
    return false;
  }
  SimilarityMatrix otherMatrix = (SimilarityMatrix) other;
  return otherMatrix.builtFrom.equals(builtFrom)
      && otherMatrix.xSubmissions.equals(xSubmissions)
      && otherMatrix.ySubmissions.equals(ySubmissions)
      && Arrays.deepEquals(otherMatrix.entries, entries);
}
/**
 * Train a Deep Learning model; assumes that all members are populated. If checkpoint == null,
 * start training a new model, otherwise continue from the checkpoint.
 */
public final void buildModel() {
  DeepLearningModel cp = null;
  if (_parms._checkpoint == null) {
    cp =
        new DeepLearningModel(
            dest(),
            _parms,
            new DeepLearningModel.DeepLearningModelOutput(DeepLearning.this),
            _train,
            _valid,
            nclasses());
    cp.model_info().initializeMembers();
  } else {
    final DeepLearningModel previous = DKV.getGet(_parms._checkpoint);
    if (previous == null) throw new IllegalArgumentException("Checkpoint not found.");
    Log.info("Resuming from checkpoint.");
    _job.update(0, "Resuming from checkpoint");
    if (isClassifier() != previous._output.isClassifier())
      throw new H2OIllegalArgumentException(
          "Response type must be the same as for the checkpointed model.");
    if (isSupervised() != previous._output.isSupervised())
      throw new H2OIllegalArgumentException(
          "Model type must be the same as for the checkpointed model.");

    // check the user-given arguments for consistency
    DeepLearningParameters oldP = previous._parms; // sanitized parameters for checkpointed model
    DeepLearningParameters newP = _parms; // user-given parameters for restart
    DeepLearningParameters oldP2 = (DeepLearningParameters) oldP.clone();
    DeepLearningParameters newP2 = (DeepLearningParameters) newP.clone();
    DeepLearningParameters.Sanity.modifyParms(oldP, oldP2, nclasses()); // sanitize the checkpointed parameters
    DeepLearningParameters.Sanity.modifyParms(newP, newP2, nclasses()); // sanitize the user-given parameters
    DeepLearningParameters.Sanity.checkpoint(oldP2, newP2);

    DataInfo dinfo;
    try {
      // PUBDEV-2513: Adapt _train and _valid (in-place) to match the frames that were used for
      // the previous model.
      // This can add or remove dummy columns (can happen if the dataset is sparse and datasets
      // have different non-const columns).
      for (String st : previous.adaptTestForTrain(_train, true, false)) Log.warn(st);
      for (String st : previous.adaptTestForTrain(_valid, true, false)) Log.warn(st);
      dinfo = makeDataInfo(_train, _valid, _parms, nclasses());
      DKV.put(dinfo);
      cp = new DeepLearningModel(dest(), _parms, previous, false, dinfo);
      cp.write_lock(_job);
      if (!Arrays.equals(cp._output._names, previous._output._names)) {
        throw new H2OIllegalArgumentException(
            "The columns of the training data must be the same as for the checkpointed model. Check ignored columns (or disable ignore_const_cols).");
      }
      if (!Arrays.deepEquals(cp._output._domains, previous._output._domains)) {
        throw new H2OIllegalArgumentException(
            "Categorical factor levels of the training data must be the same as for the checkpointed model.");
      }
      if (dinfo.fullN() != previous.model_info().data_info().fullN()) {
        throw new H2OIllegalArgumentException(
            "Total number of predictors is different than for the checkpointed model.");
      }
      if (_parms._epochs <= previous.epoch_counter) {
        throw new H2OIllegalArgumentException(
            "Total number of epochs must be larger than the number of epochs already trained for the checkpointed model ("
                + previous.epoch_counter
                + ").");
      }

      // these are the mutable parameters that are to be used by the model (stored in model_info._parms)
      final DeepLearningParameters actualNewP =
          cp.model_info().get_params(); // actually used parameters for model building (defaults filled in, etc.)
      assert (actualNewP != previous.model_info().get_params());
      assert (actualNewP != newP);
      assert (actualNewP != oldP);
      DeepLearningParameters.Sanity.update(actualNewP, newP, nclasses());
      Log.info(
          "Continuing training after "
              + String.format("%.3f", previous.epoch_counter)
              + " epochs from the checkpointed model.");
      cp.update(_job);
    } catch (H2OIllegalArgumentException ex) {
      if (cp != null) {
        cp.unlock(_job);
        cp.delete();
        cp = null;
      }
      throw ex;
    } finally {
      if (cp != null) cp.unlock(_job);
    }
  }
  trainModel(cp);

  // clean up, but don't delete weights and biases if user asked for export
  List<Key> keep = new ArrayList<>();
  try {
    if (_parms._export_weights_and_biases
        && cp._output.weights != null
        && cp._output.biases != null) {
      for (Key k : Arrays.asList(cp._output.weights)) {
        keep.add(k);
        for (Vec vk : ((Frame) DKV.getGet(k)).vecs()) {
          keep.add(vk._key);
        }
      }
      for (Key k : Arrays.asList(cp._output.biases)) {
        keep.add(k);
        for (Vec vk : ((Frame) DKV.getGet(k)).vecs()) {
          keep.add(vk._key);
        }
      }
    }
  } finally {
    Scope.exit(keep.toArray(new Key[keep.size()]));
  }
}
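// A self-contained sketch (not H2O code) of why the checkpoint validation above uses both
// Arrays.equals and Arrays.deepEquals: column names are a flat String[], where element-wise
// Arrays.equals suffices, while categorical domains are a nested String[][], where Arrays.equals
// would only compare the inner arrays by reference. All names below are illustrative.
import java.util.Arrays;

public class CheckpointCompatibilityDemo {
  public static void main(String[] args) {
    String[] namesA = {"age", "income"};
    String[] namesB = {"age", "income"};
    String[][] domainsA = {null, {"low", "high"}};
    String[][] domainsB = {null, {"low", "high"}};

    System.out.println(Arrays.equals(namesA, namesB));          // true: flat arrays compare element-wise
    System.out.println(Arrays.equals(domainsA, domainsB));      // false: inner arrays compared by reference
    System.out.println(Arrays.deepEquals(domainsA, domainsB));  // true: recurses into nested arrays
  }
}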
@Override
Val apply(Env env, Env.StackHelp stk, AST asts[]) {
  // Execute all args. Find a canonical frame; all Frames must look like this one.
  // Each argument turns into either a Frame (whose rows are entirely
  // inlined) or a scalar (which is replicated across as a single row).
  Frame fr = null; // Canonical Frame; all frames have the same column count, types and names
  int nchks = 0; // Total chunks
  Val vals[] = new Val[asts.length]; // Computed AST results
  for (int i = 1; i < asts.length; i++) {
    vals[i] = stk.track(asts[i].exec(env));
    if (vals[i].isFrame()) {
      fr = vals[i].getFrame();
      nchks += fr.anyVec().nChunks(); // Total chunks
    } else nchks++; // One chunk per scalar
  }

  // No Frame, just a pile-o-scalars?
  Vec zz = null; // The zero-length vec for the zero-frame frame
  if (fr == null) { // Zero-length, 1-column, default name
    fr = new Frame(new String[] {Frame.defaultColName(0)}, new Vec[] {zz = Vec.makeZero(0)});
    if (asts.length == 1) return new ValFrame(fr);
  }

  // Verify all Frames are the same columns, names, and types. Domains can vary, and will be the union.
  final Frame frs[] = new Frame[asts.length]; // Input frame
  final byte[] types = fr.types(); // Column types
  final int ncols = fr.numCols();
  final long[] espc = new long[nchks + 1]; // Compute a new layout!
  int coffset = 0;
  for (int i = 1; i < asts.length; i++) {
    Val val = vals[i]; // Save values computed for pass 2
    Frame fr0 =
        val.isFrame()
            ? val.getFrame()
            // Scalar: auto-expand into a 1-row frame
            : stk.track(new Frame(fr._names, Vec.makeCons(val.getNum(), 1L, fr.numCols())));
    // Check that all frames are compatible
    if (fr.numCols() != fr0.numCols())
      throw new IllegalArgumentException(
          "rbind frames must have all the same columns, found "
              + fr.numCols()
              + " and "
              + fr0.numCols()
              + " columns.");
    if (!Arrays.deepEquals(fr._names, fr0._names))
      throw new IllegalArgumentException(
          "rbind frames must have all the same column names, found "
              + Arrays.toString(fr._names)
              + " and "
              + Arrays.toString(fr0._names));
    if (!Arrays.equals(types, fr0.types()))
      throw new IllegalArgumentException(
          "rbind frames must have all the same column types, found "
              + Arrays.toString(types)
              + " and "
              + Arrays.toString(fr0.types()));
    frs[i] = fr0; // Save frame
    // Roll up the ESPC row counts
    long roffset = espc[coffset];
    long[] espc2 = fr0.anyVec().espc();
    for (int j = 1; j < espc2.length; j++) // Roll up the row counts
      espc[coffset + j] = (roffset + espc2[j]);
    coffset += espc2.length - 1; // Chunk offset
  }
  if (zz != null) zz.remove();

  // build up the new domains for each vec
  HashMap<String, Integer>[] dmap = new HashMap[types.length];
  String[][] domains = new String[types.length][];
  int[][][] cmaps = new int[types.length][][];
  for (int k = 0; k < types.length; ++k) {
    dmap[k] = new HashMap<>();
    int c = 0;
    byte t = types[k];
    if (t == Vec.T_CAT) {
      int[][] maps = new int[frs.length][];
      for (int i = 1; i < frs.length; i++) {
        maps[i] = new int[frs[i].vec(k).domain().length];
        for (int j = 0; j < maps[i].length; j++) {
          String s = frs[i].vec(k).domain()[j];
          if (!dmap[k].containsKey(s)) dmap[k].put(s, maps[i][j] = c++);
          else maps[i][j] = dmap[k].get(s);
        }
      }
      cmaps[k] = maps;
    } else {
      cmaps[k] = new int[frs.length][];
    }
    domains[k] = c == 0 ? null : new String[c];
    for (Map.Entry<String, Integer> e : dmap[k].entrySet()) domains[k][e.getValue()] = e.getKey();
  }

  // Now make Keys for the new Vecs
  Key<Vec>[] keys = fr.anyVec().group().addVecs(fr.numCols());
  Vec[] vecs = new Vec[fr.numCols()];
  int rowLayout = Vec.ESPC.rowLayout(keys[0], espc);
  for (int i = 0; i < vecs.length; i++)
    vecs[i] = new Vec(keys[i], rowLayout, domains[i], types[i]);

  // Do the row-binds column-by-column.
  // Switch to F/J thread for continuations
  ParallelRbinds t;
  H2O.submitTask(t = new ParallelRbinds(frs, espc, vecs, cmaps)).join();
  return new ValFrame(new Frame(fr.names(), t._vecs));
}
/**
 * Verifies that the actual <code>Object</code> array is not equal to the given array. Array
 * equality is checked by <code>{@link Arrays#deepEquals(Object[], Object[])}</code>.
 *
 * @param array the given array to compare the actual array to.
 * @return this assertion object.
 * @throws AssertionError if the actual <code>Object</code> array is equal to the given one.
 */
@Override
public ObjectArrayAssert isNotEqualTo(Object[] array) {
  if (!Arrays.deepEquals(actual, array)) return this;
  failIfCustomMessageIsSet();
  throw failure(unexpectedEqual(actual, array));
}
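// A hedged usage sketch for the assertion above, written as a test-style method. It assumes the
// usual FEST entry point org.fest.assertions.Assertions.assertThat(Object[]) (statically
// imported); verify that signature against the FEST-Assert version in use. The point being
// illustrated is standard JDK behavior: "equal" here means Arrays.deepEquals, so nested arrays
// are compared by content, not by reference.
public void notEqualToUsesDeepElementComparison() {
  Object[] actual = {new int[] {1, 2}, "x"};

  // Passes: a different nested payload is not deep-equal to the actual array.
  assertThat(actual).isNotEqualTo(new Object[] {new int[] {9, 9}, "x"});

  // Throws AssertionError: a distinct array whose contents are element-wise equal *is*
  // deep-equal, so isNotEqualTo reports a failure.
  assertThat(actual).isNotEqualTo(new Object[] {new int[] {1, 2}, "x"});
}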
public int ai_move(int[][] board) {
  if (running) {
    System.out.println(
        "This AI appears to be running multiple ai_moves simultaneously. That can't be right.");
  }
  running = true;
  try {
    // System.out.println(2 + Math.random());
    if (recording) {
      if (out == null) {
        try {
          int ind = 1;
          File f = null;
          while (true) {
            try {
              Scanner sc = new Scanner(new File("AIReplay" + ind + ".txt"));
              ind++;
            } catch (Exception e) {
              break;
            }
          }
          out = new PrintWriter(new File("AIReplay" + ind + ".txt"));
          filename = "AIReplay" + ind + ".txt";
          out.println("AI Version: " + VERSION);
        } catch (Exception e) {
          System.out.println("Could not write to file.");
        }
      }
      fprint(board);
    }
    // if (fml == null) fml = new PrintWriter (new File("fmldebug.txt"));
    if (thisAIIsCheating && max(board) < 8) {
      board[0][0] = GameGUI.win_target;
    }
    if (debug2) sc.nextLine();
    if (debug2) print(board);
    if (debug) System.out.println("New cycle.");
    if (debug) sc.nextLine();
    if (dumbai) name += "Dumby";
    turn++;
    if (!queue.isEmpty()) {
      int temp = queue.removeFirst();
      if (temp > 0) {
        running = false;
        return temp;
      }
    }
    int boardsum = 0;
    for (int i = 0; i < 4; i++) {
      for (int j = 0; j < 4; j++) {
        boardsum += board[i][j];
      }
    }
    boolean report = debug;
    /*if (Math.random() < 0.0001) {
      report = true;
      for (int i = 0; i < 4; i++) {
        System.out.println(Arrays.toString(board[i]));
      }
      for (int i = 0; i < 4; i++) {
        System.out.println(movable(board, i));
      }
      System.out.println();
      sc.nextLine();
    }*/
    if (dumbai) {
      if (!name.endsWith("Dumby")) name += "Dumby";
      System.out.println(turn);
      running = false;
      if (turn % 600 == 599) return KeyEvent.VK_DOWN;
      if (turn % 3 == 0) return KeyEvent.VK_UP;
      if (turn % 6 < 3) return KeyEvent.VK_LEFT;
      return KeyEvent.VK_RIGHT;
    } else {
      if (name.indexOf(".") < 0) name += VERSION;
    }
    // gamestart processing
    /*if(board[0][0] == 0) {
      if (board[1][0] > board[0][1]) {
        return KeyEvent.VK_UP;
      }
      if (board[1][0] < board[0][1]) {
        return KeyEvent.VK_LEFT;
      }
      if (Math.random() < 0.5) return KeyEvent.VK_UP;
      return KeyEvent.VK_LEFT;
    }*/
    long[] pref = {10, 20, 1, 1}; // LEFT, UP, RIGHT, DOWN
    // check if moving right/down is safe
    boolean occupied = true;
    for (int i = 0; i < 4; i++) {
      if (board[0][i] == 0) occupied = false;
      if (i < 3 && board[0][i] == board[0][i + 1]) occupied = false;
    }
    if (!occupied) {
      // pref[2] -= 100000000;
    }
    occupied = true;
    for (int i = 0; i < 4; i++) {
      if (board[i][0] == 0) occupied = false;
      if (i < 3 && board[i][0] == board[i + 1][0]) occupied = false;
    }
    if (!occupied) {
      // pref[3] -= 100000000;
    }
    pref[0] += 5;
    pref[1] += 5;
    // System.out.println(6 + Math.random());
    // simulate
    sum_board = sum(board);
    delta_sum_board_7over8 = delta(sum_board * 7 / 8);
    if (debug) print(board);
    max_depth = 0;
    for (int m = 0; m < 4; m++) {
      if (debug) System.out.println("Now testing move: " + m);
      int[][] sim = simulate(board, m);
      if (Arrays.deepEquals(sim, board)) {
        if (out != null) out.println("Move " + m + " invalid; skipping");
        if (GameGUI.out != null) GameGUI.out.println("Move " + m + " invalid; skipping");
        continue;
      }
      long worst = (long) 1999999999 * 1000000000;
      long avg = 0;
      int numt = 0;
      for (int i = 0; i < 4; i++) {
        for (int j = 0; j < 4; j++) {
          if (sim[i][j] > 0) continue;
          sim[i][j] = 2;
          long temp = predictor(sim, iter_max / (int) Math.pow((countBlank(sim) + 1), 1.6), 1);
          if (temp < worst) worst = temp;
          avg += 9 * temp;
          sim[i][j] = 4;
          temp = predictor(sim, iter_max / (int) Math.pow((countBlank(sim) + 1), 1.6), 1);
          if (temp < worst) worst = temp;
          avg += temp;
          sim[i][j] = 0;
          numt += 10;
        }
      }
      if (countBlank(sim) == 0) {
        long temp = predictor(sim, iter_max / (int) pow((countBlank(sim) + 1), 2), 1);
        if (temp < worst) worst = temp;
        avg += temp;
        numt++;
      }
      avg /= numt;
      worst = (worst_weight * worst + avg) / (worst_weight + 1);
      if (countBlank(sim) >= 8 && max(board) < 64) worst = avg;
      if (debug || debug2) System.out.println("Move " + m + " final eval: " + worst);
      if (out != null) out.println("Move " + m + " final eval: " + worst);
      if (GameGUI.out != null) GameGUI.out.println("Move " + m + " final eval: " + worst);
      pref[m] += worst;
    }
    if (debug2) System.out.println("Max depth: " + max_depth);
    if (out != null) out.println("Max depth: " + max_depth);
    if (GameGUI.out != null) GameGUI.out.println("Max depth: " + max_depth);
    // System.out.println(5 + Math.random());
    // process output
    int[] dir = new int[4];
    dir[0] = KeyEvent.VK_LEFT;
    dir[1] = KeyEvent.VK_UP;
    dir[2] = KeyEvent.VK_RIGHT;
    dir[3] = KeyEvent.VK_DOWN;
    if (report) System.out.println("Pref: " + Arrays.toString(pref));
    for (int i = 0; i < 4; i++) {
      int best = 0;
      for (int j = 0; j < 4; j++) {
        if (pref[j] > pref[best]) {
          best = j;
        }
      }
      pref[best] = Long.MIN_VALUE;
      if (movable(board, best)) {
        if (report) {
          report = false;
          if (debug) System.out.println("Chosen: " + best);
          if (debug) sc.nextLine();
        }
        // if (pref[best] < -50000000) queue.add(best - 2);
        running = false;
        return dir[best];
      }
      // System.out.println("Unmovable: " + best);
      // System.out.println("Pref: " + Arrays.toString(pref));
    }
    System.out.println("???");
    for (int i = 0; i < 4; i++) {
      System.out.println(Arrays.toString(board[i]));
    }
    // sc.nextLine();
  } catch (Exception e) {
    e.printStackTrace();
  }
  running = false;
  return KeyEvent.VK_LEFT;
}
public long predictor(int[][] board, int iters, int depth) {
  // returns future value, kinda
  // debnum+=4;
  // System.out.println(Math.random());
  if (depth > max_depth) max_depth = depth;
  // if (max(board) < 64 && depth == 1) return grade4(board);
  int div = 0;
  for (int i = 0; i < 4; i++) {
    int[][] sim = simulate(board, i);
    if (Arrays.deepEquals(sim, board)) continue;
    div += countBlank(sim);
  }
  if (!movable(board, 0) && !movable(board, 1) && !movable(board, 2) && !movable(board, 3)) {
    // if (max(board) == GameGUI.win_target) return grade(board);
    return (long) -1999999999 * 3 * sum(board);
  }
  div *= 2;
  if (div > iters) {
    // debnum-=4;
    return grade4(board);
  }
  iters /= div;
  long best = (long) -1999999999 * 800000000;
  if (debug) print(board);
  for (int m = 0; m < 4; m++) {
    // debnum--;
    int[][] sim = simulate(board, m);
    if (Arrays.deepEquals(sim, board)) continue;
    if (debug) System.out.println("Simulating: " + m);
    long worst = (long) 1999999999 * 800000000;
    long avg = 0;
    int numt = 0;
    for (int i = 0; i < 4; i++) {
      for (int j = 0; j < 4; j++) {
        if (sim[i][j] > 0) continue;
        sim[i][j] = 2;
        long temp = predictor(sim, iters, depth + 1);
        if (temp < worst) worst = temp;
        avg += 9 * temp;
        sim[i][j] = 4;
        temp = predictor(sim, iters, depth + 1);
        if (temp < worst) worst = temp;
        avg += temp;
        sim[i][j] = 0;
        numt += 10;
      }
    }
    if (countBlank(sim) == 0) {
      // System.out.println("??");
      long temp = predictor(sim, iter_max / (int) pow((countBlank(sim) + 1), 2), depth + 1);
      if (temp < worst) worst = temp;
      avg += temp;
      numt++;
    }
    // avg -= worst;
    avg /= numt;
    if (debug) System.out.println("Result: " + worst);
    if ((avg + worst_weight * worst) / (worst_weight + 1) > best)
      best = (worst_weight * worst + avg) / (worst_weight + 1);
    // if (worst > best) best = worst;
    if (div >= 64 && max(board) < 64 && avg > best) best = avg;
  }
  return best;
}
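// A small standalone sketch of the no-op-move check both methods above rely on: because int[][]
// is an array of int[] objects, Arrays.deepEquals compares the simulated board to the current one
// cell by cell, so an unchanged board means the move was invalid. The isNoOp helper and the toy
// merge-free slideLeft below are illustrative only, not part of the original AI.
import java.util.Arrays;

public class NoOpMoveDemo {

  /** True if applying the move left the board unchanged (i.e. the move is invalid in 2048). */
  static boolean isNoOp(int[][] before, int[][] after) {
    return Arrays.deepEquals(before, after);
  }

  /** Toy "slide left" that only compacts non-zero tiles, without merging. */
  static int[][] slideLeft(int[][] board) {
    int[][] out = new int[board.length][board[0].length];
    for (int r = 0; r < board.length; r++) {
      int c = 0;
      for (int v : board[r]) if (v != 0) out[r][c++] = v;
    }
    return out;
  }

  public static void main(String[] args) {
    int[][] board = {
      {2, 0, 0, 0},
      {4, 0, 0, 0},
    };
    System.out.println(isNoOp(board, slideLeft(board))); // true: nothing can slide further left
    board[0][3] = 2;
    System.out.println(isNoOp(board, slideLeft(board))); // false: the new tile slides left
  }
}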