@operator( value = {"percent_absolute_deviation"}, content_type = IType.FLOAT, category = {IOperatorCategory.MAP_COMPARAISON}, concept = {IConcept.STATISTIC}) @doc( value = "percent absolute deviation indicator for 2 series of values: percent_absolute_deviation(list_vals_observe,list_vals_sim)", examples = { @example( value = "percent_absolute_deviation([200,300,150,150,200],[250,250,100,200,200])", isExecutable = false) }) public static double percentAbsoluteDeviation( final IScope scope, final IList<Double> vals1, final IList<Double> vals2) { if (vals1 == null || vals2 == null) { return 1; } int nb = vals1.size(); if (nb != vals2.size()) { return 0; } double sum = 0; double coeff = 0; for (int i = 0; i < nb; i++) { double val1 = Cast.asFloat(scope, vals1.get(i)); double val2 = Cast.asFloat(scope, vals2.get(i)); coeff += val1; sum += FastMath.abs(val1 - val2) * 100.0; } if (coeff == 0) { return 0; } return sum / coeff; }
@Override
public void addWorker(String worker) {
  // Always refresh the heartbeat timestamp, even for an already-known worker.
  heartbeat.put(worker, System.currentTimeMillis());
  if (workers.contains(worker)) {
    return;
  }
  // First sighting of this worker: register it and report the new count.
  log.info("Adding worker " + worker);
  workers.add(worker);
  log.info("Number of workers is now " + workers.size());
}
/**
 * Console command: adds {@code count} objects ("obj0".."objN-1") to the distributed
 * list and prints how many additions succeeded plus the add rate in events/second.
 * The count is taken from {@code args[1]} when present, defaulting to 1.
 */
protected void handleListAddMany(String[] args) {
  int count = 1;
  if (args.length > 1) count = Integer.parseInt(args[1]);
  int successCount = 0;
  long t0 = Clock.currentTimeMillis();
  for (int i = 0; i < count; i++) {
    boolean success = getList().add("obj" + i);
    if (success) successCount++;
  }
  long t1 = Clock.currentTimeMillis();
  // FIX: when all adds complete within one clock tick, t1 == t0 and the rate
  // computation threw ArithmeticException (division by zero). Clamp the
  // elapsed time to at least 1 ms so the command always prints a rate.
  long elapsedMs = Math.max(1, t1 - t0);
  println("Added " + successCount + " objects.");
  println("size = " + list.size() + ", " + successCount * 1000 / elapsedMs + " evt/s");
}
@operator( value = {"kappa"}, content_type = IType.FLOAT, category = {IOperatorCategory.MAP_COMPARAISON}, concept = {}) @doc( value = "kappa indicator for 2 map comparisons: kappa(list_vals1,list_vals2,categories, weights). Reference: Cohen, J. A coefficient of agreement for nominal scales. Educ. Psychol. Meas. 1960, 20. ", examples = { @example( value = "kappa([cat1,cat1,cat2,cat3,cat2],[cat2,cat1,cat2,cat1,cat2],[cat1,cat2,cat3], [1.0, 2.0, 3.0, 1.0, 5.0])", isExecutable = false) }) public static double kappa( final IScope scope, final IList<Object> vals1, final IList<Object> vals2, final List<Object> categories, final IList<Object> weights) { if (vals1 == null || vals2 == null) { return 1; } int nb = vals1.size(); if (nb != vals2.size()) { return 0; } int nbCat = categories.size(); double[] X = new double[nbCat]; double[] Y = new double[nbCat]; double[][] contigency = new double[nbCat][nbCat]; for (int j = 0; j < nbCat; j++) { X[j] = 0; Y[j] = 0; for (int k = 0; k < nbCat; k++) { contigency[j][k] = 0; } } Map<Object, Integer> categoriesId = new TOrderedHashMap<Object, Integer>(); for (int i = 0; i < nbCat; i++) { categoriesId.put(categories.get(i), i); } double total = 0; for (int i = 0; i < nb; i++) { double weight = weights == null ? 1.0 : Cast.asFloat(scope, weights.get(i)); total += weight; Object val1 = vals1.get(i); Object val2 = vals2.get(i); int indexVal1 = categoriesId.get(val1); int indexVal2 = categoriesId.get(val2); X[indexVal1] += weight; Y[indexVal2] += weight; contigency[indexVal1][indexVal2] += weight; } for (int j = 0; j < nbCat; j++) { X[j] /= total; Y[j] /= total; for (int k = 0; k < nbCat; k++) { contigency[j][k] /= total; } } double po = 0; double pe = 0; for (int i = 0; i < nbCat; i++) { po += contigency[i][i]; pe += X[i] * Y[i]; } if (pe == 1) { return 1; } return (po - pe) / (1 - pe); }
@operator( value = {"kappa_sim"}, content_type = IType.FLOAT, category = {IOperatorCategory.MAP_COMPARAISON}, concept = {}) @doc( value = "kappa simulation indicator for 2 map comparisons: kappa(list_valsInits,list_valsObs,list_valsSim, categories, weights). Reference: van Vliet, J., Bregt, A.K. & Hagen-Zanker, A. (2011). Revisiting Kappa to account for change in the accuracy assessment of land-use change models, Ecological Modelling 222(8)", examples = { @example( value = "kappa([cat1,cat1,cat2,cat2,cat2],[cat2,cat1,cat2,cat1,cat3],[cat2,cat1,cat2,cat3,cat3], [cat1,cat2,cat3],[1.0, 2.0, 3.0, 1.0, 5.0])", isExecutable = false) }) public static double kappaSimulation( final IScope scope, final IList<Object> valsInit, final IList<Object> valsObs, final IList<Object> valsSim, final List<Object> categories, final IList<Object> weights) { if (valsInit == null || valsObs == null || valsSim == null) { return 1; } int nb = valsInit.size(); if (nb != valsObs.size() || nb != valsSim.size()) { return 0; } int nbCat = categories.size(); double[] O = new double[nbCat]; double[][] contigency = new double[nbCat][nbCat]; double[][] contigencyOA = new double[nbCat][nbCat]; double[][] contigencyOS = new double[nbCat][nbCat]; for (int j = 0; j < nbCat; j++) { O[j] = 0; for (int k = 0; k < nbCat; k++) { contigency[j][k] = 0; contigencyOA[j][k] = 0; contigencyOS[j][k] = 0; } } Map<Object, Integer> categoriesId = new TOrderedHashMap<Object, Integer>(); for (int i = 0; i < nbCat; i++) { categoriesId.put(categories.get(i), i); } double total = 0; for (int i = 0; i < nb; i++) { double weight = weights == null ? 
1.0 : Cast.asFloat(scope, weights.get(i)); total += weight; Object val1 = valsObs.get(i); Object val2 = valsSim.get(i); Object valO = valsInit.get(i); int indexVal1 = categoriesId.get(val1); int indexVal2 = categoriesId.get(val2); int indexValO = categoriesId.get(valO); O[indexValO] += weight; contigency[indexVal1][indexVal2] += weight; contigencyOA[indexValO][indexVal1] += weight; contigencyOS[indexValO][indexVal2] += weight; } for (int j = 0; j < nbCat; j++) { for (int k = 0; k < nbCat; k++) { contigency[j][k] /= total; if (O[j] > 0) { contigencyOA[j][k] /= O[j]; contigencyOS[j][k] /= O[j]; } } O[j] /= total; } double po = 0; double pe = 0; for (int j = 0; j < nbCat; j++) { po += contigency[j][j]; double sum = 0; for (int i = 0; i < nbCat; i++) { sum += contigencyOA[j][i] * contigencyOS[j][i]; } pe += O[j] * sum; } if (pe == 1) { return 1; } return (po - pe) / (1 - pe); }
@Override
public int numWorkers() {
  // Size of the distributed worker list at the time of the call.
  return workers.size();
}
/**
 * Bootstraps the Hazelcast-backed state tracker.
 *
 * <p>When {@code type} is {@code "master"} and the port is free, an embedded Hazelcast
 * instance is started; when {@code type} is {@code "master"} but the port is taken, an
 * {@link IllegalStateException} is thrown; otherwise a Hazelcast client connects to
 * {@code connectionString}. All distributed structures (lists, maps, atomic references)
 * are then bound, and master-only defaults are applied last.
 *
 * @param connectionString host:port of the cluster to join, or "master" to derive it
 *     from the local hostname
 * @param type role of this node; "master" hosts the cluster, anything else is a client
 * @param stateTrackerPort port the master's Hazelcast instance listens on
 * @throws Exception on hostname lookup, Hazelcast startup, or interrupted sleep
 */
public BaseHazelCastStateTracker(String connectionString, String type, int stateTrackerPort)
    throws Exception {
  log.info(
      "Setting up hazelcast with type "
          + type
          + " connection string "
          + connectionString
          + " and port "
          + stateTrackerPort);
  // Master path: only start an embedded instance if the port is actually free.
  if (type.equals("master") && !PortTaken.portTaken(stateTrackerPort)) {
    // sets up a proper connection string for reference wrt external actors needing a reference
    if (connectionString.equals("master")) {
      String host = InetAddress.getLocalHost().getHostName();
      this.connectionString = host + ":" + stateTrackerPort;
    }
    this.hazelCastPort = stateTrackerPort;
    config = hazelcast();
    h = Hazelcast.newHazelcastInstance(config);
    // Log cluster membership changes for operational visibility.
    h.getCluster()
        .addMembershipListener(
            new MembershipListener() {
              @Override
              public void memberAdded(MembershipEvent membershipEvent) {
                log.info("Member added " + membershipEvent.toString());
              }

              @Override
              public void memberRemoved(MembershipEvent membershipEvent) {
                log.info("Member removed " + membershipEvent.toString());
              }

              @Override
              public void memberAttributeChanged(MemberAttributeEvent memberAttributeEvent) {
                log.info("Member changed " + memberAttributeEvent.toString());
              }
            });
  } else if (type.equals("master") && PortTaken.portTaken(stateTrackerPort))
    // Master requested but the port is occupied: fail fast rather than silently
    // becoming a client.
    throw new IllegalStateException(
        "Specified type was master and the port specified was taken, please specify a different port");
  else {
    // Client path: connect to an existing cluster at the given address.
    setConnectionString(connectionString);
    log.info("Connecting to hazelcast on " + connectionString);
    ClientConfig client = new ClientConfig();
    client.getNetworkConfig().addAddress(connectionString);
    h = HazelcastClient.newHazelcastClient(client);
  }
  this.type = type;
  // Bind the distributed collections shared across the cluster.
  jobs = h.getList(JOBS);
  workers = h.getList(WORKERS);
  // we can make the assumption workers isn't empty because
  // the master node by default comes with a applyTransformToDestination of workers
  if (!this.type.equals("master")) {
    // Clients block until the master has published its worker list.
    while (workers.isEmpty()) {
      log.warn("Waiting for data sync...");
      Thread.sleep(1000);
    }
    log.info("Workers is " + workers.size());
  }
  // Bind the distributed state references used by the training protocol.
  begunTraining = h.getAtomicReference(BEGUN);
  miniBatchSize = h.getAtomicReference(INPUT_SPLIT);
  workerEnabled = h.getMap(WORKER_ENABLED);
  replicate = h.getList(REPLICATE_WEIGHTS);
  topics = h.getList(TOPICS);
  updates = h.getList(UPDATES);
  heartbeat = h.getMap(HEART_BEAT);
  master = h.getAtomicReference(RESULT);
  isPretrain = h.getAtomicReference(IS_PRETRAIN);
  numTimesPretrain = h.getAtomicReference(NUM_TIMES_RUN_PRETRAIN);
  numTimesPretrainRan = h.getAtomicReference(NUM_TIMES_PRETRAIN_RAN);
  done = h.getAtomicReference(DONE);
  validationEpochs = h.getAtomicReference(VALIDATION_EPOCHS);
  improvementThreshold = h.getAtomicReference(IMPROVEMENT_THRESHOLD);
  bestLoss = h.getAtomicReference(BEST_LOSS);
  earlyStop = h.getAtomicReference(EARLY_STOP);
  patience = h.getAtomicReference(PATIENCE);
  patienceIncrease = h.getAtomicReference(PATIENCE_INCREASE);
  numBatches = h.getAtomicReference(NUM_BATCHES_SO_FAR_RAN);
  // applyTransformToDestination defaults only when master, otherwise, overrides previous values
  if (type.equals("master")) {
    begunTraining.set(false);
    saver = createUpdateSaver();
    numTimesPretrainRan.set(0);
    numTimesPretrain.set(1);
    isPretrain.set(true);
    done.set(false);
    resource = new StateTrackerDropWizardResource(this);
    // Early-stopping defaults: track best loss, allow patience-based extension.
    bestLoss.set(Double.POSITIVE_INFINITY);
    earlyStop.set(true);
    patience.set(40.0);
    patienceIncrease.set(2.0);
    improvementThreshold.set(0.995);
    validationEpochs.set((int) Math.min(10, patience() / 2));
    numBatches.set(0);
  }
}