private void getnegphase() { /* * It does the negative phase of unsupervised RBM training algorithm * * For details, please refer to Dr. Hinton's paper: * Reducing the dimensionality of data with neural networks. Science, Vol. 313. no. 5786, pp. 504 - 507, 28 July 2006. */ // start calculate the negative phase // calculate the curved value of v1,h1 // find the vector of v1 Matrix negdata = poshidstates.times(vishid.transpose()); // (1 * numhid) * (numhid * numdims) = (1 * numdims) negdata.plusEquals(visbiases); // poshidstates*vishid' + visbiases double[][] tmp1 = negdata.getArray(); int i1 = 0; while (i1 < numdims) { tmp1[0][i1] = 1 / (1 + Math.exp(-tmp1[0][i1])); i1++; } // find the vector of h1 neghidprobs = negdata.times(vishid); // (1 * numdims) * (numdims * numhid) = (1 * numhid) neghidprobs.plusEquals(hidbiases); double[][] tmp2 = neghidprobs.getArray(); int i2 = 0; while (i2 < numhid) { tmp2[0][i2] = 1 / (1 + Math.exp(-tmp2[0][i2])); i2++; } negprods = negdata.transpose().times(neghidprobs); // (numdims * 1) *(1 * numhid) = (numdims * numhid) }
/**
 * Get a path from the local FS. If size is known, we go round-robin over the set of disks (via
 * the configured dirs) and return the first complete path which has enough space.
 *
 * <p>If size is not known, use roulette selection -- pick directories with probability
 * proportional to their available space.
 *
 * @param pathStr the requested path (a leading slash is stripped before resolution)
 * @param size the size of the file to be written, or SIZE_UNKNOWN
 * @param conf the Configuration object
 * @param checkWrite ensure that the path is writable
 * @return a valid local path for the write
 * @throws DiskErrorException if no usable directory can be found
 */
public synchronized Path getLocalPathForWrite(
    String pathStr, long size, Configuration conf, boolean checkWrite) throws IOException {
  confChanged(conf);
  int numDirs = localDirsPath.length;
  int numDirsSearched = 0;
  // remove the leading slash from the path (to make sure that the uri
  // resolution results in a valid path on the dir being checked)
  if (pathStr.startsWith("/")) {
    pathStr = pathStr.substring(1);
  }
  Path returnPath = null;
  Path path = new Path(pathStr);
  if (size == SIZE_UNKNOWN) {
    // do roulette selection: pick dir with probability
    // proportional to available size
    long[] availableOnDisk = new long[dirDF.length];
    long totalAvailable = 0;
    // build the "roulette wheel"
    for (int i = 0; i < dirDF.length; ++i) {
      availableOnDisk[i] = dirDF[i].getAvailable();
      totalAvailable += availableOnDisk[i];
    }
    // Keep rolling the wheel till we get a valid path. The
    // totalAvailable > 0 guard prevents an ArithmeticException in the
    // modulo below when every disk reports zero available space.
    Random r = new java.util.Random();
    while (numDirsSearched < numDirs && returnPath == null && totalAvailable > 0) {
      // Mask off the sign bit rather than using Math.abs:
      // Math.abs(Long.MIN_VALUE) is still negative (overflow), which would
      // make randomPosition negative and walk off the front of the wheel.
      long randomPosition = (r.nextLong() & Long.MAX_VALUE) % totalAvailable;
      int dir = 0;
      while (randomPosition > availableOnDisk[dir]) {
        randomPosition -= availableOnDisk[dir];
        dir++;
      }
      dirNumLastAccessed = dir;
      returnPath = createPath(path, checkWrite);
      if (returnPath == null) {
        totalAvailable -= availableOnDisk[dir];
        availableOnDisk[dir] = 0; // skip this disk
        numDirsSearched++;
      }
    }
  } else {
    // size is known: round-robin starting from the last dir used,
    // taking the first dir with enough free space
    while (numDirsSearched < numDirs && returnPath == null) {
      long capacity = dirDF[dirNumLastAccessed].getAvailable();
      if (capacity > size) {
        returnPath = createPath(path, checkWrite);
      }
      dirNumLastAccessed++;
      dirNumLastAccessed = dirNumLastAccessed % numDirs;
      numDirsSearched++;
    }
  }
  if (returnPath != null) {
    return returnPath;
  }
  // no path found
  throw new DiskErrorException("Could not find any valid local " + "directory for " + pathStr);
}
public void run() {
  // Periodically re-check pending replications until the namesystem stops.
  while (fsRunning) {
    // never sleep longer than the configured timeout
    long period = Math.min(defaultRecheckInterval, timeout);
    try {
      pendingReplicationCheck();
      Thread.sleep(period);
    } catch (InterruptedException ie) {
      FSNamesystem.LOG.debug("PendingReplicationMonitor thread received exception. " + ie);
      // Restore the interrupt status instead of swallowing it, so a shutdown
      // request is visible to this thread's owner; the loop still re-checks
      // fsRunning and exits cleanly on shutdown.
      Thread.currentThread().interrupt();
    }
  }
}
/**
 * Emits (x, sqrt(x)) for each input line, where x is the line parsed as a double.
 *
 * @param key byte offset of the line (unused)
 * @param value the input line, expected to contain a single numeric value
 * @param output collector receiving (value, sqrt(value)) pairs
 * @param reporter progress reporter (unused)
 * @throws IOException if the collector fails
 */
public void map(
    LongWritable key,
    Text value,
    OutputCollector<DoubleWritable, DoubleWritable> output,
    Reporter reporter)
    throws IOException {
  // Parse the line once instead of twice (the original parsed the same
  // string for both the key and the value).
  double number = Double.parseDouble(value.toString());
  DoubleWritable clave = new DoubleWritable();
  DoubleWritable valor = new DoubleWritable();
  clave.set(number);
  valor.set(Math.sqrt(number));
  output.collect(clave, valor);
}
public void reduce(LongWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException { LongWritable curNodeId = key; double previousPRValue = 1; double nextPRValue = 0; double localResidual = 0; String edgeListOfCurNode = ""; long localResidualTransformed = 0; for (Text value : values) { String[] inputInfo = value.toString().split("\\s+"); // incoming pagerank value if (inputInfo.length == 1) { nextPRValue += Double.parseDouble(inputInfo[0]); } // current node info else if (inputInfo.length == 3) { edgeListOfCurNode = inputInfo[2]; previousPRValue = Double.parseDouble(inputInfo[1]); } else if (inputInfo.length == 2) { previousPRValue = Double.parseDouble(inputInfo[1]); } else { System.out.println("ERROR: received unexpected TEXT in length"); } } if (previousPRValue == 1) System.out.println("No node info has been received by a reducer"); // calculate the pagerank value according to the given formula nextPRValue = pagerankFormula(nextPRValue); // should also iterate sink nodes list, add the evenly splitted value // reducer should store the updated node info(NPR) to output directory context.write(null, new Text(curNodeId + " " + nextPRValue + " " + edgeListOfCurNode)); // then compare PPR with NPR try { localResidual = Math.abs(previousPRValue - nextPRValue) / nextPRValue; localResidualTransformed = (long) (localResidual * 10000); // System.out.println("Make sure you got the right transformed residual : // "+localResidualTransformed); } catch (ArithmeticException e) { System.out.println("PPR is zero. Check where you get the value!"); } // assume there is a global counter called residualCounter; context.getCounter(myCounter.ResidualCounter.RESIDUAL_SUM).increment(localResidualTransformed); }
private void prop2nextLayer() { /* * It computes the forward propagation algorithm. */ poshidprobs = data.times(vishid); // (1 * numdims) * (numdims * numhid) poshidprobs.plusEquals(hidbiases); // data*vishid + hidbiases double[][] product_tmp2 = poshidprobs.getArray(); for (int i2 = 0; i2 < numhid; i2++) { /* * compute the updated input, and write them to newinput */ product_tmp2[0][i2] = 1 / (1 + Math.exp(-product_tmp2[0][i2])); newinput[i2] = (int) (product_tmp2[0][i2] * 255.0); } }
private void getposphase() { /* * It does the positive phase of unsupervised RBM training algorithm * * For details, please refer to Dr. Hinton's paper: * Reducing the dimensionality of data with neural networks. Science, Vol. 313. no. 5786, pp. 504 - 507, 28 July 2006. */ // Start calculate the positive phase // calculate the cured value of h0 poshidprobs = data.times(vishid); // (1 * numdims) * (numdims * numhid) poshidprobs.plusEquals(hidbiases); // data*vishid + hidbiases double[][] product_tmp2 = poshidprobs.getArray(); int i2 = 0; while (i2 < numhid) { product_tmp2[0][i2] = 1 / (1 + Math.exp(-product_tmp2[0][i2])); i2++; } posprods = data.transpose().times(poshidprobs); // (numdims * 1) * (1 * numhid) // end of the positive phase calculation, find the binary presentation of h0 int i3 = 0; double[][] tmp1 = poshidprobs.getArray(); double[][] tmp2 = new double[1][numhid]; Random randomgenerator = new Random(); while (i3 < numhid) { /* * a sampling according to possiblity given by poshidprobs */ if (tmp1[0][i3] > randomgenerator.nextDouble()) tmp2[0][i3] = 1; else tmp2[0][i3] = 0; i3++; } // poshidstates is a binary sampling according to possiblity given by poshidprobs poshidstates = new Matrix(tmp2); }
/**
 * Compares the previous and current iteration results element-by-element and
 * decides whether to stop iterating.
 *
 * <p>If any element differs by more than eps, iteration continues: the previous
 * result is replaced by the current one. Exits the JVM if either file is missing
 * or the rename fails.
 *
 * @param conf job configuration used to obtain the FileSystem
 * @return true if all compared elements are within eps (converged)
 * @throws IOException on filesystem errors
 */
public static boolean stopIteration(Configuration conf) throws IOException {
  FileSystem fs = FileSystem.get(conf);
  Path preFile = new Path("preX/Result");
  Path curFile = new Path("curX/part-00000");
  if (!(fs.exists(preFile) && fs.exists(curFile))) {
    System.exit(1);
  }
  boolean stop = true;
  // try-with-resources: the original leaked both streams/readers (never closed,
  // even when a parse error was thrown mid-loop).
  try (BufferedReader br1 = new BufferedReader(new InputStreamReader(fs.open(preFile)));
      BufferedReader br2 = new BufferedReader(new InputStreamReader(fs.open(curFile)))) {
    String line1, line2;
    while ((line1 = br1.readLine()) != null && (line2 = br2.readLine()) != null) {
      String[] str1 = line1.split("\\s+");
      String[] str2 = line2.split("\\s+");
      double preElem = Double.parseDouble(str1[1]);
      double curElem = Double.parseDouble(str2[1]);
      if (Math.abs(preElem - curElem) > eps) {
        stop = false;
        break;
      }
    }
  }
  if (!stop) {
    // not converged: promote the current result to be the next "previous"
    fs.delete(preFile, true);
    if (!fs.rename(curFile, preFile)) {
      System.exit(1);
    }
  }
  return stop;
}
/**
 * Partitions records by the first element of the key.
 *
 * @return a partition index in [0, numPartitions)
 */
@Override
public int getPartition(IntPair key, NullWritable value, int numPartitions) {
  // Mask off the sign bit instead of calling Math.abs:
  // Math.abs(Integer.MIN_VALUE) overflows and stays negative, and the
  // multiplication by 127 can produce MIN_VALUE, which would yield a
  // negative (invalid) partition number.
  return ((key.getFirst() * 127) & Integer.MAX_VALUE) % numPartitions;
}