public void configure(JobConf conf) {
    /*
     * Reads the job configuration values and the weights file that the
     * driver shipped via the distributed cache.
     */

    // Read the sizes of the input (numdims) and hidden (numhid) layers from the configuration
    inputNumdims = conf.get("numdims");
    inputNumhid = conf.get("numhid");

    // Read the weights from distributed cache
    Path[] pathwaysFiles = new Path[0];
    try {
      pathwaysFiles = DistributedCache.getLocalCacheFiles(conf);
      for (Path path : pathwaysFiles) {
        /*
         * Read every distributed-cache file. The driver program ensures
         * there is only one, so the single line read here is the weight line.
         */
        BufferedReader fis = new BufferedReader(new FileReader(path.toString()));
        try {
          weightline = fis.readLine();
        } finally {
          fis.close(); // avoid leaking the file handle
        }
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
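For context, here is a minimal sketch of the driver-side setup this configure() expects; the class name, the numeric values, and the weights-file path are assumptions, not taken from the original driver.

import java.net.URI;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.mapred.JobConf;

public class DriverSketch {
  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf(DriverSketch.class);
    // Values read back by configure() via conf.get(...); 784/500 are placeholders
    conf.set("numdims", "784");
    conf.set("numhid", "500");
    // Ship exactly one weights file, as configure() assumes
    DistributedCache.addCacheFile(new URI("/user/hadoop/weights.txt"), conf);
  }
}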
  @Override
  protected void reduce(Text key, Iterable<Text> vals, Context ctx) {
    String name = null;
    String year = null;

    // Each value is a "field=value" pair; capture the name and year fields
    for (Text val : vals) {
      String[] parts = val.toString().split("=", 2);
      if (parts.length < 2) {
        continue; // skip malformed records
      }

      if (parts[0].equals("name")) {
        name = parts[1];
      }

      if (parts[0].equals("year")) {
        year = parts[1];
      }
    }

    try {
      // Emit only when both fields were present in the input
      if (name != null && year != null) {
        ctx.write(new Text(year), new Text(name));
      }
    } catch (Exception e) {
      e.printStackTrace(System.err);
      throw new Error("I give up", e); // chain the cause for debugging
    }
  }
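A hypothetical mapper that could feed this reducer; the CSV layout and field positions are assumptions inferred from the "name=" and "year=" strings the reducer parses.

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Assumes CSV input lines of the form "id,name,year"
public class NameYearMapper extends Mapper<LongWritable, Text, Text, Text> {
  @Override
  protected void map(LongWritable key, Text value, Context ctx)
      throws IOException, InterruptedException {
    String[] fields = value.toString().split(",");
    if (fields.length < 3) {
      return; // skip malformed lines
    }
    // Emit the "field=value" pairs the reducer expects, keyed by record id
    ctx.write(new Text(fields[0]), new Text("name=" + fields[1]));
    ctx.write(new Text(fields[0]), new Text("year=" + fields[2]));
  }
}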
 private static boolean unlock(String lock) {
   logger.info("Releasing lock " + lock);
   try {
     // Version -1 deletes the node regardless of its current version
     zkInstance.delete(lock, -1);
   } catch (Exception e) {
     logger.debug("Error releasing lock: " + e.toString());
     return false; // report the failure instead of swallowing it
   }
   return true;
 }
  @Override
  public void executeTest() {

    // Number of representable values in [fMinimumValue, fMaximumValue] at the
    // given precision; the base must be 10, not 1 (Math.pow(1, n) is always 1)
    int iRange1 =
        (int) (Math.pow(10, iDecimalPlaces)) * (int) (this.fMaximumValue - this.fMinimumValue) + 1;

    String sLocalOutput = null;
    if (isHadoop) {
      Job conf = null;
      try {
        conf = this.test();
      } catch (Exception e) {
        e.printStackTrace();
      }

      // Skip the HDFS fetch if the job could not be created
      if (conf != null) {
        try {
          sLocalOutput = UtilityHadoop.getFileFromHDFS(sOutput + File.separator + sFileName, conf);
        } catch (IOException e) {
          e.printStackTrace();
        }
      }
    } else {
      try {
        this.testLinear();
      } catch (IOException e) {
        e.printStackTrace();
      }
      sLocalOutput = sOutput + File.separator + this.sFileName;
    }
    int numOfCombinations = UtilityMath.getCombinationAmount(iRange1, 1, true, true).intValue();
    double[] dArrExpected = new double[numOfCombinations];
    for (int i = 0; i < dArrExpected.length; i++) {
      dArrExpected[i] = 1.0;
    }
    Evaluator evaluator = null;
    if (sLocalOutput != null) {
      evaluator =
          new Evaluator(
              sEvaluation, sLocalOutput, dArrExpected, dSignificance, iRange1, iDecimalPlaces);
    }

    boolean isPass = false;
    if (evaluator != null) {
      try {
        isPass = evaluator.evaluate();
      } catch (MathException | ParameterNotValidException | ArrayLengthNotEqualException e) {
        e.printStackTrace();
      }
    }

    System.out.println("Passed: " + isPass);
  }
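A quick worked check of the corrected range formula above, with assumed bounds and precision (not values from the original test):

public class RangeCheck {
  public static void main(String[] args) {
    double fMinimumValue = 0.0, fMaximumValue = 1.0; // assumed bounds
    int iDecimalPlaces = 1; // assumed precision
    int iRange1 =
        (int) (Math.pow(10, iDecimalPlaces)) * (int) (fMaximumValue - fMinimumValue) + 1;
    // 10^1 * (1 - 0) + 1 = 11 values: 0.0, 0.1, ..., 1.0
    System.out.println(iRange1); // prints 11
  }
}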
  private static String lock(String lock) {
    String realPath = "";
    String parent = "/lock";
    String lockName = parent + "/" + lock;

    logger.debug("Getting lock " + lockName);

    // Create the persistent parent node if it does not exist yet
    try {
      if (zkInstance.exists(parent, false) == null) {
        zkInstance.create(parent, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
      }
    } catch (KeeperException.NodeExistsException ignored) {
      // Another process created the parent concurrently; that is fine
    } catch (Exception e) {
      logger.error("Error creating lock node: " + e.toString());
      return null;
    }

    List<String> children;
    try {
      // Create an ephemeral sequential node; its sequence number is our
      // position in the queue of waiters for this lock
      realPath =
          zkInstance.create(
              lockName, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);

      // Poll until no sibling for the same lock has a lower sequence number
      checkLock:
      while (true) {
        children = zkInstance.getChildren(parent, false);
        for (String curChild : children) {
          String child = parent + "/" + curChild;
          if (child.compareTo(realPath) < 0
              && child.length() == realPath.length()
              && curChild.startsWith(lock)) {
            Thread.sleep(300);
            continue checkLock;
          }
        }
        logger.info("Got lock " + lockName);
        return realPath;
      }
    } catch (Exception e) {
      logger.error("Exception while trying to get lock " + lockName + " :" + e.toString());
      e.printStackTrace();
      return null;
    }
  }
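A hypothetical caller showing how lock() and unlock() fit together; the method name, the lock name "myjob", and the critical section are placeholders. It would live in the same class, since both helpers are private static.

  private static void runExclusively() {
    String lockPath = lock("myjob");
    if (lockPath != null) {
      try {
        // ... work that must not run concurrently across processes ...
      } finally {
        // unlock() takes the real (sequential) path returned by lock()
        unlock(lockPath);
      }
    }
  }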
  public void testcheckOutputSpecsForbidRecordCompression() throws IOException {
    Job job = Job.getInstance(new Configuration(), "testcheckOutputSpecsForbidRecordCompression");
    FileSystem fs = FileSystem.getLocal(job.getConfiguration());
    Path outputdir = new Path(System.getProperty("test.build.data", "/tmp") + "/output");
    fs.delete(outputdir, true);

    // Without an output path, FileOutputFormat.checkOutputSpecs would throw
    // InvalidJobConfException, so set one up front
    FileOutputFormat.setOutputPath(job, outputdir);

    // SequenceFileAsBinaryOutputFormat doesn't support record compression
    // It should throw an exception when checked by checkOutputSpecs
    SequenceFileAsBinaryOutputFormat.setCompressOutput(job, true);

    SequenceFileAsBinaryOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);
    try {
      new SequenceFileAsBinaryOutputFormat().checkOutputSpecs(job);
    } catch (Exception e) {
      fail(
          "Block compression should be allowed for "
              + "SequenceFileAsBinaryOutputFormat: Caught "
              + e.getClass().getName());
    }

    SequenceFileAsBinaryOutputFormat.setOutputCompressionType(job, CompressionType.RECORD);
    try {
      new SequenceFileAsBinaryOutputFormat().checkOutputSpecs(job);
      fail("Record compression should not be allowed for " + "SequenceFileAsBinaryOutputFormat");
    } catch (InvalidJobConfException ie) {
      // expected
    } catch (Exception e) {
      fail(
          "Expected "
              + InvalidJobConfException.class.getName()
              + " but caught "
              + e.getClass().getName());
    }
  }
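For reference, a minimal sketch of the compression setup this test confirms is supported; the class name, job name, and output path are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat;

public class BinaryOutputSetup {
  public static Job newJob() throws Exception {
    Job job = Job.getInstance(new Configuration(), "binary-output");
    job.setOutputFormatClass(SequenceFileAsBinaryOutputFormat.class);
    FileOutputFormat.setOutputPath(job, new Path("/tmp/binary-output"));
    SequenceFileAsBinaryOutputFormat.setCompressOutput(job, true);
    // BLOCK compression passes checkOutputSpecs; RECORD is rejected
    SequenceFileAsBinaryOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);
    return job;
  }
}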