  /**
   * Compiles the given physical plan into an MR plan and compares its printed form
   * against the golden file, or regenerates the golden file when {@code generate} is set.
   */
  private void run(PhysicalPlan pp, String expectedFile) throws Exception {
    String compiledPlan, goldenPlan = null;
    int MAX_SIZE = 100000;
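    // Compile the physical plan into a MapReduce operator plan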
    MRCompiler comp = new MRCompiler(pp, pc);
    comp.compile();

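    // Print the compiled MR plan into an in-memory buffer so it can be compared as text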
    MROperPlan mrp = comp.getMRPlan();
    PlanPrinter ppp = new PlanPrinter(mrp);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ppp.print(baos);
    compiledPlan = baos.toString();

    if (generate) {
      // Regenerate the golden file from the freshly compiled plan and skip the comparison
      FileOutputStream fos = new FileOutputStream(expectedFile);
      fos.write(baos.toByteArray());
      fos.close();
      return;
    }
    // Read the golden plan and strip a trailing newline so it matches the printed plan
    FileInputStream fis = new FileInputStream(expectedFile);
    byte[] b = new byte[MAX_SIZE];
    int len = fis.read(b);
    fis.close();
    goldenPlan = new String(b, 0, len);
    if (goldenPlan.charAt(len - 1) == '\n') goldenPlan = goldenPlan.substring(0, len - 1);

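    // Dump both plans so a mismatch is easy to diagnose from the test output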
    pp.explain(System.out);
    System.out.println();
    System.out.println("<<<" + compiledPlan + ">>>");
    System.out.println("-------------");
    System.out.println("Golden");
    System.out.println("<<<" + goldenPlan + ">>>");
    System.out.println("-------------");
    assertEquals(goldenPlan, compiledPlan);
  }
    /** Configures the Reduce plan, the POPackage operator and the reporter thread */
    @SuppressWarnings("unchecked")
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
      super.setup(context);
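      // In Illustrator mode the POPackage comes from the context rather than from the serialized job configuration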
      inIllustrator = inIllustrator(context);
      if (inIllustrator) pack = getPack(context);
      Configuration jConf = context.getConfiguration();
      SpillableMemoryManager.configure(ConfigurationUtil.toProperties(jConf));
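      // Expose this task's index to the rest of the plan through the job configuration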
      context.getConfiguration().set(PigConstants.TASK_INDEX,
          Integer.toString(context.getTaskAttemptID().getTaskID().getId()));
      sJobContext = context;
      sJobConfInternal.set(context.getConfiguration());
      sJobConf = context.getConfiguration();
      try {
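        // Rebuild the execution state that was serialized into the job configuration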
        PigContext.setPackageImportList(
            (ArrayList<String>) ObjectSerializer.deserialize(jConf.get("udf.import.list")));
        pigContext = (PigContext) ObjectSerializer.deserialize(jConf.get("pig.pigContext"));

        // This attempts to fetch all of the generated code from the distributed cache and resolve it
        SchemaTupleBackend.initialize(jConf, pigContext);

        if (rp == null)
          rp = (PhysicalPlan) ObjectSerializer.deserialize(jConf.get("pig.reducePlan"));
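        // Collect all store operators from the reduce plan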
        stores = PlanHelper.getPhysicalOperators(rp, POStore.class);

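        // Outside Illustrator mode the POPackage is deserialized from the job configuration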
        if (!inIllustrator)
          pack = (POPackage) ObjectSerializer.deserialize(jConf.get("pig.reduce.package"));
        // To be removed
        if (rp.isEmpty()) log.debug("Reduce Plan empty!");
        else {
          ByteArrayOutputStream baos = new ByteArrayOutputStream();
          rp.explain(baos);
          log.debug(baos.toString());
        }
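        // Reporter used by the physical operators to report progress back to Hadoop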
        pigReporter = new ProgressableReporter();
        if (!rp.isEmpty()) {
          roots = rp.getRoots().toArray(new PhysicalOperator[1]);
          leaf = rp.getLeaves().get(0);
        }

        // Get the UDF specific context
        MapRedUtil.setupUDFContext(jConf);

      } catch (IOException ioe) {
        String msg = "Problem while configuring reduce plan.";
        throw new RuntimeException(msg, ioe);
      }

      log.info(
          "Aliases being processed per job phase (AliasName[line,offset]): "
              + jConf.get("pig.alias.location"));

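      // Apply the default time zone configured for this job, if any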
      Utils.setDefaultTimeZone(PigMapReduce.sJobConfInternal.get());
    }