/*
   * (non-Javadoc)
   * @see eu.stratosphere.core.io.IOReadableWritable#read(java.io.DataInput)
   */
  @Override
  public void read(final DataInput in) throws IOException {
    // Deserialization order must mirror the corresponding write(): execution mode,
    // required package names, then the serialized plan bytes.
    this.mode = ExecutionMode.values()[in.readInt()];

    final ArrayList<String> requiredPackages = new ArrayList<String>();
    for (int count = in.readInt(); count > 0; count--) requiredPackages.add(in.readUTF());
    this.query = null;
    final byte[] planBuffer = new byte[in.readInt()];
    in.readFully(planBuffer);

    // Register the required packages under a throw-away job id so that the plan can be
    // deserialized with a class loader that has those jars on its path.
    final JobID dummyId = new JobID();
    try {
      LibraryCacheManager.register(
          dummyId, requiredPackages.toArray(new String[requiredPackages.size()]));
      SopremoEnvironment.getInstance().setClassLoader(LibraryCacheManager.getClassLoader(dummyId));
      // Any IOException here propagates to the caller (declared by the signature)
      // instead of being silently swallowed, which previously left this.query null.
      this.query = SopremoUtil.deserialize(planBuffer, SopremoPlan.class);
    } finally {
      try {
        LibraryCacheManager.unregister(dummyId);
      } catch (final IOException e) {
        // Best-effort cleanup: a failed unregister must not mask the original error,
        // but it should not be silently ignored either.
        SopremoUtil.LOG.error(e.getMessage(), e);
      }
    }
  }
  /*
   * (non-Javadoc)
   * @see com.esotericsoftware.kryo.KryoSerializable#read(com.esotericsoftware.kryo.Kryo,
   * com.esotericsoftware.kryo.io.Input)
   */
  @SuppressWarnings("unchecked")
  @Override
  public void read(final Kryo kryo, final Input input) {
    // Read order must mirror write(): execution mode, required packages, then the plan.
    this.mode = kryo.readObject(input, ExecutionMode.class);
    final ArrayList<String> requiredPackages = kryo.readObject(input, ArrayList.class);

    // Register the packages under a fresh throw-away job id so the plan can be read
    // with a class loader that has the required jars on its path.
    final JobID dummId = JobID.generate();
    final ClassLoader oldClassLoader = kryo.getClassLoader();
    try {
      LibraryCacheManager.register(
          dummId, requiredPackages.toArray(new String[requiredPackages.size()]));
      kryo.setClassLoader(LibraryCacheManager.getClassLoader(dummId));
      this.query = kryo.readObject(input, SopremoPlan.class);
    } catch (final Exception e) {
      // Log the full stack trace (not only the message) before wrapping; the
      // KryoException keeps the original cause for callers.
      SopremoUtil.LOG.error(e.getMessage(), e);
      throw new KryoException(e);
    } finally {
      // Always restore the previous class loader and drop the temporary registration,
      // even when deserialization failed.
      kryo.setClassLoader(oldClassLoader);
      try {
        LibraryCacheManager.unregister(dummId);
      } catch (final Throwable e) {
        SopremoUtil.LOG.error(e.getMessage(), e);
      }
    }
  }
  @Override
  public void run() {

    // Fail fast: without an invokable there is nothing to execute, and falling
    // through would only produce a NullPointerException at invoke() below.
    if (invokable == null) {
      LOG.fatal("ExecutionEnvironment has no Invokable set");
      changeExecutionState(ExecutionState.FAILED, "ExecutionEnvironment has no Invokable set");
      return;
    }

    // Now the actual program starts to run
    changeExecutionState(ExecutionState.RUNNING, null);

    // If the task has been canceled in the mean time, do not even start it
    if (this.executionObserver.isCanceled()) {
      changeExecutionState(ExecutionState.CANCELED, null);
      return;
    }

    try {

      // Activate input channels
      // activateInputChannels();

      // Run user code with the job's class loader so user classes resolve.
      ClassLoader cl = LibraryCacheManager.getClassLoader(jobID);
      Thread.currentThread().setContextClassLoader(cl);
      this.invokable.invoke();

      // Make sure, we enter the catch block when the task has been canceled
      if (this.executionObserver.isCanceled()) {
        throw new InterruptedException();
      }

    } catch (Throwable t) {

      if (!this.executionObserver.isCanceled()) {

        // Perform clean up when the task failed and has been not canceled by the user
        try {
          this.invokable.cancel();
        } catch (Throwable t2) {
          LOG.error(StringUtils.stringifyException(t2));
        }
      }

      // Release all resources that may currently be allocated by the individual channels
      releaseAllChannelResources();

      // A cancellation surfaces as CANCELED; every other throwable as FAILED with its trace.
      if (this.executionObserver.isCanceled()) {
        changeExecutionState(ExecutionState.CANCELED, null);
      } else {
        changeExecutionState(ExecutionState.FAILED, StringUtils.stringifyException(t));
      }

      return;
    }

    // Task finished running, but there may be unconsumed output data in some of the channels
    changeExecutionState(ExecutionState.FINISHING, null);

    try {
      // If there is any unclosed input gate, close it and propagate close operation to
      // corresponding output gate
      closeInputGates();

      // First, close all output gates to indicate no records will be emitted anymore
      requestAllOutputGatesToClose();

      // Wait until all input channels are closed
      waitForInputChannelsToBeClosed();

      // Now we wait until all output channels have written out their data and are closed
      waitForOutputChannelsToBeClosed();
    } catch (Throwable t) {

      // Release all resources that may currently be allocated by the individual channels
      releaseAllChannelResources();

      if (this.executionObserver.isCanceled()) {
        changeExecutionState(ExecutionState.CANCELED, null);
      } else {
        changeExecutionState(ExecutionState.FAILED, StringUtils.stringifyException(t));
      }

      return;
    }

    // Release all resources that may currently be allocated by the individual channels
    releaseAllChannelResources();

    // Finally, switch execution state to FINISHED and report to job manager
    changeExecutionState(ExecutionState.FINISHED, null);
  }
  @Override
  public void read(final DataInput in) throws IOException {

    // Read job id
    this.jobID.read(in);

    // Read the job name
    this.jobName = StringRecord.readString(in);

    // Read required jar files
    readRequiredJarFiles(in);

    // First read total number of vertices;
    final int numVertices = in.readInt();

    // First pass: recreate each vertex from its class name and id.
    for (int i = 0; i < numVertices; i++) {
      final String className = StringRecord.readString(in);
      final JobVertexID id = new JobVertexID();
      id.read(in);
      final String vertexName = StringRecord.readString(in);

      Class<? extends IOReadableWritable> c;
      try {
        c = ClassUtils.getRecordByName(className);
      } catch (ClassNotFoundException cnfe) {
        // Wrap with the original exception as cause instead of flattening it to a string.
        throw new IOException(cnfe);
      }

      // Find the (name, id, graph) constructor of the vertex class.
      Constructor<? extends IOReadableWritable> cst;
      try {
        cst = c.getConstructor(String.class, JobVertexID.class, JobGraph.class);
      } catch (SecurityException e1) {
        throw new IOException(e1);
      } catch (NoSuchMethodException e1) {
        throw new IOException(e1);
      }

      // The created instance is not kept here; presumably the vertex constructor
      // registers itself with this graph — TODO confirm against AbstractJobVertex.
      try {
        cst.newInstance(vertexName, id, this);
      } catch (IllegalArgumentException e) {
        throw new IOException(e);
      } catch (InstantiationException e) {
        throw new IOException(e);
      } catch (IllegalAccessException e) {
        throw new IOException(e);
      } catch (InvocationTargetException e) {
        throw new IOException(e);
      }
    }

    // Second pass: look each vertex up by id and let it read its own data.
    final JobVertexID tmpID = new JobVertexID();
    for (int i = 0; i < numVertices; i++) {

      AbstractJobVertex jv;

      tmpID.read(in);
      if (inputVertices.containsKey(tmpID)) {
        jv = inputVertices.get(tmpID);
      } else if (outputVertices.containsKey(tmpID)) {
        jv = outputVertices.get(tmpID);
      } else if (taskVertices.containsKey(tmpID)) {
        jv = taskVertices.get(tmpID);
      } else {
        throw new IOException("Cannot find vertex with ID " + tmpID + " in any vertex map.");
      }

      // Read the vertex data
      jv.read(in);
    }

    // Find the class loader for the job
    final ClassLoader cl = LibraryCacheManager.getClassLoader(this.jobID);
    if (cl == null) {
      throw new IOException("Cannot find class loader for job graph " + this.jobID);
    }

    // Re-instantiate the job configuration object and read the configuration
    this.jobConfiguration = new Configuration(cl);
    this.jobConfiguration.read(in);
  }