/*
   * (non-Javadoc)
   * @see eu.stratosphere.core.io.IOReadableWritable#read(java.io.DataInput)
   */
  @Override
  public void read(final DataInput in) throws IOException {
    this.mode = ExecutionMode.values()[in.readInt()];

    final ArrayList<String> requiredPackages = new ArrayList<String>();
    for (int count = in.readInt(); count > 0; count--) {
      requiredPackages.add(in.readUTF());
    }
    this.query = null;
    final byte[] planBuffer = new byte[in.readInt()];
    in.readFully(planBuffer);

    final JobID dummyId = new JobID();
    try {
      LibraryCacheManager.register(
          dummyId, requiredPackages.toArray(new String[requiredPackages.size()]));
      SopremoEnvironment.getInstance().setClassLoader(LibraryCacheManager.getClassLoader(dummyId));
      this.query = SopremoUtil.deserialize(planBuffer, SopremoPlan.class);
    } catch (final IOException e) {
      SopremoUtil.LOG.error(e.getMessage());
      throw e;
    } finally {
      try {
        LibraryCacheManager.unregister(dummyId);
      } catch (final IOException e) {
        // Ignored: a failure to unregister must not mask the result of the read
      }
    }
  }
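  /*
   * For reference, a minimal sketch of the matching write side this read method
   * assumes (the class's actual IOReadableWritable#write is not shown here);
   * getRequiredPackages() and SopremoUtil.serialize(...) are assumed counterparts,
   * named only for illustration:
   *
   *   out.writeInt(this.mode.ordinal());
   *   final List<String> packages = this.getRequiredPackages();
   *   out.writeInt(packages.size());
   *   for (final String p : packages)
   *     out.writeUTF(p);
   *   final byte[] planBuffer = SopremoUtil.serialize(this.query);
   *   out.writeInt(planBuffer.length);
   *   out.write(planBuffer);
   */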
  /*
   * (non-Javadoc)
   * @see com.esotericsoftware.kryo.KryoSerializable#read(com.esotericsoftware.kryo.Kryo,
   * com.esotericsoftware.kryo.io.Input)
   */
  @SuppressWarnings("unchecked")
  @Override
  public void read(final Kryo kryo, final Input input) {
    this.mode = kryo.readObject(input, ExecutionMode.class);
    final ArrayList<String> requiredPackages = kryo.readObject(input, ArrayList.class);

    final JobID dummyId = JobID.generate();
    final ClassLoader oldClassLoader = kryo.getClassLoader();
    try {
      LibraryCacheManager.register(
          dummyId, requiredPackages.toArray(new String[requiredPackages.size()]));
      kryo.setClassLoader(LibraryCacheManager.getClassLoader(dummyId));
      this.query = kryo.readObject(input, SopremoPlan.class);
    } catch (final Exception e) {
      SopremoUtil.LOG.error(e.getMessage());
      throw new KryoException(e);
    } finally {
      kryo.setClassLoader(oldClassLoader);
      try {
        LibraryCacheManager.unregister(dummyId);
      } catch (final Throwable e) {
        SopremoUtil.LOG.error(e.getMessage());
      }
    }
  }
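  /*
   * The matching KryoSerializable#write must mirror the read order exactly; a
   * minimal sketch, assuming a getRequiredPackages() accessor (hypothetical,
   * named only for illustration):
   *
   *   kryo.writeObject(output, this.mode);
   *   kryo.writeObject(output, new ArrayList<String>(this.getRequiredPackages()));
   *   kryo.writeObject(output, this.query);
   */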
  /**
   * Constructs a sample execution graph consisting of two vertices connected by a channel of the
   * given type.
   *
   * @param channelType the channel type to connect the vertices with
   * @param instanceManager the instance manager that shall be used during the creation of the
   *     execution graph
   * @return a sample execution graph
   */
  private ExecutionGraph createExecutionGraph(
      final ChannelType channelType, final InstanceManager instanceManager) {

    final JobGraph jobGraph = new JobGraph("Job Graph");

    final JobInputVertex inputVertex = new JobInputVertex("Input 1", jobGraph);
    inputVertex.setInputClass(InputTask.class);
    inputVertex.setNumberOfSubtasks(1);

    final JobOutputVertex outputVertex = new JobOutputVertex("Output 1", jobGraph);
    outputVertex.setOutputClass(OutputTask.class);
    outputVertex.setNumberOfSubtasks(1);

    try {
      inputVertex.connectTo(outputVertex, channelType);
    } catch (JobGraphDefinitionException e) {
      fail(StringUtils.stringifyException(e));
    }

    try {
      LibraryCacheManager.register(jobGraph.getJobID(), new String[0]);
      return new ExecutionGraph(jobGraph, instanceManager);

    } catch (GraphConversionException e) {
      fail(StringUtils.stringifyException(e));
    } catch (IOException e) {
      fail(StringUtils.stringifyException(e));
    }

    return null;
  }
  /**
   * Unregisters a finished or aborted task.
   *
   * @param id the ID of the task to be unregistered
   */
  private void unregisterTask(final ExecutionVertexID id) {

    // Task de-registration must be atomic
    synchronized (this) {
      final Task task = this.runningTasks.remove(id);
      if (task == null) {
        LOG.error("Cannot find task with ID " + id + " to unregister");
        return;
      }

      // Remove the local tmp files the task fetched via the distributed cache
      for (Entry<String, DistributedCacheEntry> e :
          DistributedCache.readFileInfoFromConfig(task.getEnvironment().getJobConfiguration())) {
        this.fileCache.deleteTmpFile(e.getKey(), e.getValue(), task.getJobID());
      }
      // Unregister task from the byte buffered channel manager
      this.channelManager.unregister(id, task);

      // Unregister task from profiling
      task.unregisterProfiler(this.profiler);

      // Unregister task from memory manager
      task.unregisterMemoryManager(this.memoryManager);

      // Unregister task from library cache manager
      try {
        LibraryCacheManager.unregister(task.getJobID());
      } catch (IOException e) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Unregistering the job vertex ID " + id + " caused an IOException");
        }
      }
    }
  }
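  /*
   * For symmetry, the registration path is assumed to mirror this teardown in
   * reverse order; a sketch only, with hypothetical method names:
   *
   *   this.runningTasks.put(id, task);
   *   this.channelManager.register(id, task);
   *   task.registerProfiler(this.profiler);
   *   task.registerMemoryManager(this.memoryManager);
   */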
  /**
   * Reads required JAR files from an input stream and adds them to the library cache manager.
   *
   * @param in the data stream to read the JAR files from
   * @throws IOException thrown if an error occurs while reading the stream
   */
  private void readRequiredJarFiles(final DataInput in) throws IOException {

    // Read the number of jar files that follow
    final int numJars = in.readInt();

    for (int i = 0; i < numJars; i++) {

      final Path p = new Path();
      p.read(in);
      this.userJars.add(p);

      // Read the size of the jar file
      final long sizeOfJar = in.readLong();

      // Add the jar to the library cache manager
      LibraryCacheManager.addLibrary(this.jobID, p, sizeOfJar, in);
    }

    // Register this job with the library cache manager
    LibraryCacheManager.register(this.jobID, this.userJars.toArray(new Path[0]));
  }
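  /*
   * The stream layout consumed above, with a sketch of the assumed write side
   * (a writeRequiredJarFiles counterpart is hypothetical, shown only to document
   * the format): an int count, then for each jar its Path, its size as a long,
   * and finally the raw jar bytes.
   *
   *   out.writeInt(this.userJars.size());
   *   for (final Path p : this.userJars) {
   *     p.write(out);
   *     out.writeLong(jarSizeOf(p));  // hypothetical size lookup
   *     copyJarBytes(p, out);         // hypothetical byte copy
   *   }
   */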
  /**
   * Checks the behavior of the schedulJob() method with a job consisting of two tasks connected
   * via an in-memory channel.
   */
  @Test
  public void testSchedulJobWithInMemoryChannel() {

    final TestInstanceManager tim = new TestInstanceManager();
    final TestDeploymentManager tdm = new TestDeploymentManager();
    final QueueScheduler scheduler = new QueueScheduler(tdm, tim);

    final ExecutionGraph executionGraph = createExecutionGraph(ChannelType.INMEMORY, tim);

    try {
      try {
        scheduler.schedulJob(executionGraph);
      } catch (SchedulingException e) {
        fail(StringUtils.stringifyException(e));
      }

      // Wait for the deployment to complete
      tdm.waitForDeployment();

      assertEquals(executionGraph.getJobID(), tdm.getIDOfLastDeployedJob());
      final List<ExecutionVertex> listOfDeployedVertices = tdm.getListOfLastDeployedVertices();
      assertNotNull(listOfDeployedVertices);
      // Vertices connected via in-memory channels must be deployed in a single cycle.
      assertEquals(2, listOfDeployedVertices.size());

      // Check if the release of the allocated resources works properly by simulating the vertices'
      // life cycle
      assertEquals(0, tim.getNumberOfReleaseMethodCalls());

      // Simulate vertex life cycle
      for (final ExecutionVertex vertex : listOfDeployedVertices) {
        vertex.updateExecutionState(ExecutionState.STARTING);
        vertex.updateExecutionState(ExecutionState.RUNNING);
        vertex.updateExecutionState(ExecutionState.FINISHING);
        vertex.updateExecutionState(ExecutionState.FINISHED);
      }

      assertEquals(1, tim.getNumberOfReleaseMethodCalls());
    } finally {
      try {
        LibraryCacheManager.unregister(executionGraph.getJobID());
      } catch (IOException ioe) {
        // Ignore exception here
      }
    }
  }
  @Override
  public LibraryCacheProfileResponse getLibraryCacheProfile(LibraryCacheProfileRequest request)
      throws IOException {

    LibraryCacheProfileResponse response = new LibraryCacheProfileResponse(request);
    String[] requiredLibraries = request.getRequiredLibraries();

    for (int i = 0; i < requiredLibraries.length; i++) {
      response.setCached(i, LibraryCacheManager.contains(requiredLibraries[i]) != null);
    }

    return response;
  }
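  /*
   * Typical client-side use of the profile call, sketched under the assumption
   * that the request exposes a setRequiredLibraries(String[]) setter and the
   * response an isCached(int) getter (both hypothetical here): ask which of the
   * job's jars are already cached, then upload only the missing ones.
   *
   *   final LibraryCacheProfileRequest request = new LibraryCacheProfileRequest();
   *   request.setRequiredLibraries(requiredLibraries);
   *   final LibraryCacheProfileResponse response = getLibraryCacheProfile(request);
   *   for (int i = 0; i < requiredLibraries.length; i++)
   *     if (!response.isCached(i))
   *       uploadLibrary(requiredLibraries[i]); // hypothetical upload step
   */
  /**
   * Drives the task through its life cycle: the state is set to RUNNING, the
   * invokable is executed, the state moves to FINISHING while the channels
   * drain, and finally to FINISHED. Cancellation or an error at any point moves
   * the task to CANCELED or FAILED instead, after all channel resources have
   * been released.
   */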
  @Override
  public void run() {

    if (this.invokable == null) {
      LOG.fatal("ExecutionEnvironment has no Invokable set");
      changeExecutionState(ExecutionState.FAILED, "ExecutionEnvironment has no Invokable set");
      return;
    }

    // Now the actual program starts to run
    changeExecutionState(ExecutionState.RUNNING, null);

    // If the task has been canceled in the meantime, do not even start it
    if (this.executionObserver.isCanceled()) {
      changeExecutionState(ExecutionState.CANCELED, null);
      return;
    }

    try {

      // Make the job's class loader the context class loader before invoking the user code
      final ClassLoader cl = LibraryCacheManager.getClassLoader(jobID);
      Thread.currentThread().setContextClassLoader(cl);
      this.invokable.invoke();

      // Make sure we enter the catch block if the task has been canceled in the meantime
      if (this.executionObserver.isCanceled()) {
        throw new InterruptedException();
      }

    } catch (Throwable t) {

      if (!this.executionObserver.isCanceled()) {

        // Perform clean up when the task failed and has not been canceled by the user
        try {
          this.invokable.cancel();
        } catch (Throwable t2) {
          LOG.error(StringUtils.stringifyException(t2));
        }
      }

      // Release all resources that may currently be allocated by the individual channels
      releaseAllChannelResources();

      if (this.executionObserver.isCanceled()) {
        changeExecutionState(ExecutionState.CANCELED, null);
      } else {
        changeExecutionState(ExecutionState.FAILED, StringUtils.stringifyException(t));
      }

      return;
    }

    // Task finished running, but there may be unconsumed output data in some of the channels
    changeExecutionState(ExecutionState.FINISHING, null);

    try {
      // If there are any unclosed input gates, close them and propagate the close
      // operation to the corresponding output gates
      closeInputGates();

      // Close all output gates to indicate that no more records will be emitted
      requestAllOutputGatesToClose();

      // Wait until all input channels are closed
      waitForInputChannelsToBeClosed();

      // Now we wait until all output channels have written out their data and are closed
      waitForOutputChannelsToBeClosed();
    } catch (Throwable t) {

      // Release all resources that may currently be allocated by the individual channels
      releaseAllChannelResources();

      if (this.executionObserver.isCanceled()) {
        changeExecutionState(ExecutionState.CANCELED, null);
      } else {
        changeExecutionState(ExecutionState.FAILED, StringUtils.stringifyException(t));
      }

      return;
    }

    // Release all resources that may currently be allocated by the individual channels
    releaseAllChannelResources();

    // Finally, switch execution state to FINISHED and report to job manager
    changeExecutionState(ExecutionState.FINISHED, null);
  }
  @Override
  public void read(final DataInput in) throws IOException {

    // Read job id
    this.jobID.read(in);

    // Read the job name
    this.jobName = StringRecord.readString(in);

    // Read required jar files
    readRequiredJarFiles(in);

    // Read the total number of vertices
    final int numVertices = in.readInt();

    // First, recreate each vertex; its constructor registers it with this job graph
    for (int i = 0; i < numVertices; i++) {
      final String className = StringRecord.readString(in);
      final JobVertexID id = new JobVertexID();
      id.read(in);
      final String vertexName = StringRecord.readString(in);

      Class<? extends IOReadableWritable> c;
      try {
        c = ClassUtils.getRecordByName(className);
      } catch (ClassNotFoundException cnfe) {
        throw new IOException(cnfe);
      }

      // Find the constructor taking (name, vertex ID, job graph)
      Constructor<? extends IOReadableWritable> cst;
      try {
        cst = c.getConstructor(String.class, JobVertexID.class, JobGraph.class);
      } catch (SecurityException e1) {
        throw new IOException(e1);
      } catch (NoSuchMethodException e1) {
        throw new IOException(e1);
      }

      try {
        cst.newInstance(vertexName, id, this);
      } catch (IllegalArgumentException e) {
        throw new IOException(e);
      } catch (InstantiationException e) {
        throw new IOException(e);
      } catch (IllegalAccessException e) {
        throw new IOException(e);
      } catch (InvocationTargetException e) {
        throw new IOException(e);
      }
    }

    final JobVertexID tmpID = new JobVertexID();
    for (int i = 0; i < numVertices; i++) {

      AbstractJobVertex jv;

      tmpID.read(in);
      if (inputVertices.containsKey(tmpID)) {
        jv = inputVertices.get(tmpID);
      } else if (outputVertices.containsKey(tmpID)) {
        jv = outputVertices.get(tmpID);
      } else if (taskVertices.containsKey(tmpID)) {
        jv = taskVertices.get(tmpID);
      } else {
        throw new IOException("Cannot find vertex with ID " + tmpID + " in any vertex map.");
      }

      // Read the vertex data
      jv.read(in);
    }

    // Find the class loader for the job
    final ClassLoader cl = LibraryCacheManager.getClassLoader(this.jobID);
    if (cl == null) {
      throw new IOException("Cannot find class loader for job graph " + this.jobID);
    }

    // Re-instantiate the job configuration object and read the configuration
    this.jobConfiguration = new Configuration(cl);
    this.jobConfiguration.read(in);
  }