/** {@inheritDoc} */
  @Override
  public void spiStart(String gridName) throws GridSpiException {
    assertParameter(parallelJobsNum > 0, "parallelJobsNum > 0");
    assertParameter(waitJobsNum >= 0, "waitJobsNum >= 0");
    assertParameter(taskAttrKey != null, "taskAttrKey != null");
    assertParameter(jobAttrKey != null, "jobAttrKey != null");

    // Start SPI start stopwatch.
    startStopwatch();

    // Ack parameters.
    if (log.isDebugEnabled()) {
      log.debug(configInfo("parallelJobsNum", parallelJobsNum));
      log.debug(configInfo("taskAttrKey", taskAttrKey));
      log.debug(configInfo("jobAttrKey", jobAttrKey));
      log.debug(configInfo("dfltPriority", dfltPriority));
      log.debug(configInfo("starvationInc", starvationInc));
      log.debug(configInfo("preventStarvation", preventStarvation));
    }

    registerMBean(gridName, this, GridPriorityQueueCollisionSpiMBean.class);

    // Ack start.
    if (log.isDebugEnabled()) log.debug(startInfo());
  }
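  /*
   * Illustrative sketch only (not part of this SPI; the method name is hypothetical): one way a
   * caller could configure the priority collision SPI before grid startup so that the parameter
   * assertions in spiStart(..) hold. The setter names are assumptions inferred from the
   * corresponding fields and the MBean interface; verify them against the actual SPI API.
   */
  private static GridPriorityQueueCollisionSpi exampleCollisionSpiConfig() {
    GridPriorityQueueCollisionSpi colSpi = new GridPriorityQueueCollisionSpi();

    colSpi.setParallelJobsNumber(8); // Assumed setter for parallelJobsNum (must be > 0).
    colSpi.setDefaultPriority(0); // Assumed setter for dfltPriority.
    colSpi.setStarvationPreventionEnabled(true); // Assumed setter for preventStarvation.

    // The configured SPI would then be set on the grid configuration
    // (e.g. cfg.setCollisionSpi(colSpi)) before the grid is started.
    return colSpi;
  }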
  /**
   * Cleans up the event queue: first removes events that exceed the maximum queue size
   * ({@code expireCnt}), then removes events whose age exceeds the time-to-live value
   * ({@code expireAgeMs}).
   */
  private void cleanupQueue() {
    long now = U.currentTimeMillis();

    long queueOversize = evts.sizex() - expireCnt;

    for (int i = 0; i < queueOversize && evts.sizex() > expireCnt; i++) {
      GridEvent expired = evts.poll();

      if (log.isDebugEnabled()) log.debug("Event expired by count: " + expired);
    }

    while (true) {
      ConcurrentLinkedDeque8.Node<GridEvent> node = evts.peekx();

      if (node == null) // Queue is empty.
        break;

      GridEvent evt = node.item();

      if (evt == null) // Competing with another thread.
        continue;

      if (now - evt.timestamp() < expireAgeMs) break;

      if (evts.unlinkx(node) && log.isDebugEnabled())
        log.debug("Event expired by age: " + node.item());
    }
  }
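  /*
   * Worked example (illustrative numbers, not from the source): with expireCnt = 1000 and 1250
   * queued events, the first loop polls 250 events ("expired by count"). The second loop then
   * walks from the head of the queue and unlinks every event older than expireAgeMs, stopping at
   * the first event that is still young enough, since events are queued in arrival order.
   */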
  /**
   * Gets Hibernate session.
   *
   * @param tx Cache transaction.
   * @return Session.
   */
  Session session(@Nullable GridCacheTx tx) {
    Session ses;

    if (tx != null) {
      ses = tx.meta(ATTR_SES);

      if (ses == null) {
        ses = sesFactory.openSession();

        ses.beginTransaction();

        // Store session in transaction metadata, so it can be reused
        // by other operations within the same transaction.
        tx.addMeta(ATTR_SES, ses);

        if (log.isDebugEnabled())
          log.debug("Hibernate session open [ses=" + ses + ", tx=" + tx.xid() + "]");
      }
    } else {
      ses = sesFactory.openSession();

      ses.beginTransaction();
    }

    return ses;
  }
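  /*
   * Illustrative sketch only (the method name is hypothetical): shows how the session cached
   * under ATTR_SES is reused by several store operations running inside the same cache
   * transaction, and is flushed, committed and closed only once in txEnd(..).
   */
  private void sessionReuseSketch(GridCacheTx tx, K key, V val) throws GridException {
    V old = load(tx, key); // First call opens a Hibernate session and caches it in tx metadata.

    if (old == null) put(tx, key, val); // Reuses the same session via session(tx).

    // txEnd(tx, true), invoked when the cache transaction completes, flushes, commits and
    // closes that single session.
  }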
  /** {@inheritDoc} */
  @Override
  public void txEnd(GridCacheTx tx, boolean commit) throws GridException {
    init();

    Session ses = tx.removeMeta(ATTR_SES);

    if (ses != null) {
      Transaction hTx = ses.getTransaction();

      if (hTx != null) {
        try {
          if (commit) {
            ses.flush();

            hTx.commit();
          } else hTx.rollback();

          if (log.isDebugEnabled())
            log.debug("Transaction ended [xid=" + tx.xid() + ", commit=" + commit + ']');
        } catch (HibernateException e) {
          throw new GridException(
              "Failed to end transaction [xid=" + tx.xid() + ", commit=" + commit + ']', e);
        } finally {
          ses.close();
        }
      }
    }
  }
  /** {@inheritDoc} */
  @Override
  public void put(@Nullable GridCacheTx tx, K key, @Nullable V val) throws GridException {
    init();

    if (log.isDebugEnabled())
      log.debug("Store put [key=" + key + ", val=" + val + ", tx=" + tx + ']');

    if (val == null) {
      remove(tx, key);

      return;
    }

    Session ses = session(tx);

    try {
      GridCacheHibernateBlobStoreEntry entry =
          new GridCacheHibernateBlobStoreEntry(toBytes(key), toBytes(val));

      ses.saveOrUpdate(entry);
    } catch (HibernateException e) {
      rollback(ses, tx);

      throw new GridException(
          "Failed to put value to cache store [key=" + key + ", val=" + val + ']', e);
    } finally {
      end(ses, tx);
    }
  }
  /** {@inheritDoc} */
  @SuppressWarnings({"unchecked", "RedundantTypeArguments"})
  @Override
  public V load(@Nullable GridCacheTx tx, K key) throws GridException {
    init();

    if (log.isDebugEnabled()) log.debug("Store load [key=" + key + ", tx=" + tx + ']');

    Session ses = session(tx);

    try {
      GridCacheHibernateBlobStoreEntry entry =
          (GridCacheHibernateBlobStoreEntry)
              ses.get(GridCacheHibernateBlobStoreEntry.class, toBytes(key));

      if (entry == null) return null;

      return fromBytes(entry.getValue());
    } catch (HibernateException e) {
      rollback(ses, tx);

      throw new GridException("Failed to load value from cache store with key: " + key, e);
    } finally {
      end(ses, tx);
    }
  }
  /** {@inheritDoc} */
  @Override
  public void spiStop() throws GridSpiException {
    unregisterMBean();

    // Ack ok stop.
    if (log.isDebugEnabled()) log.debug(stopInfo());
  }
  /** {@inheritDoc} */
  @Override
  public void spiStart(String gridName) throws GridSpiException {
    // Start SPI start stopwatch.
    startStopwatch();

    assertParameter(expireCnt > 0, "expireCnt > 0");
    assertParameter(expireAgeMs > 0, "expireAgeMs > 0");

    // Ack parameters.
    if (log.isDebugEnabled()) {
      log.debug(configInfo("expireAgeMs", expireAgeMs));
      log.debug(configInfo("expireCnt", expireCnt));
    }

    registerMBean(gridName, this, GridMemoryEventStorageSpiMBean.class);

    // Ack ok start.
    if (log.isDebugEnabled()) log.debug(startInfo());
  }
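  /*
   * Illustrative sketch only (not part of this SPI; the method name is hypothetical): configuring
   * the in-memory event storage limits checked by the assertions above. The setter names are
   * assumptions inferred from the expireCnt/expireAgeMs fields and the MBean interface; verify
   * them against the actual SPI API.
   */
  private static GridMemoryEventStorageSpi exampleEventStorageSpiConfig() {
    GridMemoryEventStorageSpi evtSpi = new GridMemoryEventStorageSpi();

    evtSpi.setExpireCount(10000); // Assumed setter for expireCnt: keep at most 10,000 events.
    evtSpi.setExpireAgeMs(60000); // Assumed setter for expireAgeMs: drop events older than 1 minute.

    // The configured SPI would then be passed to the grid configuration
    // (e.g. cfg.setEventStorageSpi(evtSpi)) before startup.
    return evtSpi;
  }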
  /**
   * Gets job priority. First tries to get it from the job context. If the job context has no
   * priority, tries the task session. If the task session has no priority either, the default
   * priority is used.
   *
   * @param ctx Collision job context.
   * @return Job priority.
   */
  private int getJobPriority(GridCollisionJobContext ctx) {
    assert ctx != null;

    Integer p = null;

    GridJobContext jctx = ctx.getJobContext();

    try {
      p = (Integer) jctx.getAttribute(jobAttrKey);
    } catch (ClassCastException e) {
      LT.error(
          log,
          e,
          "Type of job context priority attribute '"
              + jobAttrKey
              + "' is not java.lang.Integer [type="
              + jctx.getAttribute(jobAttrKey).getClass()
              + ']');
    }

    if (p == null) {
      GridTaskSession ses = ctx.getTaskSession();

      try {
        p = (Integer) ses.getAttribute(taskAttrKey);
      } catch (ClassCastException e) {
        LT.error(
            log,
            e,
            "Type of task session priority attribute '"
                + taskAttrKey
                + "' is not java.lang.Integer [type="
                + ses.getAttribute(taskAttrKey).getClass()
                + ']');
      }

      if (p == null) {
        if (log.isDebugEnabled()) {
          log.debug(
              "Failed get priority from job context attribute '"
                  + jobAttrKey
                  + "' and task session attribute '"
                  + taskAttrKey
                  + "' (will use default priority): "
                  + dfltPriority);
        }

        p = dfltPriority;
      }
    }

    assert p != null;

    return p;
  }
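  /*
   * Illustrative sketch only (the method name is hypothetical, and setAttribute on the job
   * context is an assumption): shows where the attributes read by getJobPriority(..) would
   * typically be set, so that a per-job priority overrides the per-task priority, which in turn
   * overrides dfltPriority.
   */
  private void prioritySketch(GridTaskSession ses, GridJobContext jobCtx) throws GridException {
    ses.setAttribute(taskAttrKey, 5); // Task-wide priority, used when a job sets none.

    jobCtx.setAttribute(jobAttrKey, 10); // Per-job priority, checked first by getJobPriority(..).

    // A job with neither attribute (or with a non-Integer value) falls back to dfltPriority.
  }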
  /** {@inheritDoc} */
  @SuppressWarnings("BusyWait")
  @Override
  public Boolean reduce(List<GridComputeJobResult> results) throws GridException {
    assert taskSes != null;
    assert results != null;
    assert params != null;
    assert !params.isEmpty();
    assert results.size() == params.size();

    Map<String, Integer> receivedParams = new HashMap<>();

    boolean allAttrReceived = false;

    int cnt = 0;

    while (!allAttrReceived && cnt++ < 3) {
      allAttrReceived = true;

      for (Map.Entry<String, Integer> entry : params.entrySet()) {
        assert taskSes.getAttribute(entry.getKey()) != null;

        Integer newVal = (Integer) taskSes.getAttribute(entry.getKey());

        assert newVal != null;

        receivedParams.put(entry.getKey(), newVal);

        if (newVal != entry.getValue() + 1) allAttrReceived = false;
      }

      if (!allAttrReceived) {
        try {
          Thread.sleep(100);
        } catch (InterruptedException e) {
          throw new GridException("Thread interrupted.", e);
        }
      }
    }

    if (log.isDebugEnabled()) {
      for (Map.Entry<String, Integer> entry : receivedParams.entrySet()) {
        log.debug(
            "Received session attr value [name="
                + entry.getKey()
                + ", val="
                + entry.getValue()
                + ", expected="
                + (params.get(entry.getKey()) + 1)
                + ']');
      }
    }

    return allAttrReceived;
  }
  /** {@inheritDoc} */
  @Override
  public void record(GridEvent evt) throws GridSpiException {
    assert evt != null;

    // Filter out events.
    if (filter == null || filter.apply(evt)) {
      cleanupQueue();

      evts.add(evt);

      // Make sure to filter out metrics updates to prevent flooding the log.
      if (evt.type() != EVT_NODE_METRICS_UPDATED && log.isDebugEnabled())
        log.debug("Event recorded: " + evt);
    }
  }
  /** {@inheritDoc} */
  @Override
  public Map<? extends GridComputeJob, GridNode> map(List<GridNode> subgrid, Integer arg)
      throws GridException {
    assert taskSes != null;
    assert arg != null;
    assert arg > 0;

    Map<GridSessionLoadTestJob, GridNode> map = new HashMap<>(subgrid.size());

    Iterator<GridNode> iter = subgrid.iterator();

    Random rnd = new Random();

    params = new HashMap<>(arg);

    Collection<UUID> assigned = new ArrayList<>(subgrid.size());

    for (int i = 0; i < arg; i++) {
      // Recycle iterator.
      if (!iter.hasNext()) iter = subgrid.iterator();

      String paramName = UUID.randomUUID().toString();

      int paramVal = rnd.nextInt();

      taskSes.setAttribute(paramName, paramVal);

      GridNode node = iter.next();

      assigned.add(node.id());

      map.put(new GridSessionLoadTestJob(paramName), node);

      params.put(paramName, paramVal);

      if (log.isDebugEnabled())
        log.debug("Set session attribute [name=" + paramName + ", value=" + paramVal + ']');
    }

    taskSes.setAttribute("nodes", assigned);

    return map;
  }
  /** {@inheritDoc} */
  @SuppressWarnings({"JpaQueryApiInspection", "JpaQlInspection"})
  @Override
  public void remove(@Nullable GridCacheTx tx, K key) throws GridException {
    init();

    if (log.isDebugEnabled()) log.debug("Store remove [key=" + key + ", tx=" + tx + ']');

    Session ses = session(tx);

    try {
      Object obj = ses.get(GridCacheHibernateBlobStoreEntry.class, toBytes(key));

      if (obj != null) ses.delete(obj);
    } catch (HibernateException e) {
      rollback(ses, tx);

      throw new GridException("Failed to remove value from cache store with key: " + key, e);
    } finally {
      end(ses, tx);
    }
  }
  /**
   * Initializes store.
   *
   * @throws GridException If failed to initialize.
   */
  private void init() throws GridException {
    if (initGuard.compareAndSet(false, true)) {
      if (log.isDebugEnabled()) log.debug("Initializing cache store.");

      try {
        if (sesFactory != null)
          // Session factory has been provided - nothing to do.
          return;

        if (!F.isEmpty(hibernateCfgPath)) {
          try {
            URL url = new URL(hibernateCfgPath);

            sesFactory = new Configuration().configure(url).buildSessionFactory();

            if (log.isDebugEnabled()) log.debug("Configured session factory using URL: " + url);

            // Session factory has been successfully initialized.
            return;
          } catch (MalformedURLException e) {
            if (log.isDebugEnabled())
              log.debug("Caught malformed URL exception: " + e.getMessage());
          }

          // Provided path is not a valid URL. File?
          File cfgFile = new File(hibernateCfgPath);

          if (cfgFile.exists()) {
            sesFactory = new Configuration().configure(cfgFile).buildSessionFactory();

            if (log.isDebugEnabled())
              log.debug("Configured session factory using file: " + hibernateCfgPath);

            // Session factory has been successfully initialized.
            return;
          }

          // Provided path is not a file. Classpath resource?
          sesFactory = new Configuration().configure(hibernateCfgPath).buildSessionFactory();

          if (log.isDebugEnabled())
            log.debug("Configured session factory using classpath resource: " + hibernateCfgPath);
        } else {
          if (hibernateProps == null) {
            U.warn(
                log, "No Hibernate configuration has been provided for store (will use default).");

            hibernateProps = new Properties();

            hibernateProps.setProperty("hibernate.connection.url", DFLT_CONN_URL);
            hibernateProps.setProperty("hibernate.show_sql", DFLT_SHOW_SQL);
            hibernateProps.setProperty("hibernate.hbm2ddl.auto", DFLT_HBM2DDL_AUTO);
          }

          Configuration cfg = new Configuration();

          cfg.setProperties(hibernateProps);

          assert resourceAvailable(MAPPING_RESOURCE);

          cfg.addResource(MAPPING_RESOURCE);

          sesFactory = cfg.buildSessionFactory();

          if (log.isDebugEnabled())
            log.debug("Configured session factory using properties: " + hibernateProps);
        }
      } catch (HibernateException e) {
        throw new GridException("Failed to initialize store.", e);
      } finally {
        initLatch.countDown();
      }
    } else if (initLatch.getCount() > 0) U.await(initLatch);

    if (sesFactory == null) throw new GridException("Cache store was not properly initialized.");
  }
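  /*
   * Illustrative sketch only (not part of this class; the method name and setters are assumptions
   * inferred from the field names): supplying Hibernate settings before the store is first used.
   * init() resolves the configuration in this order: explicit session factory, then
   * hibernateCfgPath (tried as URL, then file, then classpath resource), then hibernateProps,
   * and finally the built-in defaults.
   */
  private static void exampleStoreConfig(GridCacheHibernateBlobStore<Object, Object> store) {
    Properties props = new Properties();

    props.setProperty("hibernate.connection.url", "jdbc:h2:mem:example"); // Example H2 URL (assumed).
    props.setProperty("hibernate.show_sql", "false");
    props.setProperty("hibernate.hbm2ddl.auto", "update");

    store.setHibernateProperties(props); // Assumed setter for hibernateProps.

    // Alternatively, point the store at a hibernate.cfg.xml via the assumed setter for
    // hibernateCfgPath, e.g. store.setHibernateConfigurationPath("hibernate.cfg.xml").
  }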