@SuppressWarnings("unchecked")
  @Override
  public void reconstruct(RebindContext rebindContext, EntityMemento memento) {
    if (LOG.isTraceEnabled()) LOG.trace("Reconstructing entity: {}", memento.toVerboseString());

    // Note that the id should have been set in the constructor; it is immutable
    entity.setDisplayName(memento.getDisplayName());

    for (Effector<?> eff : memento.getEffectors()) {
      ((EntityInternal) entity).getMutableEntityType().addEffector(eff);
    }

    for (Map.Entry<ConfigKey<?>, Object> entry : memento.getConfig().entrySet()) {
      try {
        ConfigKey<?> key = entry.getKey();
        Object value = entry.getValue();
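        // Resolve the config key's type up front so a missing class fails fast with
        // ClassNotFoundException; the resolved Class is not otherwise used here.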
        Class<?> type =
            (key.getType() != null) ? key.getType() : rebindContext.loadClass(key.getTypeName());
        entity.setConfig((ConfigKey<Object>) key, value);
      } catch (ClassNotFoundException e) {
        throw Throwables.propagate(e);
      }
    }

    ((EntityInternal) entity).getConfigMap().addToLocalBag(memento.getConfigUnmatched());
    ((EntityInternal) entity).refreshInheritedConfig();

    for (Map.Entry<AttributeSensor<?>, Object> entry : memento.getAttributes().entrySet()) {
      try {
        AttributeSensor<?> key = entry.getKey();
        Object value = entry.getValue();
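        // As above, resolve the sensor's type so a missing class fails fast; the resolved
        // Class is not otherwise used here.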
        Class<?> type =
            (key.getType() != null) ? key.getType() : rebindContext.loadClass(key.getTypeName());
        ((EntityInternal) entity)
            .setAttributeWithoutPublishing((AttributeSensor<Object>) key, value);
      } catch (ClassNotFoundException e) {
        throw Throwables.propagate(e);
      }
    }

    setParent(rebindContext, memento);
    addChildren(rebindContext, memento);
    addPolicies(rebindContext, memento);
    addEnrichers(rebindContext, memento);
    addMembers(rebindContext, memento);
    addTags(rebindContext, memento);
    addLocations(rebindContext, memento);

    doReconstruct(rebindContext, memento);
    ((AbstractEntity) entity).rebind();
  }
 static void addToWarsByContext(Entity entity, String url, String targetName) {
   targetName = FILENAME_TO_WEB_CONTEXT_MAPPER.convertDeploymentTargetNameToContext(targetName);
   // TODO a better way to do atomic updates, see comment above
   synchronized (entity) {
     Map<String, String> newWarsMap = MutableMap.copyOf(entity.getConfig(WARS_BY_CONTEXT));
     newWarsMap.put(targetName, url);
     ((EntityInternal) entity).setConfig(WARS_BY_CONTEXT, newWarsMap);
   }
 }
 protected void addLocations(RebindContext rebindContext, EntityMemento memento) {
   for (String id : memento.getLocations()) {
     Location loc = rebindContext.getLocation(id);
     if (loc != null) {
       ((EntityInternal) entity).addLocations(ImmutableList.of(loc));
     } else {
       LOG.warn(
           "Location not found; discarding location {} of entity {}({})",
           new Object[] {id, memento.getType(), memento.getId()});
     }
   }
 }
 @SuppressWarnings("unchecked")
 protected void setSensor(Object v) {
   if (v == FeedConfig.UNCHANGED) {
     // marker value: leave the sensor at its current value
   } else if (v == FeedConfig.REMOVE) {
     ((EntityInternal) entity).removeAttribute(sensor);
   } else if (sensor == FeedConfig.NO_SENSOR) {
     // this config entry is not wired to a sensor; nothing to set
   } else {
     entity.setAttribute(sensor, TypeCoercions.coerce(v, sensor.getType()));
   }
 }
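For reference, the final branch relies on TypeCoercions.coerce to adapt the raw feed value to the sensor's declared type before it is set. A minimal sketch of that coercion step, using a made-up sensor name and value (Sensors.newIntegerSensor and TypeCoercions.coerce are assumed to be the usual Brooklyn helpers on the classpath):

 // Hypothetical sensor and raw value, purely to illustrate the coercion used in setSensor above.
 AttributeSensor<Integer> requestCount = Sensors.newIntegerSensor("example.requestCount");
 Object raw = "42"; // e.g. a string value parsed out of an HTTP or SSH feed
 Integer coerced = TypeCoercions.coerce(raw, Integer.class); // "42" -> 42
 entity.setAttribute(requestCount, coerced);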
 static boolean removeFromWarsByContext(Entity entity, String targetName) {
   targetName = FILENAME_TO_WEB_CONTEXT_MAPPER.convertDeploymentTargetNameToContext(targetName);
   // TODO a better way to do atomic updates, see comment above
   synchronized (entity) {
     Map<String, String> newWarsMap = MutableMap.copyOf(entity.getConfig(WARS_BY_CONTEXT));
     String url = newWarsMap.remove(targetName);
     if (url == null) {
       return false;
     }
     ((EntityInternal) entity).setConfig(WARS_BY_CONTEXT, newWarsMap);
     return true;
   }
 }
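Both helpers above follow the same copy-on-write update: copy the current WARS_BY_CONTEXT map while holding the entity's monitor, mutate the copy, and write it back via setConfig. A minimal sketch generalising that pattern (updateMapConfig is a hypothetical helper, and it assumes the key holds a Map<String, String>, as WARS_BY_CONTEXT appears to):

 // Hypothetical helper generalising addToWarsByContext/removeFromWarsByContext above.
 static void updateMapConfig(
     Entity entity, ConfigKey<Map<String, String>> key, String mapKey, String mapValue) {
   synchronized (entity) {
     Map<String, String> copy = MutableMap.copyOf(entity.getConfig(key));
     copy.put(mapKey, mapValue);
     ((EntityInternal) entity).setConfig(key, copy);
   }
 }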
Example 6
  @SuppressWarnings({"unchecked", "rawtypes"})
  protected void recomputeAfterDelay(long delay) {
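    // At most one recompute job is queued at a time: compareAndSet guards against
    // double-scheduling, and the delay is stretched so successive runs are at least
    // MIN_PERIOD_BETWEEN_EXECS_MILLIS apart.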
    if (isRunning() && executorQueued.compareAndSet(false, true)) {
      long now = System.currentTimeMillis();
      delay = Math.max(0, Math.max(delay, (executorTime + MIN_PERIOD_BETWEEN_EXECS_MILLIS) - now));
      if (LOG.isTraceEnabled()) LOG.trace("{} scheduling publish in {}ms", this, delay);

      Runnable job =
          new Runnable() {
            @Override
            public void run() {
              try {
                executorTime = System.currentTimeMillis();
                executorQueued.set(false);

                onEvent(null);

              } catch (Exception e) {
                if (isRunning()) {
                  LOG.error("Error in enricher " + this + ": " + e, e);
                } else {
                  if (LOG.isDebugEnabled())
                    LOG.debug("Error in enricher " + this + " (but no longer running): " + e, e);
                }
              } catch (Throwable t) {
                LOG.error("Error in enricher " + this + ": " + t, t);
                throw Exceptions.propagate(t);
              }
            }
          };

      ScheduledTask task =
          new ScheduledTask(
              MutableMap.of("delay", Duration.of(delay, TimeUnit.MILLISECONDS)),
              new BasicTask(job));
      ((EntityInternal) entity).getExecutionContext().submit(task);
    }
  }
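The scheduling above boils down to a throttled, single-pending-job recompute: only one job may be queued at a time, and successive executions are kept at least MIN_PERIOD_BETWEEN_EXECS_MILLIS apart. A standalone sketch of that pattern using only JDK types (ScheduledExecutorService in place of Brooklyn's ScheduledTask/BasicTask; all names here are illustrative):

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;

class ThrottledRecompute {
  private static final long MIN_PERIOD_BETWEEN_EXECS_MILLIS = 100;

  private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
  private final AtomicBoolean queued = new AtomicBoolean(false);
  private final AtomicLong lastRunMillis = new AtomicLong(0);

  void requestRecompute(long requestedDelayMillis) {
    // Only one job may be pending at a time; further requests are absorbed by the queued run.
    if (queued.compareAndSet(false, true)) {
      long now = System.currentTimeMillis();
      long delay = Math.max(0, Math.max(requestedDelayMillis,
          (lastRunMillis.get() + MIN_PERIOD_BETWEEN_EXECS_MILLIS) - now));
      executor.schedule(() -> {
        // Record the start time and release the queued flag before recomputing, as above,
        // so a request arriving during recompute() can schedule the next run.
        lastRunMillis.set(System.currentTimeMillis());
        queued.set(false);
        recompute();
      }, delay, TimeUnit.MILLISECONDS);
    }
  }

  void recompute() {
    // placeholder for the actual work (onEvent(null) / publishNow() in the snippets above)
  }
}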
Example 7
  protected void schedulePublish(long delay) {
    if (isRunning() && executorQueued.compareAndSet(false, true)) {
      long now = System.currentTimeMillis();
      delay = Math.max(0, Math.max(delay, (executorTime + MIN_PERIOD_BETWEEN_EXECS_MILLIS) - now));
      if (LOG.isTraceEnabled()) LOG.trace("{} scheduling publish in {}ms", this, delay);

      Runnable job =
          new Runnable() {
            @Override
            public void run() {
              try {
                executorTime = System.currentTimeMillis();
                executorQueued.set(false);

                publishNow();

              } catch (Exception e) {
                if (isRunning()) {
                  LOG.error("Problem resizing: " + e, e);
                } else {
                  if (LOG.isDebugEnabled())
                    LOG.debug("Problem resizing, but no longer running: " + e, e);
                }
              } catch (Throwable t) {
                LOG.error("Problem in service-failure-detector: " + t, t);
                throw Exceptions.propagate(t);
              }
            }
          };

      ScheduledTask task =
          new ScheduledTask(
              MutableMap.of("delay", Duration.of(delay, TimeUnit.MILLISECONDS)),
              new BasicTask(job));
      ((EntityInternal) entity).getExecutionContext().submit(task);
    }
  }
Example 8
  protected synchronized void onServerPoolMemberChanged(Entity member) {
    if (log.isTraceEnabled())
      log.trace(
          "For {}, considering membership of {} which is in locations {}",
          new Object[] {this, member, member.getLocations()});

    if (belongsInServerPool(member)) {
      // TODO can we discover the nodes by asking the riak cluster, rather than assuming what we add
      // will be in there?
      // TODO and can we do join as part of node starting?

      // Cluster membership is tracked in the RIAK_CLUSTER_NODES attribute (member -> riak node name).
      Map<Entity, String> nodes = getAttribute(RIAK_CLUSTER_NODES);
      if (nodes == null) nodes = Maps.newLinkedHashMap();
      String riakName = getRiakName(member);

      if (riakName == null) {
        log.error("Unable to get riak name for node: {}", member.getId());
      } else {
        // flag the first member added as the first node of the riak cluster.
        if (!isFirstNodeSet.get()) {
          nodes.put(member, riakName);
          setAttribute(RIAK_CLUSTER_NODES, nodes);

          ((EntityInternal) member)
              .setAttribute(RiakNode.RIAK_NODE_HAS_JOINED_CLUSTER, Boolean.TRUE);
          isFirstNodeSet.set(true);

          log.info(
              "Adding riak node {}: {}; {} to cluster",
              new Object[] {this, member, getRiakName(member)});

        } else {

          // TODO: be wary of erroneous nodes that are still flagged 'in cluster'
          // add the new node to be part of the riak cluster.
          Optional<Entity> anyNodeInCluster =
              Iterables.tryFind(
                  nodes.keySet(),
                  new Predicate<Entity>() {
                    @Override
                    public boolean apply(@Nullable Entity node) {
                      return (node instanceof RiakNode && hasMemberJoinedCluster(node));
                    }
                  });

          if (anyNodeInCluster.isPresent()) {
            if (!nodes.containsKey(member) && !hasMemberJoinedCluster(member)) {

              String anyNodeName = anyNodeInCluster.get().getAttribute(RiakNode.RIAK_NODE_NAME);
              Entities.invokeEffectorWithArgs(
                  this, member, RiakNode.JOIN_RIAK_CLUSTER, anyNodeName);
              if (getAttribute(IS_CLUSTER_INIT)) {
                Entities.invokeEffector(
                    RiakClusterImpl.this, anyNodeInCluster.get(), RiakNode.COMMIT_RIAK_CLUSTER);
              }
              nodes.put(member, riakName);
              setAttribute(RIAK_CLUSTER_NODES, nodes);
              log.info(
                  "Adding riak node {}: {}; {} to cluster",
                  new Object[] {this, member, getRiakName(member)});
            }
          } else {
            log.error("entity {}: is not present", member.getId());
          }
        }
      }
    } else {
      Map<Entity, String> nodes = getAttribute(RIAK_CLUSTER_NODES);
      if (nodes != null && nodes.containsKey(member)) {
        final Entity memberToBeRemoved = member;

        Optional<Entity> anyNodeInCluster =
            Iterables.tryFind(
                nodes.keySet(),
                new Predicate<Entity>() {

                  @Override
                  public boolean apply(@Nullable Entity node) {
                    return (node instanceof RiakNode
                        && hasMemberJoinedCluster(node)
                        && !node.equals(memberToBeRemoved));
                  }
                });
        if (anyNodeInCluster.isPresent()) {
          Entities.invokeEffectorWithArgs(
              this,
              anyNodeInCluster.get(),
              RiakNode.LEAVE_RIAK_CLUSTER,
              getRiakName(memberToBeRemoved));
        }

        nodes.remove(member);
        setAttribute(RIAK_CLUSTER_NODES, nodes);
        log.info(
            "Removing riak node {}: {}; {} from cluster",
            new Object[] {this, member, getRiakName(member)});
      }
    }
    if (log.isTraceEnabled()) log.trace("Done {} checkEntity {}", this, member);
  }