public DefaultTopologyAssignContext(TopologyAssignContext context) {
    super(context);

    try {
      // build the system topology (the user topology plus framework-added system components)
      sysTopology = Common.system_topology(stormConf, rawTopology);
    } catch (Exception e) {
      throw new FailedAssignTopologyException("Failed to generate system topology");
    }

    // supervisorId -> hostname, and the reverse mapping hostname -> supervisorIds
    sidToHostname = generateSidToHost();
    hostToSid = JStormUtils.reverse_map(sidToHostname);

    // reuse the workers from the previous assignment if there is one, otherwise start empty
    if (oldAssignment != null && oldAssignment.getWorkers() != null) {
      oldWorkers = oldAssignment.getWorkers();
    } else {
      oldWorkers = new HashSet<ResourceWorkerSlot>();
    }

    refineDeadTasks();

    // invert taskId -> component into component -> taskIds, then sort each task list ascending
    componentTasks = JStormUtils.reverse_map(context.getTaskToComponent());

    for (Entry<String, List<Integer>> entry : componentTasks.entrySet()) {
      List<Integer> componentTaskList = entry.getValue();

      Collections.sort(componentTaskList);
    }

    totalWorkerNum = computeWorkerNum();

    unstoppedWorkerNum = computeUnstoppedAssignments();
  }
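
The constructor above leans on JStormUtils.reverse_map twice: once to turn supervisorId -> hostname into hostname -> supervisorIds, and once to turn taskId -> component into component -> taskIds before sorting each task list. Below is a minimal plain-Java sketch of that invert-and-sort pattern; the class and helper names are hypothetical, and the grouping behaviour (keys collected into a list per value) is assumed from how the result is typed and used here, not taken from the JStorm source.

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ReverseMapSketch {

  // Group the keys of the input map by value: {1=spout, 2=bolt, 3=bolt} -> {spout=[1], bolt=[2, 3]}
  static <K, V> Map<V, List<K>> reverseMap(Map<K, V> input) {
    Map<V, List<K>> result = new HashMap<V, List<K>>();
    for (Map.Entry<K, V> entry : input.entrySet()) {
      List<K> keys = result.get(entry.getValue());
      if (keys == null) {
        keys = new ArrayList<K>();
        result.put(entry.getValue(), keys);
      }
      keys.add(entry.getKey());
    }
    return result;
  }

  public static void main(String[] args) {
    Map<Integer, String> taskToComponent = new HashMap<Integer, String>();
    taskToComponent.put(3, "word-count-bolt");
    taskToComponent.put(1, "sentence-spout");
    taskToComponent.put(2, "word-count-bolt");

    Map<String, List<Integer>> componentTasks = reverseMap(taskToComponent);
    // sort each component's task list in place, as the constructor does
    for (Map.Entry<String, List<Integer>> entry : componentTasks.entrySet()) {
      Collections.sort(entry.getValue());
    }
    System.out.println(componentTasks); // e.g. {sentence-spout=[1], word-count-bolt=[2, 3]}
  }
}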
Example #2
  /**
   * Builds the task/component maps and the per-topology resource maps. The fields involved are:
   *
   *   private ConcurrentHashMap<Integer, WorkerSlot> taskNodeport;
   *   private HashMap<Integer, String> tasksToComponent;
   *   private Map<String, List<Integer>> componentToSortedTasks;
   *   private Map<String, Map<String, Fields>> componentToStreamToFields;
   *   private Map<String, Object> defaultResources;
   *   private Map<String, Object> userResources;
   *   private Map<String, Object> executorData;
   *   private Map registeredMetrics;
   *
   * @throws Exception if the task information cannot be read from ZK
   */
  private void generateMaps() throws Exception {
    // taskId -> component name, read from ZK
    this.tasksToComponent = Cluster.topology_task_info(zkCluster, topologyId);
    LOG.info("Map<taskId, component>:" + tasksToComponent);

    // invert to component -> taskIds and sort each task list ascending
    this.componentToSortedTasks = JStormUtils.reverse_map(tasksToComponent);
    for (Map.Entry<String, List<Integer>> entry : componentToSortedTasks.entrySet()) {
      List<Integer> tasks = entry.getValue();

      Collections.sort(tasks);
    }

    // start with empty resource, executor-data and metrics maps
    this.defaultResources = new HashMap<String, Object>();
    this.userResources = new HashMap<String, Object>();
    this.executorData = new HashMap<String, Object>();
    this.registeredMetrics = new HashMap();
  }
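
Both generateMaps and the constructor in the first example sort each component's task list by calling Collections.sort on entry.getValue() and never put the list back into the map. That works because the entry returns a reference to the list object stored in the map, so the sort mutates the map's value in place. A tiny stand-alone check of that behaviour, using only java.util and a hypothetical class name:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SortInPlaceSketch {
  public static void main(String[] args) {
    Map<String, List<Integer>> componentToSortedTasks = new HashMap<String, List<Integer>>();
    componentToSortedTasks.put("bolt", new ArrayList<Integer>(Arrays.asList(7, 3, 5)));

    for (Map.Entry<String, List<Integer>> entry : componentToSortedTasks.entrySet()) {
      // sorts the list object stored in the map; no put() is needed afterwards
      Collections.sort(entry.getValue());
    }

    System.out.println(componentToSortedTasks.get("bolt")); // prints [3, 5, 7]
  }
}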
  /**
   * Backs up the topology's Assignment to ZK.
   *
   * @@@ Question: do we need to do the backup operation every time?
   *
   * @param assignment the assignment to back up
   * @param event the assign event carrying the topology id and name
   */
  public void backupAssignment(Assignment assignment, TopologyAssignEvent event) {
    String topologyId = event.getTopologyId();
    String topologyName = event.getTopologyName();
    try {
      StormClusterState zkClusterState = nimbusData.getStormClusterState();
      // minor inefficiency: the task info is fetched from ZK twice when assigning one topology
      HashMap<Integer, String> tasks = Cluster.topology_task_info(zkClusterState, topologyId);

      // component -> taskIds, sorted ascending, stored alongside the assignment in the backup
      Map<String, List<Integer>> componentTasks = JStormUtils.reverse_map(tasks);

      for (Entry<String, List<Integer>> entry : componentTasks.entrySet()) {
        List<Integer> taskIds = entry.getValue();

        Collections.sort(taskIds);
      }

      AssignmentBak assignmentBak = new AssignmentBak(componentTasks, assignment);
      zkClusterState.backup_assignment(topologyName, assignmentBak);

    } catch (Exception e) {
      LOG.warn("Failed to backup " + topologyId + " assignment " + assignment, e);
    }
  }
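
On the open question in the javadoc about backing up on every assignment: one possible approach is to remember, per topology, the last snapshot that was written and only call backup_assignment when it changes. The sketch below is purely illustrative; the lastBackup cache and needsBackup helper are hypothetical and not part of JStorm, and for brevity it compares only the component -> taskIds map rather than the full Assignment.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class BackupDedupSketch {

  // topologyName -> the component/task snapshot that was last written to the backup store
  private final Map<String, Map<String, List<Integer>>> lastBackup =
      new ConcurrentHashMap<String, Map<String, List<Integer>>>();

  // returns true only when the snapshot differs from what was last backed up for this topology
  boolean needsBackup(String topologyName, Map<String, List<Integer>> componentTasks) {
    Map<String, List<Integer>> previous = lastBackup.get(topologyName);
    if (componentTasks.equals(previous)) {
      return false;
    }
    lastBackup.put(topologyName, new HashMap<String, List<Integer>>(componentTasks));
    return true;
  }

  public static void main(String[] args) {
    BackupDedupSketch sketch = new BackupDedupSketch();
    Map<String, List<Integer>> snapshot = new HashMap<String, List<Integer>>();
    snapshot.put("spout", Arrays.asList(1));
    snapshot.put("bolt", Arrays.asList(2, 3));

    System.out.println(sketch.needsBackup("demo-topology", snapshot)); // true: first backup
    System.out.println(sketch.needsBackup("demo-topology", snapshot)); // false: unchanged, skip
  }
}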