  /**
   * Deletes a scheduled entity. A deleted entity is removed completely from the execution pool.
   *
   * @param request servlet request containing the caller's context
   * @param type entity type
   * @param entity entity name
   * @param colo colo on which the request should be run
   * @return APIResult
   */
  public APIResult delete(HttpServletRequest request, String type, String entity, String colo) {
    checkColo(colo);
    try {
      EntityType entityType = EntityType.getEnum(type);
      String removedFromEngine = "";
      try {
        Entity entityObj = EntityUtil.getEntity(type, entity);

        canRemove(entityObj);
        if (entityType.isSchedulable() && !DeploymentUtil.isPrism()) {
          getWorkflowEngine().delete(entityObj);
          removedFromEngine = "(KILLED in ENGINE)";
        }

        configStore.remove(entityType, entity);
      } catch (EntityNotRegisteredException e) { // already deleted
        return new APIResult(
            APIResult.Status.SUCCEEDED, entity + "(" + type + ") doesn't exist. Nothing to do");
      }

      return new APIResult(
          APIResult.Status.SUCCEEDED,
          entity + "(" + type + ") removed successfully " + removedFromEngine);
    } catch (Throwable e) {
      LOG.error("Unable to reach workflow engine for deletion or deletion failed", e);
      throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
    }
  }
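  // Illustrative sketch (hypothetical helper, not part of the original class): deleting a
  // process entity; the name "sample-process" and the pass-through colo are assumptions made
  // only for this example.
  private APIResult deleteSampleProcess(HttpServletRequest request, String colo) {
    return delete(request, EntityType.PROCESS.name(), "sample-process", colo);
  }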
  /**
   * Returns the list of filtered entities as well as the total number of results.
   *
   * @param fieldStr Fields that the query is interested in, separated by comma
   * @param nameSubsequence Name subsequence to match
   * @param tagKeywords Tag keywords to match, separated by comma
   * @param filterType Only return entities of this type
   * @param filterTags Full tag matching, separated by comma
   * @param filterBy Specific fields to match (e.g. TYPE, NAME, STATUS, PIPELINES, CLUSTER)
   * @param orderBy Order result by these fields.
   * @param sortOrder Valid options are "asc" and "desc"
   * @param offset Pagination offset.
   * @param resultsPerPage Number of results that should be returned starting at the offset.
   * @return EntityList
   */
  public EntityList getEntityList(
      String fieldStr,
      String nameSubsequence,
      String tagKeywords,
      String filterType,
      String filterTags,
      String filterBy,
      String orderBy,
      String sortOrder,
      Integer offset,
      Integer resultsPerPage) {

    HashSet<String> fields = new HashSet<String>(Arrays.asList(fieldStr.toUpperCase().split(",")));
    Map<String, List<String>> filterByFieldsValues = getFilterByFieldsValues(filterBy);
    validateEntityFilterByClause(filterByFieldsValues);
    if (StringUtils.isNotEmpty(filterTags)) {
      filterByFieldsValues.put(
          EntityList.EntityFilterByFields.TAGS.name(), Arrays.asList(filterTags));
    }

    // get filtered entities
    List<Entity> entities = new ArrayList<Entity>();
    try {
      if (StringUtils.isEmpty(filterType)) {
        // return entities of all types if no entity type specified
        for (EntityType entityType : EntityType.values()) {
          entities.addAll(
              getFilteredEntities(
                  entityType, nameSubsequence, tagKeywords, filterByFieldsValues, "", "", ""));
        }
      } else {
        String[] types = filterType.split(",");
        for (String type : types) {
          EntityType entityType = EntityType.getEnum(type);
          entities.addAll(
              getFilteredEntities(
                  entityType, nameSubsequence, tagKeywords, filterByFieldsValues, "", "", ""));
        }
      }
    } catch (Exception e) {
      LOG.error("Failed to get entity list", e);
      throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
    }

    // sort entities and pagination
    List<Entity> entitiesReturn =
        sortEntitiesPagination(entities, orderBy, sortOrder, offset, resultsPerPage);

    // add total number of results
    EntityList entityList =
        entitiesReturn.isEmpty()
            ? new EntityList(new Entity[] {}, 0)
            : new EntityList(
                buildEntityElements(new HashSet<String>(fields), entitiesReturn), entities.size());
    return entityList;
  }
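  // Illustrative sketch (hypothetical helper; all argument values are examples): listing FEED
  // entities with their status, ordered by name ascending, first page of 10 results; the empty
  // strings mean "no filtering" for the corresponding clause.
  private EntityList listFeedsOrderedByName() {
    return getEntityList("status", "", "", "feed", "", "", "name", "asc", 0, 10);
  }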
  @CliCommand(
      value = {METADATA_COMMAND_PREFIX + LIST_OPT},
      help = LIST_OPT_DESCRIPTION)
  public String list(
      @CliOption(
              key = {TYPE_OPT},
              mandatory = true,
              help = TYPE_OPT_DESCRIPTION)
          final String dimensionType,
      @CliOption(
              key = {CLUSTER_OPT},
              mandatory = false,
              help = CLUSTER_OPT_DESCRIPTION)
          final String cluster,
      @CliOption(
              key = {FEED_OPT},
              mandatory = false,
              help = FEED_OPT_DESCRIPTION)
          final String feed,
      @CliOption(
              key = {PROCESS_OPT},
              mandatory = false,
              help = PROCESS_OPT_DESCRIPTION)
          final String process,
      @CliOption(
              key = {NUM_RESULTS_OPT},
              mandatory = false,
              help = NUM_RESULTS_OPT_DESCRIPTION)
          final Integer numResults) {
    validateDimensionType(dimensionType.toUpperCase());
    if (!(dimensionType.toUpperCase()).equals(RelationshipType.REPLICATION_METRICS.name())) {
      return getFalconClient().getDimensionList(dimensionType, cluster, getDoAs());
    } else {
      String schedEntityType = null;
      String schedEntityName = null;
      if (StringUtils.isNotEmpty(feed)) {
        schedEntityType = EntityType.getEnum(FEED_OPT).name();
        schedEntityName = feed;
      } else if (StringUtils.isNotEmpty(process)) {
        schedEntityType = EntityType.getEnum(PROCESS_OPT).name();
        schedEntityName = process;
      }
      validateScheduleEntity(schedEntityType, schedEntityName);

      return getFalconClient()
          .getReplicationMetricsDimensionList(
              schedEntityType, schedEntityName, numResults, getDoAs());
    }
  }
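  // Illustrative sketch (hypothetical helper, not part of the original command class): listing
  // replication metrics for a given process by calling the command method directly; all other
  // dimension types take the simpler getDimensionList() path.
  private String listReplicationMetricsForProcess(final String processName, final Integer numResults) {
    return list(RelationshipType.REPLICATION_METRICS.name(), null, null, processName, numResults);
  }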
  protected void storeEntity(EntityType type, String name, String resource) throws Exception {
    Unmarshaller unmarshaller = type.getUnmarshaller();
    ConfigurationStore store = ConfigurationStore.get();
    store.remove(type, name);
    switch (type) {
      case CLUSTER:
        Cluster cluster = (Cluster) unmarshaller.unmarshal(this.getClass().getResource(resource));
        cluster.setName(name);
        store.publish(type, cluster);
        break;

      case FEED:
        Feed feed = (Feed) unmarshaller.unmarshal(this.getClass().getResource(resource));
        feed.setName(name);
        store.publish(type, feed);
        break;

      case PROCESS:
        Process process = (Process) unmarshaller.unmarshal(this.getClass().getResource(resource));
        process.setName(name);
        store.publish(type, process);
        break;

      default:
    }
  }
  protected synchronized Entity submitInternal(HttpServletRequest request, String type)
      throws IOException, FalconException {

    EntityType entityType = EntityType.getEnum(type);
    Entity entity = deserializeEntity(request, entityType);
    // KLUDGE - Until ACL is mandated, the entity passed in must be decorated for the equals check to pass
    decorateEntityWithACL(entity);

    Entity existingEntity = configStore.get(entityType, entity.getName());
    if (existingEntity != null) {
      if (EntityUtil.equals(existingEntity, entity)) {
        return existingEntity;
      }

      throw new EntityAlreadyExistsException(
          entity.toShortString()
              + " already registered with configuration store. "
              + "Can't be submitted again. Try removing before submitting.");
    }

    SecurityUtil.tryProxy(entity); // proxy before validating since FS/Oozie needs to be proxied
    validate(entity);
    configStore.publish(entityType, entity);
    LOG.info("Submit successful: ({}): {}", type, entity.getName());
    return entity;
  }
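  // Illustrative sketch (hypothetical helper): submitting a feed definition carried in the
  // request body; submitInternal() returns the existing entity unchanged when an identical
  // definition is re-submitted.
  private Entity submitFeedFromRequest(HttpServletRequest request)
      throws IOException, FalconException {
    return submitInternal(request, EntityType.FEED.name());
  }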
  /**
   * Posts an entity XML with its entity type and validates it. The XML can be a Process, Feed
   * or data endpoint definition.
   *
   * @param request servlet request carrying the entity XML
   * @param type entity type
   * @return APIResult - Succeeded or Failed
   */
  public APIResult validate(HttpServletRequest request, String type) {
    try {
      EntityType entityType = EntityType.getEnum(type);
      Entity entity = deserializeEntity(request, entityType);
      validate(entity);

      // Validate that the entity can be scheduled in the cluster
      if (entity.getEntityType().isSchedulable()) {
        Set<String> clusters = EntityUtil.getClustersDefinedInColos(entity);
        for (String cluster : clusters) {
          try {
            getWorkflowEngine().dryRun(entity, cluster);
          } catch (FalconException e) {
            throw new FalconException("dryRun failed on cluster " + cluster, e);
          }
        }
      }
      return new APIResult(
          APIResult.Status.SUCCEEDED,
          "Validated successfully (" + entityType + ") " + entity.getName());
    } catch (Throwable e) {
      LOG.error("Validation failed for entity ({})", type, e);
      throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
    }
  }
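  // Illustrative sketch (hypothetical helper): validating a feed XML carried in the request
  // body, including the per-cluster dryRun described above.
  private APIResult validateFeedPayload(HttpServletRequest request) {
    return validate(request, EntityType.FEED.name());
  }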
 protected void assertLibExtensions(
     FileSystem fs, COORDINATORAPP coord, EntityType type, String lifecycle) throws Exception {
   WORKFLOWAPP wf = getWorkflowapp(fs, coord);
   List<Object> actions = wf.getDecisionOrForkOrJoin();
   String lifeCyclePath = lifecycle == null ? "" : "/" + lifecycle;
   for (Object obj : actions) {
     if (!(obj instanceof ACTION)) {
       continue;
     }
     ACTION action = (ACTION) obj;
     List<String> files = null;
     if (action.getJava() != null) {
       files = action.getJava().getFile();
     } else if (action.getPig() != null) {
       files = action.getPig().getFile();
     } else if (action.getMapReduce() != null) {
       files = action.getMapReduce().getFile();
     }
     if (files != null) {
       Assert.assertTrue(
           files
               .get(files.size() - 1)
               .endsWith(
                   "/projects/falcon/working/libext/" + type.name() + lifeCyclePath + "/ext.jar"));
     }
   }
 }
 private static void cleanupConfigurationStore(ConfigurationStore store) throws Exception {
   for (EntityType type : EntityType.values()) {
     Collection<String> entities = store.getEntities(type);
     for (String entity : entities) {
       store.remove(type, entity);
     }
   }
 }
 protected void cleanupStore() throws FalconException {
   ConfigurationStore store = ConfigurationStore.get();
   for (EntityType type : EntityType.values()) {
     Collection<String> entities = store.getEntities(type);
     for (String entity : entities) {
       store.remove(type, entity);
     }
   }
 }
  protected Entity storeEntity(EntityType type, String name, String resource, String writeEndpoint)
      throws Exception {
    Unmarshaller unmarshaller = type.getUnmarshaller();
    ConfigurationStore store = ConfigurationStore.get();
    switch (type) {
      case CLUSTER:
        Cluster cluster = (Cluster) unmarshaller.unmarshal(this.getClass().getResource(resource));
        if (name != null) {
          store.remove(type, name);
          cluster.setName(name);
        }
        store.publish(type, cluster);

        if (writeEndpoint != null) {
          ClusterHelper.getInterface(cluster, Interfacetype.WRITE).setEndpoint(writeEndpoint);
          FileSystem fs = new Path(writeEndpoint).getFileSystem(EmbeddedCluster.newConfiguration());
          fs.create(
                  new Path(
                      ClusterHelper.getLocation(cluster, ClusterLocationType.WORKING).getPath(),
                      "libext/FEED/retention/ext.jar"))
              .close();
          fs.create(
                  new Path(
                      ClusterHelper.getLocation(cluster, ClusterLocationType.WORKING).getPath(),
                      "libext/FEED/replication/ext.jar"))
              .close();
        }

        return cluster;

      case FEED:
        Feed feed = (Feed) unmarshaller.unmarshal(this.getClass().getResource(resource));
        if (name != null) {
          store.remove(type, name);
          feed.setName(name);
        }
        store.publish(type, feed);
        return feed;

      case PROCESS:
        Process process = (Process) unmarshaller.unmarshal(this.getClass().getResource(resource));
        if (name != null) {
          store.remove(type, name);
          process.setName(name);
        }
        store.publish(type, process);
        return process;

      default:
    }

    throw new IllegalArgumentException("Unhandled type: " + type);
  }
 /**
  * Returns the entity definition as XML, looked up by entity type and name.
  *
  * @param type entity type
  * @param entityName entity name
  * @return String containing the entity definition
  */
 public String getEntityDefinition(String type, String entityName) {
   try {
     EntityType entityType = EntityType.getEnum(type);
     Entity entity = configStore.get(entityType, entityName);
     if (entity == null) {
       throw new NoSuchElementException(entityName + " (" + type + ") not found");
     }
     return entity.toString();
   } catch (Throwable e) {
     LOG.error(
         "Unable to get entity definition from config store for ({}): {}", type, entityName, e);
     throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
   }
 }
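 // Illustrative sketch (hypothetical feed name): fetching the stored XML definition of a feed.
 private String getSampleFeedDefinition() {
   return getEntityDefinition(EntityType.FEED.name(), "sample-feed");
 }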
  protected Set<String> getApplicableColos(String type, String name) {
    try {
      if (DeploymentUtil.isEmbeddedMode()) {
        return DeploymentUtil.getDefaultColos();
      }

      if (EntityType.getEnum(type) == EntityType.CLUSTER) {
        return getAllColos();
      }

      return getApplicableColos(type, EntityUtil.getEntity(type, name));
    } catch (FalconException e) {
      throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
    }
  }
  // Retrieves the workflow conf from the cache if present; otherwise queries the workflow
  // engine for the conf of the workflow.
  private void updateContextFromWFConf(WorkflowExecutionContext context) {
    try {
      Properties wfProps = contextMap.get(context.getWorkflowId());
      if (wfProps == null) {
        Entity entity =
            CONFIG_STORE.get(EntityType.valueOf(context.getEntityType()), context.getEntityName());
        // Entity can be null in case of delete: the engine still generates notifications for
        // instance kills, but the entity is no longer in the config store.
        if (entity == null) {
          return;
        }
        for (String cluster : EntityUtil.getClustersDefinedInColos(entity)) {
          try {
            InstancesResult.Instance[] instances =
                WorkflowEngineFactory.getWorkflowEngine()
                    .getJobDetails(cluster, context.getWorkflowId())
                    .getInstances();
            if (instances != null && instances.length > 0) {
              wfProps = getWFProps(instances[0].getWfParams());
              // Required by RetryService, but not part of the conf.
              wfProps.setProperty(
                  WorkflowExecutionArgs.RUN_ID.getName(),
                  Integer.toString(instances[0].getRunId()));
            }
          } catch (FalconException e) {
            // Do Nothing. The workflow may not have been deployed on this cluster.
            continue;
          }
          contextMap.put(context.getWorkflowId(), wfProps);
        }
      }

      // No extra props to enhance the context with.
      if (wfProps == null || wfProps.isEmpty()) {
        return;
      }

      for (WorkflowExecutionArgs arg : WorkflowExecutionArgs.values()) {
        if (wfProps.containsKey(arg.getName())) {
          context.setValue(arg, wfProps.getProperty(arg.getName()));
        }
      }

    } catch (FalconException e) {
      LOG.error("Unable to retrieve entity {} of type {} from config store.", e);
    }
  }
  /**
   * Returns the status of the requested entity.
   *
   * @param type entity type
   * @param entity entity name
   * @param colo colo on which the query should be run
   * @return APIResult containing the entity status
   */
  public APIResult getStatus(String type, String entity, String colo) {

    checkColo(colo);
    Entity entityObj;
    try {
      entityObj = EntityUtil.getEntity(type, entity);
      EntityType entityType = EntityType.getEnum(type);
      EntityStatus status = getStatus(entityObj, entityType);
      return new APIResult(Status.SUCCEEDED, status.name());
    } catch (FalconWebException e) {
      throw e;
    } catch (Exception e) {
      LOG.error("Unable to get status for entity {} ({})", entity, type, e);
      throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
    }
  }
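  // Illustrative sketch (hypothetical helper): querying the status of a process entity; the
  // colo argument is passed through unchanged, exactly as in getStatus() above.
  private APIResult getSampleProcessStatus(String colo) {
    return getStatus(EntityType.PROCESS.name(), "sample-process", colo);
  }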
  protected EntityStatus getStatus(Entity entity, EntityType type) throws FalconException {
    EntityStatus status;

    if (type.isSchedulable()) {
      if (workflowEngine.isActive(entity)) {
        if (workflowEngine.isSuspended(entity)) {
          status = EntityStatus.SUSPENDED;
        } else {
          status = EntityStatus.RUNNING;
        }
      } else {
        status = EntityStatus.SUBMITTED;
      }
    } else {
      status = EntityStatus.SUBMITTED;
    }
    return status;
  }
  public APIResult update(HttpServletRequest request, String type, String entityName, String colo) {
    checkColo(colo);
    List<Entity> tokenList = null;
    try {
      EntityType entityType = EntityType.getEnum(type);
      Entity oldEntity = EntityUtil.getEntity(type, entityName);
      Entity newEntity = deserializeEntity(request, entityType);
      // KLUDGE - Until ACL is mandated, the entity passed in must be decorated for the equals check to pass
      decorateEntityWithACL(newEntity);
      validate(newEntity);

      validateUpdate(oldEntity, newEntity);
      configStore.initiateUpdate(newEntity);

      tokenList = obtainUpdateEntityLocks(oldEntity);

      StringBuilder result = new StringBuilder("Updated successfully");
      // Update in workflow engine
      if (!DeploymentUtil.isPrism()) {
        Set<String> oldClusters = EntityUtil.getClustersDefinedInColos(oldEntity);
        Set<String> newClusters = EntityUtil.getClustersDefinedInColos(newEntity);
        newClusters.retainAll(oldClusters); // common clusters for update
        oldClusters.removeAll(newClusters); // deleted clusters

        for (String cluster : newClusters) {
          result.append(getWorkflowEngine().update(oldEntity, newEntity, cluster));
        }
        for (String cluster : oldClusters) {
          getWorkflowEngine().delete(oldEntity, cluster);
        }
      }

      configStore.update(entityType, newEntity);

      return new APIResult(APIResult.Status.SUCCEEDED, result.toString());
    } catch (Throwable e) {
      LOG.error("Update failed", e);
      throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
    } finally {
      ConfigurationStore.get().cleanupUpdateInit();
      releaseUpdateEntityLocks(entityName, tokenList);
    }
  }
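  // Illustrative sketch (hypothetical helper): updating an existing feed with a new definition
  // carried in the request body; clusters dropped from the definition are deleted from the
  // workflow engine, as in update() above.
  private APIResult updateSampleFeed(HttpServletRequest request, String colo) {
    return update(request, EntityType.FEED.name(), "sample-feed", colo);
  }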
  protected Set<String> getApplicableColos(String type, Entity entity) {
    try {
      if (DeploymentUtil.isEmbeddedMode()) {
        return DeploymentUtil.getDefaultColos();
      }

      if (EntityType.getEnum(type) == EntityType.CLUSTER) {
        return getAllColos();
      }

      Set<String> clusters = EntityUtil.getClustersDefined(entity);
      Set<String> colos = new HashSet<String>();
      for (String cluster : clusters) {
        Cluster clusterEntity = EntityUtil.getEntity(EntityType.CLUSTER, cluster);
        colos.add(clusterEntity.getColo());
      }
      return colos;
    } catch (FalconException e) {
      throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
    }
  }
  /**
   * Given the location of data, returns the feed(s) that the data belongs to.
   *
   * @param type type of the entity; only FEED is valid here.
   * @param instancePath location of the data
   * @return FeedLookupResult containing the feed name, type of the data and cluster name.
   */
  public FeedLookupResult reverseLookup(String type, String instancePath) {
    try {
      EntityType entityType = EntityType.getEnum(type);
      if (entityType != EntityType.FEED) {
        LOG.error("Reverse Lookup is not supported for entitytype: {}", type);
        throw new IllegalArgumentException("Reverse lookup is not supported for " + type);
      }

      instancePath = StringUtils.trim(instancePath);
      String instancePathWithoutSlash =
          instancePath.endsWith("/") ? StringUtils.removeEnd(instancePath, "/") : instancePath;
      // Treat paths with and without a trailing slash as the same for searching, e.g.
      // /data/cas and /data/cas/ should both match.
      String instancePathWithSlash = instancePathWithoutSlash + "/";
      FeedLocationStore store = FeedLocationStore.get();
      Collection<FeedLookupResult.FeedProperties> feeds = new ArrayList<>();
      Collection<FeedLookupResult.FeedProperties> res =
          store.reverseLookup(instancePathWithoutSlash);
      if (res != null) {
        feeds.addAll(res);
      }
      res = store.reverseLookup(instancePathWithSlash);
      if (res != null) {
        feeds.addAll(res);
      }
      FeedLookupResult result = new FeedLookupResult(APIResult.Status.SUCCEEDED, "SUCCESS");
      FeedLookupResult.FeedProperties[] props =
          feeds.toArray(new FeedLookupResult.FeedProperties[0]);
      result.setElements(props);
      return result;

    } catch (IllegalArgumentException e) {
      throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
    } catch (Throwable throwable) {
      LOG.error("reverse look up failed", throwable);
      throw FalconWebException.newException(throwable, Response.Status.INTERNAL_SERVER_ERROR);
    }
  }
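  // Illustrative sketch (hypothetical helper): looking up the feed(s) that own a data location;
  // trailing slashes are normalized inside reverseLookup(), so either form of the path works.
  private FeedLookupResult lookupFeedsForPath(String dataPath) {
    return reverseLookup(EntityType.FEED.name(), dataPath);
  }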
  protected List<Entity> getFilteredEntities(
      EntityType entityType,
      String nameSubsequence,
      String tagKeywords,
      Map<String, List<String>> filterByFieldsValues,
      String startDate,
      String endDate,
      String cluster)
      throws FalconException, IOException {
    Collection<String> entityNames = configStore.getEntities(entityType);
    if (entityNames.isEmpty()) {
      return Collections.emptyList();
    }

    List<Entity> entities = new ArrayList<Entity>();
    char[] subsequence = nameSubsequence.toLowerCase().toCharArray();
    List<String> tagKeywordsList;
    if (StringUtils.isEmpty(tagKeywords)) {
      tagKeywordsList = new ArrayList<>();
    } else {
      tagKeywordsList = getFilterByTags(Arrays.asList(tagKeywords.toLowerCase()));
    }
    for (String entityName : entityNames) {
      Entity entity;
      try {
        entity = configStore.get(entityType, entityName);
        if (entity == null) {
          continue;
        }
      } catch (FalconException e1) {
        LOG.error(
            "Unable to get list for entities for ({})",
            entityType.getEntityClass().getSimpleName(),
            e1);
        throw FalconWebException.newException(e1, Response.Status.BAD_REQUEST);
      }

      if (SecurityUtil.isAuthorizationEnabled() && !isEntityAuthorized(entity)) {
        // The requesting user has no permission to access this entity; skip it.
        continue;
      }
      if (isFilteredByDatesAndCluster(entity, startDate, endDate, cluster)) {
        // this is for entity summary
        continue;
      }
      SecurityUtil.tryProxy(entity);

      // filter by fields
      if (isFilteredByFields(entity, filterByFieldsValues)) {
        continue;
      }

      // filter by subsequence of name
      if (subsequence.length > 0
          && !matchesNameSubsequence(subsequence, entityName.toLowerCase())) {
        continue;
      }

      // filter by tag keywords
      if (!matchTagKeywords(tagKeywordsList, entity.getTags())) {
        continue;
      }

      entities.add(entity);
    }

    return entities;
  }
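  // Illustrative sketch (hypothetical helper): fetching all PROCESS entities whose name contains
  // a given subsequence, with no tag, field, date or cluster filtering.
  private List<Entity> findProcessesMatching(String nameSubsequence)
      throws FalconException, IOException {
    return getFilteredEntities(
        EntityType.PROCESS,
        nameSubsequence,
        "",
        Collections.<String, List<String>>emptyMap(),
        "",
        "",
        "");
  }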