private SiddhiManager createMockSiddhiManager(
      String[] inputStreamDefinitions, String executionPlan) throws SiddhiParserException {
    SiddhiConfiguration siddhiConfig = new SiddhiConfiguration();
    siddhiConfig.setSiddhiExtensions(SiddhiExtensionLoader.loadSiddhiExtensions());
    SiddhiManager siddhiManager = new SiddhiManager(siddhiConfig);
    try {
      int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
      if (tenantId > -1) {
        DataSourceManager.getInstance().initTenant(tenantId);
      }
      List<CarbonDataSource> dataSources =
          EventProcessorValueHolder.getDataSourceService().getAllDataSources();
      for (CarbonDataSource cds : dataSources) {
        try {
          if (cds.getDSObject() instanceof DataSource) {
            siddhiManager
                .getSiddhiContext()
                .addDataSource(cds.getDSMInfo().getName(), (DataSource) cds.getDSObject());
          }
        } catch (Exception e) {
          log.error("Unable to add the datasource" + cds.getDSMInfo().getName(), e);
        }
      }
    } catch (DataSourceException e) {
      log.error("Unable to access the datasource service", e);
    }

    for (String streamDefinition : inputStreamDefinitions) {
      if (streamDefinition.trim().length() > 0) {
        siddhiManager.defineStream(streamDefinition);
      }
    }
    siddhiManager.addExecutionPlan(executionPlan);
    return siddhiManager;
  }
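
  /**
   * Shuts down and removes the execution plan with the given name for the given tenant, and
   * unsubscribes its Siddhi event consumers and event producers from the event stream service.
   */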
  private void removeExecutionPlanConfiguration(String name, int tenantId) {
    Map<String, ExecutionPlan> executionPlanMap = tenantSpecificExecutionPlans.get(tenantId);
    if (executionPlanMap != null && executionPlanMap.containsKey(name)) {
      ExecutionPlan executionPlan = executionPlanMap.remove(name);
      executionPlan.shutdown();

      ExecutionPlanConfiguration executionPlanConfiguration =
          executionPlan.getExecutionPlanConfiguration();

      // Release the junction subscriptions held by this plan's Siddhi event consumers.
      for (SiddhiEventConsumer eventConsumer : executionPlan.getSiddhiEventConsumers()) {
        EventProcessorValueHolder.getEventStreamService().unsubscribe(eventConsumer, tenantId);
      }

      // Likewise release the junction subscriptions held by this plan's event producers.
      for (EventProducer eventProducer : executionPlan.getEventProducers()) {
        EventProcessorValueHolder.getEventStreamService().unsubscribe(eventProducer, tenantId);
      }
    }
  }
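
  /**
   * Builds the SiddhiConfiguration for the given execution plan: disables async processing,
   * assigns instance and query-plan identifiers, loads the available Siddhi extensions, and
   * enables or disables distributed processing (backed by the Hazelcast instance) according to
   * the plan's distributed-processing property.
   */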
  private SiddhiConfiguration getSiddhiConfigurationFor(
      ExecutionPlanConfiguration executionPlanConfiguration, int tenantId)
      throws ServiceDependencyValidationException {
    SiddhiConfiguration siddhiConfig = new SiddhiConfiguration();
    siddhiConfig.setAsyncProcessing(false);
    siddhiConfig.setInstanceIdentifier(
        "org.wso2.siddhi.instance-" + tenantId + "-" + UUID.randomUUID().toString());

    String isDistributedProcessingEnabledString =
        executionPlanConfiguration
            .getSiddhiConfigurationProperties()
            .get(EventProcessorConstants.SIDDHI_DISTRIBUTED_PROCESSING);
    if (isDistributedProcessingEnabledString != null
        && (isDistributedProcessingEnabledString.equalsIgnoreCase("DistributedCache")
            || isDistributedProcessingEnabledString.equalsIgnoreCase("true"))) {
      siddhiConfig.setDistributedProcessing(true);
      if (EventProcessorValueHolder.getHazelcastInstance() != null) {
        siddhiConfig.setInstanceIdentifier(
            EventProcessorValueHolder.getHazelcastInstance().getName());
      } else {
        throw new ServiceDependencyValidationException(
            EventProcessorConstants.HAZELCAST_INSTANCE, "Hazelcast instance is not initialized.");
      }
    } else if (isDistributedProcessingEnabledString != null
        && isDistributedProcessingEnabledString.equalsIgnoreCase("RedundantNode")) {
      siddhiConfig.setDistributedProcessing(false);
      if (EventProcessorValueHolder.getHazelcastInstance() != null) {
        siddhiConfig.setInstanceIdentifier(
            EventProcessorValueHolder.getHazelcastInstance().getName());
      } else {
        throw new ServiceDependencyValidationException(
            EventProcessorConstants.HAZELCAST_INSTANCE, "Hazelcast instance is not initialized.");
      }
    } else {
      siddhiConfig.setDistributedProcessing(false);
    }

    siddhiConfig.setQueryPlanIdentifier(
        "org.wso2.siddhi-" + tenantId + "-" + executionPlanConfiguration.getName());
    siddhiConfig.setSiddhiExtensions(SiddhiExtensionLoader.loadSiddhiExtensions());
    return siddhiConfig;
  }
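
 /**
  * Attempts to become the Storm coordinator by acquiring the Hazelcast distributed lock named
  * "StormCoordinator"; the node that obtains the lock marks itself as the coordinator.
  */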
 public void tryBecomeCoordinator() {
   HazelcastInstance hazelcastInstance = EventProcessorValueHolder.getHazelcastInstance();
   if (hazelcastInstance != null) {
     if (!isStormCoordinator()) {
       ILock lock = hazelcastInstance.getLock("StormCoordinator");
       boolean isCoordinator = lock.tryLock();
       if (isCoordinator) {
         log.info("Node became Storm coordinator");
       }
       setStormCoordinator(isCoordinator);
     }
   }
 }
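
 /**
  * Registers all Carbon data sources of the current tenant with the given SiddhiManager so that
  * execution plans can refer to them by name.
  */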
 public static void loadDataSourceConfiguration(SiddhiManager siddhiManager) {
   try {
     int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
     if (tenantId > -1) {
       DataSourceManager.getInstance().initTenant(tenantId);
     }
     List<CarbonDataSource> dataSources =
         EventProcessorValueHolder.getDataSourceService().getAllDataSources();
     for (CarbonDataSource cds : dataSources) {
       try {
         if (cds.getDSObject() instanceof DataSource) {
           siddhiManager.setDataSource(cds.getDSMInfo().getName(), (DataSource) cds.getDSObject());
         }
       } catch (Exception e) {
         log.error("Unable to add the datasource" + cds.getDSMInfo().getName(), e);
       }
     }
   } catch (DataSourceException e) {
     log.error("Unable to populate the data sources in Siddhi engine.", e);
   }
 }
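
  /**
   * Creates the SiddhiManager for an execution plan: registers the tenant's Carbon data sources
   * and, when a positive snapshot interval is configured, attaches a Cassandra-backed
   * persistence store (creating and validating it on first use).
   */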
  private SiddhiManager getSiddhiManagerFor(
      ExecutionPlanConfiguration executionPlanConfiguration,
      SiddhiConfiguration siddhiConfig,
      Map<String, InputHandler> inputHandlerMap)
      throws ExecutionPlanConfigurationException {
    SiddhiManager siddhiManager = new SiddhiManager(siddhiConfig);
    try {
      int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
      if (tenantId > -1) {
        DataSourceManager.getInstance().initTenant(tenantId);
      }
      List<CarbonDataSource> dataSources =
          EventProcessorValueHolder.getDataSourceService().getAllDataSources();
      for (CarbonDataSource cds : dataSources) {
        try {
          if (cds.getDSObject() instanceof DataSource) {
            siddhiManager
                .getSiddhiContext()
                .addDataSource(cds.getDSMInfo().getName(), (DataSource) cds.getDSObject());
          }
        } catch (Exception e) {
          log.error("Unable to add the datasource" + cds.getDSMInfo().getName(), e);
        }
      }
    } catch (DataSourceException e) {
      log.error("Unable to populate the data sources in Siddhi engine.", e);
    }

    // Read the snapshot (persistence) interval from the plan's Siddhi configuration properties;
    // a positive value enables the Cassandra-backed persistence store below.
    int persistenceTimeInterval = 0;
    try {
      persistenceTimeInterval =
          Integer.parseInt(
              executionPlanConfiguration
                  .getSiddhiConfigurationProperties()
                  .get(EventProcessorConstants.SIDDHI_SNAPSHOT_INTERVAL));
    } catch (NumberFormatException e) {
      log.error("Unable to parse snapshot time interval; state persistence will not be enabled.", e);
    }

    if (persistenceTimeInterval > 0) {
      if (null == EventProcessorValueHolder.getPersistenceStore()) {
        if (EventProcessorValueHolder.getClusterInformation() == null) {
          try {
            String adminPassword =
                EventProcessorValueHolder.getUserRealm().getRealmConfiguration().getAdminPassword();
            String adminUserName =
                EventProcessorValueHolder.getUserRealm().getRealmConfiguration().getAdminUserName();

            ClusterInformation clusterInformation =
                new ClusterInformation(adminUserName, adminPassword);
            clusterInformation.setClusterName(CassandraPersistenceStore.CLUSTER_NAME);
            EventProcessorValueHolder.setClusterInformation(clusterInformation);
          } catch (UserStoreException e) {
            log.error("Unable to get realm configuration.", e);
          }
        }
        if (CassandraConnectionValidator.getInstance()
            .checkCassandraConnection(
                EventProcessorValueHolder.getClusterInformation().getUsername(),
                EventProcessorValueHolder.getClusterInformation().getPassword())) {
          Cluster cluster =
              EventProcessorValueHolder.getDataAccessService()
                  .getCluster(EventProcessorValueHolder.getClusterInformation());
          CassandraPersistenceStore cassandraPersistenceStore =
              new CassandraPersistenceStore(cluster);
          EventProcessorValueHolder.setPersistenceStore(cassandraPersistenceStore);
        } else {
          throw new ExecutionPlanConfigurationException(
              "Cannot connect to Cassandra. To run with embedded Cassandra enabled, start the server with command: ./wso2server.sh -Ddisable.cassandra.server.startup=false");
        }
      }
      siddhiManager.setPersistStore(EventProcessorValueHolder.getPersistenceStore());
    }
    return siddhiManager;
  }
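
  /**
   * Deploys an execution plan for the current tenant: validates that all imported and exported
   * streams exist, defines them in a newly created Siddhi runtime, compiles the query
   * expressions, and subscribes the plan's producers and consumers to the event stream service,
   * optionally routing events through HA or Storm-aware dispatchers.
   */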
  public void addExecutionPlanConfiguration(
      ExecutionPlanConfiguration executionPlanConfiguration, AxisConfiguration axisConfiguration)
      throws ExecutionPlanDependencyValidationException, ExecutionPlanConfigurationException,
          ServiceDependencyValidationException {
    int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
    Map<String, ExecutionPlan> tenantExecutionPlans = tenantSpecificExecutionPlans.get(tenantId);
    if (tenantExecutionPlans == null) {
      tenantExecutionPlans = new ConcurrentHashMap<String, ExecutionPlan>();
      tenantSpecificExecutionPlans.put(tenantId, tenantExecutionPlans);
    } else if (tenantExecutionPlans.get(executionPlanConfiguration.getName()) != null) {
      // if an execution plan with the same name already exists, we are not going to override it
      // with this plan.
      throw new ExecutionPlanConfigurationException(
          "Execution plan with the same name already exists. Please remove it and retry.");
    }

    // This iteration exists only as a check. Actual usage of imported stream configs is further
    // down
    for (StreamConfiguration streamConfiguration :
        executionPlanConfiguration.getImportedStreams()) {
      try {
        StreamDefinition streamDefinition =
            EventProcessorValueHolder.getEventStreamService()
                .getStreamDefinition(streamConfiguration.getStreamId(), tenantId);
        if (streamDefinition == null) {
          throw new ExecutionPlanDependencyValidationException(
              streamConfiguration.getStreamId(),
              "Imported Stream " + streamConfiguration.getStreamId() + " does not exist");
        }
      } catch (EventStreamConfigurationException e) {
        throw new ExecutionPlanConfigurationException(
            "Error in retrieving stream ID : " + streamConfiguration.getStreamId());
      }
    }

    // This iteration exists only as a check. Actual usage of exported stream configs is further
    // down
    for (StreamConfiguration streamConfiguration :
        executionPlanConfiguration.getExportedStreams()) {
      try {
        StreamDefinition streamDefinition =
            EventProcessorValueHolder.getEventStreamService()
                .getStreamDefinition(streamConfiguration.getStreamId(), tenantId);
        if (streamDefinition == null) {
          throw new ExecutionPlanDependencyValidationException(
              streamConfiguration.getStreamId(),
              "Exported Stream " + streamConfiguration.getStreamId() + " does not exist");
        }
      } catch (EventStreamConfigurationException e) {
        throw new ExecutionPlanConfigurationException(
            "Error in retrieving stream ID : " + streamConfiguration.getStreamId());
      }
    }

    Map<String, InputHandler> inputHandlerMap =
        new ConcurrentHashMap<String, InputHandler>(
            executionPlanConfiguration.getImportedStreams().size());

    SiddhiConfiguration siddhiConfig =
        getSiddhiConfigurationFor(executionPlanConfiguration, tenantId);
    SiddhiManager siddhiManager =
        getSiddhiManagerFor(executionPlanConfiguration, siddhiConfig, inputHandlerMap);

    for (StreamConfiguration importedStreamConfiguration :
        executionPlanConfiguration.getImportedStreams()) {
      org.wso2.siddhi.query.api.definition.StreamDefinition siddhiStreamDefinition =
          new org.wso2.siddhi.query.api.definition.StreamDefinition();
      siddhiStreamDefinition.name(importedStreamConfiguration.getSiddhiStreamName());
      StreamDefinition streamDefinition = null;
      try {
        streamDefinition =
            EventProcessorValueHolder.getEventStreamService()
                .getStreamDefinition(importedStreamConfiguration.getStreamId(), tenantId);

        populateAttributes(
            siddhiStreamDefinition,
            streamDefinition.getMetaData(),
            EventProcessorConstants.META + EventProcessorConstants.ATTRIBUTE_SEPARATOR);
        populateAttributes(
            siddhiStreamDefinition,
            streamDefinition.getCorrelationData(),
            EventProcessorConstants.CORRELATION + EventProcessorConstants.ATTRIBUTE_SEPARATOR);
        populateAttributes(siddhiStreamDefinition, streamDefinition.getPayloadData(), "");
        InputHandler inputHandler = siddhiManager.defineStream(siddhiStreamDefinition);
        inputHandlerMap.put(streamDefinition.getStreamId(), inputHandler);
        log.debug("input handler created for " + siddhiStreamDefinition.getStreamId());
      } catch (EventStreamConfigurationException e) {
        // Ignored: the imported stream's existence was already validated above.
      }
    }

    for (StreamConfiguration exportedStreamConfiguration :
        executionPlanConfiguration.getExportedStreams()) {
      org.wso2.siddhi.query.api.definition.StreamDefinition siddhiStreamDefinition =
          new org.wso2.siddhi.query.api.definition.StreamDefinition();
      siddhiStreamDefinition.name(exportedStreamConfiguration.getSiddhiStreamName());
      StreamDefinition streamDefinition = null;
      try {
        streamDefinition =
            EventProcessorValueHolder.getEventStreamService()
                .getStreamDefinition(exportedStreamConfiguration.getStreamId(), tenantId);

        populateAttributes(
            siddhiStreamDefinition,
            streamDefinition.getMetaData(),
            EventProcessorConstants.META + EventProcessorConstants.ATTRIBUTE_SEPARATOR);
        populateAttributes(
            siddhiStreamDefinition,
            streamDefinition.getCorrelationData(),
            EventProcessorConstants.CORRELATION + EventProcessorConstants.ATTRIBUTE_SEPARATOR);
        populateAttributes(siddhiStreamDefinition, streamDefinition.getPayloadData(), "");
        siddhiManager.defineStream(siddhiStreamDefinition);
        log.debug("stream defined for " + siddhiStreamDefinition.getStreamId());
      } catch (EventStreamConfigurationException e) {
        // Ignored: the exported stream's existence was already validated above.
      }
    }

    // When distributed processing is set to "RedundantNode", run this plan under an HA manager
    // (active/passive) backed by the Hazelcast instance.
    HAManager haManager = null;
    String isDistributedProcessingEnabledString =
        executionPlanConfiguration
            .getSiddhiConfigurationProperties()
            .get(EventProcessorConstants.SIDDHI_DISTRIBUTED_PROCESSING);
    if (isDistributedProcessingEnabledString != null
        && isDistributedProcessingEnabledString.equalsIgnoreCase("RedundantNode")) {
      haManager =
          new HAManager(
              EventProcessorValueHolder.getHazelcastInstance(),
              executionPlanConfiguration.getName(),
              tenantId,
              siddhiManager,
              inputHandlerMap.size(),
              currentCepMembershipInfo);
    }

    try {
      siddhiManager.addExecutionPlan(executionPlanConfiguration.getQueryExpressions());
    } catch (Exception e) {
      throw new ExecutionPlanConfigurationException(
          "Invalid query specified, " + e.getMessage(), e);
    }

    ExecutionPlan executionPlan =
        new ExecutionPlan(
            executionPlanConfiguration.getName(),
            siddhiManager,
            executionPlanConfiguration,
            haManager);
    tenantExecutionPlans.put(executionPlanConfiguration.getName(), executionPlan);

    // subscribe output to junction
    SiddhiStormOutputEventListener stormOutputListener = null;
    if (isRunningOnStorm) {
      stormOutputListener =
          new SiddhiStormOutputEventListener(executionPlanConfiguration, tenantId);
    }
    for (StreamConfiguration exportedStreamConfiguration :
        executionPlanConfiguration.getExportedStreams()) {

      SiddhiOutputStreamListener streamCallback;

      if (haManager != null) {
        streamCallback =
            new SiddhiHAOutputStreamListener(
                exportedStreamConfiguration.getSiddhiStreamName(),
                exportedStreamConfiguration.getStreamId(),
                executionPlanConfiguration,
                tenantId);
        haManager.addStreamCallback((SiddhiHAOutputStreamListener) streamCallback);
      } else {
        streamCallback =
            new SiddhiOutputStreamListener(
                exportedStreamConfiguration.getSiddhiStreamName(),
                exportedStreamConfiguration.getStreamId(),
                executionPlanConfiguration,
                tenantId);

        if (isRunningOnStorm) {
          stormOutputListener.registerOutputStreamListener(
              exportedStreamConfiguration.getSiddhiStreamName(), streamCallback);
        }
      }
      siddhiManager.addCallback(exportedStreamConfiguration.getSiddhiStreamName(), streamCallback);
      try {
        EventProcessorValueHolder.getEventStreamService().subscribe(streamCallback, tenantId);
      } catch (EventStreamConfigurationException e) {
        // ignored as this will never happen
      }
      executionPlan.addProducer(streamCallback);
    }

    // subscribe input to junction
    for (StreamConfiguration importedStreamConfiguration :
        executionPlanConfiguration.getImportedStreams()) {
      InputHandler inputHandler = inputHandlerMap.get(importedStreamConfiguration.getStreamId());

      AbstractSiddhiInputEventDispatcher eventDispatcher = null;
      if (haManager != null) {
        eventDispatcher =
            new SiddhiHAInputEventDispatcher(
                importedStreamConfiguration.getStreamId(),
                inputHandler,
                executionPlanConfiguration,
                tenantId,
                haManager.getProcessThreadPoolExecutor(),
                haManager.getThreadBarrier());
        haManager.addInputEventDispatcher(
            importedStreamConfiguration.getStreamId(),
            (SiddhiHAInputEventDispatcher) eventDispatcher);
      } else if (isRunningOnStorm) {
        StreamDefinition streamDefinition = null;
        try {
          streamDefinition =
              EventProcessorValueHolder.getEventStreamService()
                  .getStreamDefinition(importedStreamConfiguration.getStreamId(), tenantId);
        } catch (EventStreamConfigurationException e) {
          // Ignore as this would never happen
        }
        eventDispatcher =
            new SiddhiStormInputEventDispatcher(
                streamDefinition,
                importedStreamConfiguration.getSiddhiStreamName(),
                executionPlanConfiguration,
                tenantId);
      } else {
        eventDispatcher =
            new SiddhiInputEventDispatcher(
                importedStreamConfiguration.getStreamId(),
                inputHandler,
                executionPlanConfiguration,
                tenantId);
      }

      try {
        EventProcessorValueHolder.getEventStreamService().subscribe(eventDispatcher, tenantId);
        executionPlan.addConsumer(eventDispatcher);

      } catch (EventStreamConfigurationException e) {
        // ignored as this will never happen
      }
    }

    if (haManager != null) {
      haManager.init();
    }
  }
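
  /**
   * Statically validates an execution plan: ensures a non-empty, whitespace-free plan name is
   * given via the plan-name annotation, that every import/export annotation carries a stream
   * name and version (separated by the stream-separator character) matching the databridge
   * naming patterns, that no stream is both imported and exported, that each annotated stream
   * matches its registered databridge definition, and finally that the plan compiles in Siddhi.
   */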
  public static void validateExecutionPlan(String executionPlan)
      throws ExecutionPlanConfigurationException, ExecutionPlanDependencyValidationException {

    String planName;
    int i = 0; // index of the current stream definition, used to give context in error messages
    ArrayList<String> importedStreams = new ArrayList<String>();
    ArrayList<String> exportedStreams = new ArrayList<String>();
    Pattern databridgeStreamNamePattern =
        Pattern.compile(EventProcessorConstants.DATABRIDGE_STREAM_REGEX);
    Pattern streamVersionPattern = Pattern.compile(EventProcessorConstants.STREAM_VER_REGEX);

    ExecutionPlan parsedExecPlan = SiddhiCompiler.parse(executionPlan);
    Element element =
        AnnotationHelper.getAnnotationElement(
            EventProcessorConstants.ANNOTATION_NAME_NAME, null, parsedExecPlan.getAnnotations());
    if (element == null) { // check if plan name is given
      throw new ExecutionPlanConfigurationException(
          "Execution plan name is not given. Please specify execution plan name using the annotation "
              + "'"
              + EventProcessorConstants.ANNOTATION_TOKEN_AT
              + EventProcessorConstants.ANNOTATION_PLAN
              + EventProcessorConstants.ANNOTATION_TOKEN_COLON
              + EventProcessorConstants.ANNOTATION_NAME_NAME
              + EventProcessorConstants.ANNOTATION_TOKEN_OPENING_BRACKET
              + EventProcessorConstants.SIDDHI_SINGLE_QUOTE
              + "executionPlanNameHere"
              + EventProcessorConstants.SIDDHI_SINGLE_QUOTE
              + EventProcessorConstants.ANNOTATION_TOKEN_CLOSING_BRACKET
              + "'");
    }
    planName = element.getValue();
    if (planName.equals("")) {
      throw new ExecutionPlanConfigurationException(
          "Execution plan name is empty. Hence the plan is invalid");
    }
    if (planName.trim().contains(" ")) {
      throw new ExecutionPlanConfigurationException(
          "Execution plan name '"
              + planName
              + "' contains whitespaces. Please remove whitespaces.");
    }

    Map<String, org.wso2.siddhi.query.api.definition.StreamDefinition> streamDefMap =
        parsedExecPlan.getStreamDefinitionMap();
    for (Map.Entry<String, org.wso2.siddhi.query.api.definition.StreamDefinition> entry :
        streamDefMap.entrySet()) {
      Element importElement =
          AnnotationHelper.getAnnotationElement(
              EventProcessorConstants.ANNOTATION_IMPORT, null, entry.getValue().getAnnotations());
      Element exportElement =
          AnnotationHelper.getAnnotationElement(
              EventProcessorConstants.ANNOTATION_EXPORT, null, entry.getValue().getAnnotations());
      if (importElement != null && exportElement != null) {
        throw new ExecutionPlanConfigurationException(
            "Same stream definition has being imported and exported. Please correct "
                + i
                + "th of the "
                + parsedExecPlan.getStreamDefinitionMap().size()
                + "stream definition, with stream id '"
                + entry.getKey()
                + "'");
      }
      // Import and export cases are treated separately to give more specific error messages.
      if (importElement != null) {
        String atImportLiteral =
            EventProcessorConstants.ANNOTATION_TOKEN_AT + EventProcessorConstants.ANNOTATION_IMPORT;
        String importElementValue = importElement.getValue();
        if (importElementValue == null || importElementValue.trim().isEmpty()) {
          throw new ExecutionPlanConfigurationException(
              "Imported stream cannot be empty as in '"
                  + atImportLiteral
                  + EventProcessorConstants.ANNOTATION_TOKEN_OPENING_BRACKET
                  + EventProcessorConstants.SIDDHI_SINGLE_QUOTE
                  + EventProcessorConstants.SIDDHI_SINGLE_QUOTE
                  + EventProcessorConstants.ANNOTATION_TOKEN_CLOSING_BRACKET
                  + "'. Please correct "
                  + i
                  + "th of the "
                  + parsedExecPlan.getStreamDefinitionMap().size()
                  + "stream definition, with stream id '"
                  + entry.getKey()
                  + "'");
        }
        String[] streamIdComponents =
            importElementValue.split(EventProcessorConstants.STREAM_SEPARATOR);
        if (streamIdComponents.length != 2) {
          throw new ExecutionPlanConfigurationException(
              "Found malformed "
                  + atImportLiteral
                  + " element '"
                  + importElementValue
                  + "'. "
                  + atImportLiteral
                  + " annotation should take the form '"
                  + atImportLiteral
                  + EventProcessorConstants.ANNOTATION_TOKEN_OPENING_BRACKET
                  + EventProcessorConstants.SIDDHI_SINGLE_QUOTE
                  + "streamName"
                  + EventProcessorConstants.STREAM_SEPARATOR
                  + "StreamVersion"
                  + EventProcessorConstants.SIDDHI_SINGLE_QUOTE
                  + EventProcessorConstants.ANNOTATION_TOKEN_CLOSING_BRACKET
                  + "'. There should be a '"
                  + EventProcessorConstants.STREAM_SEPARATOR
                  + "' character, separating the streamName and its version");
        }
        if ((!databridgeStreamNamePattern.matcher(streamIdComponents[0].trim()).matches())) {
          throw new ExecutionPlanConfigurationException(
              "Invalid imported stream name["
                  + streamIdComponents[0]
                  + "] in execution plan:"
                  + planName
                  + ". Stream name should match the regex '"
                  + EventProcessorConstants.DATABRIDGE_STREAM_REGEX
                  + "'");
        }
        Matcher m = streamVersionPattern.matcher(streamIdComponents[1].trim());
        if (!m.matches()) {
          throw new ExecutionPlanConfigurationException(
              "Invalid stream version ["
                  + streamIdComponents[1]
                  + "] for stream name "
                  + streamIdComponents[0]
                  + " in execution plan: "
                  + planName
                  + ". Stream version should match the regex '"
                  + EventProcessorConstants.STREAM_VER_REGEX
                  + "'");
        }
        validateSiddhiStreamWithDatabridgeStream(
            streamIdComponents[0], streamIdComponents[1], entry.getValue());
        if (exportedStreams.contains(
            importElementValue)) { // check if same stream has been imported and exported.
          throw new ExecutionPlanConfigurationException(
              "Imported stream '"
                  + importElementValue
                  + "' is also among the exported streams. Hence the execution plan is invalid");
        }
        importedStreams.add(importElementValue);
      }
      if (exportElement != null) {
        String atExportLiteral =
            EventProcessorConstants.ANNOTATION_TOKEN_AT + EventProcessorConstants.ANNOTATION_EXPORT;
        String exportElementValue = exportElement.getValue();
        if (exportElementValue == null || exportElementValue.trim().isEmpty()) {
          throw new ExecutionPlanConfigurationException(
              "Exported stream cannot be empty as in '"
                  + atExportLiteral
                  + EventProcessorConstants.ANNOTATION_TOKEN_OPENING_BRACKET
                  + EventProcessorConstants.SIDDHI_SINGLE_QUOTE
                  + EventProcessorConstants.SIDDHI_SINGLE_QUOTE
                  + EventProcessorConstants.ANNOTATION_TOKEN_CLOSING_BRACKET
                  + "'. Please correct "
                  + i
                  + "th of the "
                  + parsedExecPlan.getStreamDefinitionMap().size()
                  + "stream definition, with stream id '"
                  + entry.getKey());
        }
        String[] streamIdComponents =
            exportElementValue.split(EventProcessorConstants.STREAM_SEPARATOR);
        if (streamIdComponents.length != 2) {
          throw new ExecutionPlanConfigurationException(
              "Found malformed "
                  + atExportLiteral
                  + " element '"
                  + exportElementValue
                  + "'. "
                  + atExportLiteral
                  + " annotation should take the form '"
                  + atExportLiteral
                  + EventProcessorConstants.ANNOTATION_TOKEN_OPENING_BRACKET
                  + EventProcessorConstants.SIDDHI_SINGLE_QUOTE
                  + "streamName"
                  + EventProcessorConstants.STREAM_SEPARATOR
                  + "StreamVersion"
                  + EventProcessorConstants.SIDDHI_SINGLE_QUOTE
                  + EventProcessorConstants.ANNOTATION_TOKEN_CLOSING_BRACKET
                  + "'. There should be a '"
                  + EventProcessorConstants.STREAM_SEPARATOR
                  + "' character, separating the streamName and its version");
        }
        if ((!databridgeStreamNamePattern.matcher(streamIdComponents[0].trim()).matches())) {
          throw new ExecutionPlanConfigurationException(
              "Invalid exported stream name["
                  + streamIdComponents[0]
                  + "] in execution plan:"
                  + planName
                  + ". Stream name should match the regex '"
                  + EventProcessorConstants.DATABRIDGE_STREAM_REGEX
                  + "'");
        }
        Matcher m = streamVersionPattern.matcher(streamIdComponents[1].trim());
        if (!m.matches()) {
          throw new ExecutionPlanConfigurationException(
              "Invalid stream version ["
                  + streamIdComponents[1]
                  + "] for stream name "
                  + streamIdComponents[0]
                  + " in execution plan: "
                  + planName
                  + ". Stream version should match the regex '"
                  + EventProcessorConstants.STREAM_VER_REGEX
                  + "'");
        }
        validateSiddhiStreamWithDatabridgeStream(
            streamIdComponents[0], streamIdComponents[1], entry.getValue());
        if (importedStreams.contains(exportElementValue)) {
          throw new ExecutionPlanConfigurationException(
              "Exported stream '"
                  + exportElementValue
                  + "' is also among the imported streams. Hence the execution plan is invalid");
        }
        exportedStreams.add(exportElementValue);
      }
      i++;
    }

    SiddhiManager siddhiManager = EventProcessorValueHolder.getSiddhiManager();
    loadDataSourceConfiguration(siddhiManager);
    try {
      siddhiManager.validateExecutionPlan(executionPlan);
    } catch (Throwable t) {
      throw new ExecutionPlanConfigurationException(t.getMessage(), t);
    }
  }
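
 /**
  * Validates a Siddhi stream definition against the registered databridge stream with the given
  * name and version: the attribute counts must match, and every meta, correlation and payload
  * attribute must appear at the expected position with a matching type. Throws a dependency
  * validation exception if the databridge stream does not exist.
  */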
 private static boolean validateSiddhiStreamWithDatabridgeStream(
     String streamName,
     String streamVersion,
     org.wso2.siddhi.query.api.definition.StreamDefinition siddhiStreamDefinition)
     throws ExecutionPlanConfigurationException, ExecutionPlanDependencyValidationException {
   if (siddhiStreamDefinition == null) {
     throw new ExecutionPlanDependencyValidationException(
         streamName + EventProcessorConstants.STREAM_SEPARATOR + streamVersion,
         "Cannot validate null Siddhi stream for the stream: "
             + streamName
             + EventProcessorConstants.STREAM_SEPARATOR
             + streamVersion
             + " ");
   }
   EventStreamService eventStreamService = EventProcessorValueHolder.getEventStreamService();
   try {
     StreamDefinition streamDefinition =
         eventStreamService.getStreamDefinition(streamName, streamVersion);
     if (streamDefinition != null) {
       String siddhiAttributeName;
       int attributeCount = 0;
       int streamSize =
           (streamDefinition.getMetaData() == null ? 0 : streamDefinition.getMetaData().size())
               + (streamDefinition.getCorrelationData() == null
                   ? 0
                   : streamDefinition.getCorrelationData().size())
               + (streamDefinition.getPayloadData() == null
                   ? 0
                   : streamDefinition.getPayloadData().size());
       if (siddhiStreamDefinition.getAttributeList().size() != streamSize) {
         throw new ExecutionPlanDependencyValidationException(
             streamName + EventProcessorConstants.STREAM_SEPARATOR + streamVersion,
             "No of attributes in stream "
                 + streamName
                 + EventProcessorConstants.STREAM_SEPARATOR
                 + streamVersion
                 + " do not match the no of attributes in Siddhi stream");
       }
       if (streamDefinition.getMetaData() != null) {
         for (Attribute attribute : streamDefinition.getMetaData()) {
           siddhiAttributeName = EventProcessorConstants.META_PREFIX + attribute.getName();
           org.wso2.siddhi.query.api.definition.Attribute.Type type =
               siddhiStreamDefinition.getAttributeType(siddhiAttributeName);
           // null check for type not required since an exception is thrown by Siddhi
           // StreamDefinition.getAttributeType() method for non-existent attributes
           if (siddhiStreamDefinition.getAttributePosition(siddhiAttributeName)
               != attributeCount++) {
             throw new ExecutionPlanDependencyValidationException(
                 streamName + EventProcessorConstants.STREAM_SEPARATOR + streamVersion,
                 "Stream "
                     + streamName
                     + EventProcessorConstants.STREAM_SEPARATOR
                     + streamVersion
                     + "; Attribute positions do not match for attribute: "
                     + attribute.getName());
           }
           if (!isMatchingType(type, attribute.getType())) {
             throw new ExecutionPlanDependencyValidationException(
                 streamName + EventProcessorConstants.STREAM_SEPARATOR + streamVersion,
                 "Stream "
                     + streamName
                     + EventProcessorConstants.STREAM_SEPARATOR
                     + streamVersion
                     + "; Type mismatch for attribute: "
                     + attribute.getName());
           }
         }
       }
       if (streamDefinition.getCorrelationData() != null) {
         for (Attribute attribute : streamDefinition.getCorrelationData()) {
           siddhiAttributeName = EventProcessorConstants.CORRELATION_PREFIX + attribute.getName();
           org.wso2.siddhi.query.api.definition.Attribute.Type type =
               siddhiStreamDefinition.getAttributeType(siddhiAttributeName);
           // null check for type not required since an exception is thrown by Siddhi
           // StreamDefinition.getAttributeType() method for non-existent attributes
           if (siddhiStreamDefinition.getAttributePosition(siddhiAttributeName)
               != attributeCount++) {
             throw new ExecutionPlanDependencyValidationException(
                 streamName + EventProcessorConstants.STREAM_SEPARATOR + streamVersion,
                 "Stream "
                     + streamName
                     + EventProcessorConstants.STREAM_SEPARATOR
                     + streamVersion
                     + "; Attribute positions do not match for attribute: "
                     + attribute.getName());
           }
           if (!isMatchingType(type, attribute.getType())) {
             throw new ExecutionPlanDependencyValidationException(
                 streamName + EventProcessorConstants.STREAM_SEPARATOR + streamVersion,
                 "Stream "
                     + streamName
                     + EventProcessorConstants.STREAM_SEPARATOR
                     + streamVersion
                     + "; Type mismatch for attribute: "
                     + attribute.getName());
           }
         }
       }
       if (streamDefinition.getPayloadData() != null) {
         for (Attribute attribute : streamDefinition.getPayloadData()) {
           siddhiAttributeName = attribute.getName();
           org.wso2.siddhi.query.api.definition.Attribute.Type type =
               siddhiStreamDefinition.getAttributeType(siddhiAttributeName);
           // null check for type not required since an exception is thrown by Siddhi
           // StreamDefinition.getAttributeType() method for non-existent attributes
           if (siddhiStreamDefinition.getAttributePosition(siddhiAttributeName)
               != attributeCount++) {
             throw new ExecutionPlanDependencyValidationException(
                 streamName + EventProcessorConstants.STREAM_SEPARATOR + streamVersion,
                 "Stream "
                     + streamName
                     + EventProcessorConstants.STREAM_SEPARATOR
                     + streamVersion
                     + "; Attribute positions do not match for attribute: "
                     + attribute.getName());
           }
           if (!isMatchingType(type, attribute.getType())) {
             throw new ExecutionPlanDependencyValidationException(
                 streamName + EventProcessorConstants.STREAM_SEPARATOR + streamVersion,
                 "Stream "
                     + streamName
                     + EventProcessorConstants.STREAM_SEPARATOR
                     + streamVersion
                     + "; Type mismatch for attribute: "
                     + attribute.getName());
           }
         }
       }
       return true;
     }
   } catch (EventStreamConfigurationException e) {
     throw new ExecutionPlanConfigurationException(
         "Error while validating stream definition with store : " + e.getMessage(), e);
   } catch (AttributeNotExistException e) {
     throw new ExecutionPlanDependencyValidationException(
         streamName + EventProcessorConstants.STREAM_SEPARATOR + streamVersion, e.getMessage());
   }
   throw new ExecutionPlanDependencyValidationException(
       streamName + EventProcessorConstants.STREAM_SEPARATOR + streamVersion,
       "Stream "
           + streamName
           + EventProcessorConstants.STREAM_SEPARATOR
           + streamVersion
           + " does not exist");
 }