@Override
  public synchronized int onEvent(
      int evtSubscribeNotifyAction, WinDef.PVOID userContext, WinNT.HANDLE eventHandle) {
    if (logger.isDebugEnabled()) {
      logger.debug("onEvent(" + evtSubscribeNotifyAction + ", " + userContext + ", " + eventHandle);
    }

    if (evtSubscribeNotifyAction == WEvtApi.EvtSubscribeNotifyAction.ERROR) {
      if (eventHandle.getPointer().getInt(0)
          == WEvtApi.EvtSubscribeErrors.ERROR_EVT_QUERY_RESULT_STALE) {
        logger.error(MISSING_EVENT_MESSAGE);
      } else {
        logger.error(RECEIVED_THE_FOLLOWING_WIN32_ERROR + eventHandle.getPointer().getInt(0));
      }
    } else if (evtSubscribeNotifyAction == WEvtApi.EvtSubscribeNotifyAction.DELIVER) {
      wEvtApi.EvtRender(
          null, eventHandle, WEvtApi.EvtRenderFlags.EVENT_XML, size, buffer, used, propertyCount);

      // Not enough room in buffer, resize so it's big enough
      if (kernel32.GetLastError() == W32Errors.ERROR_INSUFFICIENT_BUFFER) {
        int newMaxSize = used.getInt(0);
        // Check for overflow or too big
        if (newMaxSize < size || newMaxSize > maxBufferSize) {
          logger.error(
              "Dropping event "
                  + eventHandle
                  + " because it couldn't be rendered within "
                  + maxBufferSize
                  + " bytes.");
          // Ignored, see
          // https://msdn.microsoft.com/en-us/library/windows/desktop/aa385577(v=vs.85).aspx
          return 0;
        }
        size = newMaxSize;
        buffer = new Memory(size);
        wEvtApi.EvtRender(
            null, eventHandle, WEvtApi.EvtRenderFlags.EVENT_XML, size, buffer, used, propertyCount);
      }

      int lastError = kernel32.GetLastError();
      if (lastError == W32Errors.ERROR_SUCCESS) {
        int usedBytes = used.getInt(0);
        String string = Charsets.UTF_16LE.decode(buffer.getByteBuffer(0, usedBytes)).toString();
        if (string.endsWith("\u0000")) {
          string = string.substring(0, string.length() - 1);
        }
        consumer.accept(string);
      } else {
        logger.error(
            EVT_RENDER_RETURNED_THE_FOLLOWING_ERROR_CODE + errorLookup.getLastError() + ".");
      }
    }
    // Ignored, see https://msdn.microsoft.com/en-us/library/windows/desktop/aa385577(v=vs.85).aspx
    return 0;
  }
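The rendered event reaches consumer.accept as a single XML fragment decoded from UTF-16LE. Below is a minimal, hypothetical consumer (not part of the processor above) showing one way such a fragment could be parsed with the JDK's built-in DOM parser; the element and attribute names follow the standard Windows Event XML layout, and the class name is made up for the sketch.

import java.io.StringReader;
import java.util.function.Consumer;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;

public final class EventXmlConsumer implements Consumer<String> {
  @Override
  public void accept(final String eventXml) {
    try {
      final DocumentBuilder builder =
          DocumentBuilderFactory.newInstance().newDocumentBuilder();
      final Document document = builder.parse(new InputSource(new StringReader(eventXml)));
      // <Event><System><Provider Name="..."/> ... in the standard event schema
      final String provider =
          document.getElementsByTagName("Provider").item(0)
              .getAttributes().getNamedItem("Name").getNodeValue();
      System.out.println("Received event from provider " + provider);
    } catch (final Exception e) {
      // A real consumer would log and drop (or route) the malformed event instead.
      throw new IllegalStateException("Could not parse rendered event XML", e);
    }
  }
}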
Example 2
  // Evaluates the specified Criteria on the specified flowfile. Clones the
  // specified flow file for each rule that is applied.
  private boolean evaluateCriteria(
      final ProcessSession session,
      final ProcessContext context,
      final Criteria criteria,
      final FlowFile flowfile,
      final Map<FlowFile, List<Rule>> matchedRules) {
    final ComponentLog logger = getLogger();
    final List<Rule> rules = criteria.getRules();

    // consider each rule and hold a copy of the flowfile for each matched rule
    for (final Rule rule : rules) {
      // evaluate the rule
      if (evaluateRule(context, rule, flowfile)) {
        final FlowFile flowfileToUse;

        // determine if we should use the original flow file or clone
        if (FlowFilePolicy.USE_ORIGINAL.equals(criteria.getFlowFilePolicy())
            || matchedRules.isEmpty()) {
          flowfileToUse = flowfile;
        } else {
          // clone the original for this rule
          flowfileToUse = session.clone(flowfile);
        }

        // store the flow file to use when executing this rule
        List<Rule> rulesForFlowFile = matchedRules.get(flowfileToUse);
        if (rulesForFlowFile == null) {
          rulesForFlowFile = new ArrayList<>();
          matchedRules.put(flowfileToUse, rulesForFlowFile);
        }
        rulesForFlowFile.add(rule);

        // log if appropriate
        if (logger.isDebugEnabled()) {
          logger.debug(
              this
                  + " all conditions met for rule '"
                  + rule.getName()
                  + "'. Using flow file - "
                  + flowfileToUse);
        }
      }
    }

    return !matchedRules.isEmpty();
  }
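For context, here is a sketch of how evaluateCriteria and the executeActions helper shown in the next example might be wired together inside onTrigger. The criteria and defaultActions fields and the REL_SUCCESS relationship are assumptions about the surrounding processor and are not shown above.

  @Override
  public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final FlowFile incoming = session.get();
    if (incoming == null) {
      return;
    }

    // one entry per matched rule: the original flow file or a clone created by evaluateCriteria
    final Map<FlowFile, List<Rule>> matchedRules = new HashMap<>();
    final boolean matched = evaluateCriteria(session, context, criteria, incoming, matchedRules);

    if (!matched) {
      // no rule matched: apply only the default actions to the original flow file
      session.transfer(executeActions(session, context, null, defaultActions, incoming), REL_SUCCESS);
      return;
    }

    for (final Map.Entry<FlowFile, List<Rule>> entry : matchedRules.entrySet()) {
      // within executeActions, later rules in the list override earlier ones and the defaults
      final FlowFile updated =
          executeActions(session, context, entry.getValue(), defaultActions, entry.getKey());
      session.transfer(updated, REL_SUCCESS);
    }
  }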
Example 3
  // Executes the specified action on the specified flowfile.
  private FlowFile executeActions(
      final ProcessSession session,
      final ProcessContext context,
      final List<Rule> rules,
      final Map<String, Action> defaultActions,
      final FlowFile flowfile) {
    final ComponentLog logger = getLogger();
    final Map<String, Action> actions = new HashMap<>(defaultActions);
    final String ruleName =
        (rules == null || rules.isEmpty()) ? "default" : rules.get(rules.size() - 1).getName();

    // if a rule matched, get its actions and possibly overwrite the default ones
    if (rules != null && rules.size() > 0) {
      // combine all rule actions with the default actions... loop through the rules in order so
      // that subsequent matching rules take precedence over previously matching rules and the
      // default values
      for (final Rule rule : rules) {
        for (final Action action : rule.getActions()) {
          // store the action and overwrite the previous value (from the defaults or a previously
          // matching rule)
          actions.put(action.getAttribute(), action);
        }
      }

      // add an action for the matched rule - when matching multiple rules against
      // the original flowfile (use original) this will leave the last matching
      // rule's name as the value of this attribute. this decision was made since
      // this would be the behavior if the user chained multiple UpdateAttributes
      // together with 'use clone' specified
      final Action matchedRuleAction = new Action();
      matchedRuleAction.setAttribute(getClass().getSimpleName() + ".matchedRule");
      matchedRuleAction.setValue(ruleName);
      actions.put(matchedRuleAction.getAttribute(), matchedRuleAction);
    }

    // attribute values that will be applied to the flow file
    final Map<String, String> attributesToUpdate = new HashMap<>(actions.size());
    final Set<String> attributesToDelete = new HashSet<>(actions.size());

    // go through each action
    for (final Action action : actions.values()) {
      if (!action.getAttribute().equals(DELETE_ATTRIBUTES.getName())) {
        try {
          final String newAttributeValue =
              getPropertyValue(action.getValue(), context)
                  .evaluateAttributeExpressions(flowfile)
                  .getValue();

          // log if appropriate
          if (logger.isDebugEnabled()) {
            logger.debug(
                String.format(
                    "%s setting attribute '%s' = '%s' for %s per rule '%s'.",
                    this, action.getAttribute(), newAttributeValue, flowfile, ruleName));
          }

          attributesToUpdate.put(action.getAttribute(), newAttributeValue);
        } catch (final ProcessException pe) {
          throw new ProcessException(
              String.format(
                  "Unable to evaluate new value for attribute '%s': %s.",
                  action.getAttribute(), pe),
              pe);
        }
      } else {
        try {
          final String actionValue = action.getValue();
          final String regex =
              (actionValue == null)
                  ? null
                  : getPropertyValue(actionValue, context)
                      .evaluateAttributeExpressions(flowfile)
                      .getValue();
          if (regex != null) {
            Pattern pattern = Pattern.compile(regex);
            final Set<String> attributeKeys = flowfile.getAttributes().keySet();
            for (final String key : attributeKeys) {
              if (pattern.matcher(key).matches()) {

                // log if appropriate
                if (logger.isDebugEnabled()) {
                  logger.debug(
                      String.format(
                          "%s deleting attribute '%s' for %s per regex '%s'.",
                          this, key, flowfile, regex));
                }

                attributesToDelete.add(key);
              }
            }
          }
        } catch (final ProcessException pe) {
          throw new ProcessException(
              String.format("Unable to delete attribute '%s': %s.", action.getAttribute(), pe), pe);
        }
      }
    }

    // If the 'alternate.identifier' attribute is added, then we want to create an ADD_INFO
    // provenance event.
    final String alternateIdentifierAdd =
        attributesToUpdate.get(CoreAttributes.ALTERNATE_IDENTIFIER.key());
    if (alternateIdentifierAdd != null) {
      try {
        final URI uri = new URI(alternateIdentifierAdd);
        final String namespace = uri.getScheme();
        if (namespace != null) {
          final String identifier =
              alternateIdentifierAdd.substring(
                  Math.min(namespace.length() + 1, alternateIdentifierAdd.length() - 1));
          session.getProvenanceReporter().associate(flowfile, namespace, identifier);
        }
      } catch (final URISyntaxException e) {
        // not a valid URI; skip the ADD_INFO provenance event
      }
    }

    // update and delete the flowfile attributes
    return session.removeAllAttributes(
        session.putAllAttributes(flowfile, attributesToUpdate), attributesToDelete);
  }
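The Math.min guard in the alternate-identifier block above is easy to misread, so here is a standalone snippet (with a made-up URI) showing exactly what ends up as the provenance namespace and identifier.

import java.net.URI;

public final class AlternateIdentifierDemo {
  public static void main(final String[] args) throws Exception {
    final String alternateIdentifier = "https://example.com/docs/123"; // made-up value
    final URI uri = new URI(alternateIdentifier);
    final String namespace = uri.getScheme(); // "https"
    // skip the scheme and the ':'; Math.min protects against a scheme-only value
    final String identifier = alternateIdentifier.substring(
        Math.min(namespace.length() + 1, alternateIdentifier.length() - 1));
    System.out.println(namespace + " -> " + identifier); // prints: https -> //example.com/docs/123
  }
}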
Example 4
  protected String processFlowFile(
      final ComponentLog logger,
      final DebugLevels logLevel,
      final FlowFile flowFile,
      final ProcessSession session,
      final ProcessContext context) {
    final Set<String> attributeKeys =
        getAttributesToLog(flowFile.getAttributes().keySet(), context);
    final ComponentLog LOG = getLogger();
    final String dashedLine;

    String logPrefix =
        context.getProperty(LOG_PREFIX).evaluateAttributeExpressions(flowFile).getValue();

    if (StringUtil.isBlank(logPrefix)) {
      dashedLine = StringUtils.repeat('-', 50);
    } else {
      // abbreviate long lines
      logPrefix = StringUtils.abbreviate(logPrefix, 40);
      // center the logPrefix and pad with dashes
      logPrefix = StringUtils.center(logPrefix, 40, '-');
      // five dashes on the left and right side, plus the dashed logPrefix
      dashedLine = StringUtils.repeat('-', 5) + logPrefix + StringUtils.repeat('-', 5);
    }

    // Pretty print metadata
    final StringBuilder message = new StringBuilder();
    message.append("logging for flow file ").append(flowFile);
    message.append("\n");
    message.append(dashedLine);
    message.append("\nStandard FlowFile Attributes");
    message.append(
        String.format(
            "\nKey: '%1$s'\n\tValue: '%2$s'", "entryDate", new Date(flowFile.getEntryDate())));
    message.append(
        String.format(
            "\nKey: '%1$s'\n\tValue: '%2$s'",
            "lineageStartDate", new Date(flowFile.getLineageStartDate())));
    message.append(String.format("\nKey: '%1$s'\n\tValue: '%2$s'", "fileSize", flowFile.getSize()));
    message.append("\nFlowFile Attribute Map Content");
    for (final String key : attributeKeys) {
      message.append(
          String.format("\nKey: '%1$s'\n\tValue: '%2$s'", key, flowFile.getAttribute(key)));
    }
    message.append("\n");
    message.append(dashedLine);

    // The user can request to log the payload
    final boolean logPayload = context.getProperty(LOG_PAYLOAD).asBoolean();
    if (logPayload) {
      message.append("\n");
      if (flowFile.getSize() < ONE_MB) {
        final FlowFilePayloadCallback callback = new FlowFilePayloadCallback();
        session.read(flowFile, callback);
        message.append(callback.getContents());
      } else {
        message.append("\n Not including payload since it is larger than one mb.");
      }
    }
    final String outputMessage = message.toString().trim();
    // Uses optional property to specify logging level
    switch (logLevel) {
      case info:
        LOG.info(outputMessage);
        break;
      case debug:
        LOG.debug(outputMessage);
        break;
      case warn:
        LOG.warn(outputMessage);
        break;
      case trace:
        LOG.trace(outputMessage);
        break;
      case error:
        LOG.error(outputMessage);
        break;
      default:
        LOG.debug(outputMessage);
    }

    return outputMessage;
  }
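FlowFilePayloadCallback is referenced above but not shown. A minimal sketch of what it might look like, assuming Apache Commons IO is on the classpath; the actual implementation in the processor may differ.

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import org.apache.commons.io.IOUtils;
import org.apache.nifi.processor.io.InputStreamCallback;

// In the processor this would typically be a static nested class.
class FlowFilePayloadCallback implements InputStreamCallback {

  private String contents = "";

  @Override
  public void process(final InputStream in) throws IOException {
    // buffer the whole payload; processFlowFile only reads content for files under one MB
    contents = IOUtils.toString(in, Charset.defaultCharset());
  }

  public String getContents() {
    return contents;
  }
}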
Example 5
  @Override
  public void onTrigger(final ProcessContext context, final ProcessSession session)
      throws ProcessException {

    FlowFile flowFile = null;
    if (context.hasIncomingConnection()) {
      flowFile = session.get();

      // If we have no FlowFile, and all incoming connections are self-loops, then we can
      // continue on. However, if we have no FlowFile and we have connections coming from
      // other Processors, then we know that we should run only if we have a FlowFile.
      if (flowFile == null && context.hasNonLoopConnection()) {
        return;
      }
    }

    OkHttpClient okHttpClient = getClient();

    if (flowFile == null) {
      flowFile = session.create();
    }

    final String index =
        context.getProperty(INDEX).evaluateAttributeExpressions(flowFile).getValue();
    final String docId =
        context.getProperty(DOC_ID).evaluateAttributeExpressions(flowFile).getValue();
    final String docType =
        context.getProperty(TYPE).evaluateAttributeExpressions(flowFile).getValue();
    final String fields =
        context.getProperty(FIELDS).isSet()
            ? context.getProperty(FIELDS).evaluateAttributeExpressions(flowFile).getValue()
            : null;

    // Authentication
    final String username =
        context.getProperty(USERNAME).evaluateAttributeExpressions(flowFile).getValue();
    final String password = context.getProperty(PASSWORD).evaluateAttributeExpressions().getValue();

    final ComponentLog logger = getLogger();

    Response getResponse = null;

    try {
      logger.debug("Fetching {}/{}/{} from Elasticsearch", new Object[] {index, docType, docId});

      // read the url property from the context
      final String urlstr =
          StringUtils.trimToEmpty(
              context.getProperty(ES_URL).evaluateAttributeExpressions().getValue());
      final URL url = buildRequestURL(urlstr, docId, index, docType, fields);
      final long startNanos = System.nanoTime();

      getResponse = sendRequestToElasticsearch(okHttpClient, url, username, password, "GET", null);
      final int statusCode = getResponse.code();

      if (isSuccess(statusCode)) {
        ResponseBody body = getResponse.body();
        final byte[] bodyBytes = body.bytes();
        JsonNode responseJson = parseJsonResponse(new ByteArrayInputStream(bodyBytes));
        boolean found = responseJson.get("found").asBoolean(false);
        String retrievedIndex = responseJson.get("_index").asText();
        String retrievedType = responseJson.get("_type").asText();
        String retrievedId = responseJson.get("_id").asText();

        if (found) {
          JsonNode source = responseJson.get("_source");
          flowFile = session.putAttribute(flowFile, "filename", retrievedId);
          flowFile = session.putAttribute(flowFile, "es.index", retrievedIndex);
          flowFile = session.putAttribute(flowFile, "es.type", retrievedType);
          if (source != null) {
            flowFile =
                session.write(
                    flowFile,
                    out -> {
                      out.write(source.toString().getBytes());
                    });
          }
          logger.debug("Elasticsearch document " + retrievedId + " fetched, routing to success");

          // emit provenance event
          final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
          if (context.hasNonLoopConnection()) {
            session.getProvenanceReporter().fetch(flowFile, url.toExternalForm(), millis);
          } else {
            session.getProvenanceReporter().receive(flowFile, url.toExternalForm(), millis);
          }
          session.transfer(flowFile, REL_SUCCESS);
        } else {
          logger.warn(
              "Failed to read {}/{}/{} from Elasticsearch: Document not found",
              new Object[] {index, docType, docId});

          // We couldn't find the document, so send it to "not found"
          session.transfer(flowFile, REL_NOT_FOUND);
        }
      } else {
        if (statusCode == 404) {
          logger.warn(
              "Failed to read {}/{}/{} from Elasticsearch: Document not found",
              new Object[] {index, docType, docId});

          // We couldn't find the document, so send it to "not found"
          session.transfer(flowFile, REL_NOT_FOUND);
        } else {
          // 5xx -> RETRY, but a server error might last a while, so yield
          if (statusCode / 100 == 5) {

            logger.warn(
                "Elasticsearch returned code {} with message {}, transferring flow file to retry. This is likely a server problem, yielding...",
                new Object[] {statusCode, getResponse.message()});
            session.transfer(flowFile, REL_RETRY);
            context.yield();
          } else if (context.hasIncomingConnection()) { // 1xx, 3xx, 4xx -> NO RETRY
            logger.warn(
                "Elasticsearch returned code {} with message {}, transferring flow file to failure",
                new Object[] {statusCode, getResponse.message()});
            session.transfer(flowFile, REL_FAILURE);
          } else {
            logger.warn(
                "Elasticsearch returned code {} with message {}",
                new Object[] {statusCode, getResponse.message()});
            session.remove(flowFile);
          }
        }
      }
    } catch (IOException ioe) {
      logger.error(
          "Failed to read from Elasticsearch due to {}, this may indicate an error in configuration "
              + "(hosts, username/password, etc.). Routing to retry",
          new Object[] {ioe.getLocalizedMessage()},
          ioe);
      if (context.hasIncomingConnection()) {
        session.transfer(flowFile, REL_RETRY);
      } else {
        session.remove(flowFile);
      }
      context.yield();

    } catch (Exception e) {
      logger.error(
          "Failed to read {} from Elasticsearch due to {}",
          new Object[] {flowFile, e.getLocalizedMessage()},
          e);
      if (context.hasIncomingConnection()) {
        session.transfer(flowFile, REL_FAILURE);
      } else {
        session.remove(flowFile);
      }
      context.yield();
    } finally {
      if (getResponse != null) {
        getResponse.close();
      }
    }
  }
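buildRequestURL is not shown above. A hedged sketch of what it might do, composing base/index/type/docId with OkHttp's HttpUrl builder and optionally restricting the returned fields via the _source query parameter; the query-parameter name and the "_all" fallback are assumptions, not confirmed by the code above.

  private URL buildRequestURL(
      final String baseUrl, final String docId, final String index, final String type,
      final String fields) throws MalformedURLException {
    if (StringUtils.isEmpty(baseUrl)) {
      throw new MalformedURLException("Base URL cannot be null");
    }
    final HttpUrl.Builder builder = HttpUrl.parse(baseUrl).newBuilder()
        .addPathSegment(index)
        .addPathSegment(StringUtils.isEmpty(type) ? "_all" : type)
        .addPathSegment(docId);
    if (!StringUtils.isEmpty(fields)) {
      // ask Elasticsearch to return only the listed source fields
      builder.addQueryParameter("_source", fields);
    }
    return builder.build().url();
  }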