Example #1
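Writes the original event, the enrichment parameters pulled from HBase and a weather API, and the resulting Spark ML prediction to a timestamped file on HDFS.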
  public void writePredictionToHDFS(Tuple input, double[] params, double prediction)
      throws Exception {

    try {

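      // Connect to HDFS using the URL supplied in the topology configuration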
      Configuration conf = new Configuration();
      conf.set("fs.defaultFS", topologyConfig.getProperty("hdfs.url"));
      FileSystem fs = FileSystem.get(conf);

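      // Write each prediction to its own timestamped file under /tmp/predictions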
      Path pt =
          new Path(
              topologyConfig.getProperty("hdfs.url")
                  + "/tmp/predictions/"
                  + System.currentTimeMillis());

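      // The second argument to create() overwrites the file if it already exists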
      BufferedWriter br = new BufferedWriter(new OutputStreamWriter(fs.create(pt, true)));

      br.write("Original Event: " + input + "\n");
      br.write("\n");
      br.write("Certification status (from HBase): " + (params[0] == 1 ? "Y" : "N") + "\n");
      br.write("Wage plan (from HBase): " + (params[1] == 1 ? "Miles" : "Hours" + "\n"));
      br.write("Hours logged (from HBase): " + params[2] * 100 + "\n");
      br.write("Miles logged (from HBase): " + params[3] * 1000 + "\n");
      br.write("\n");
      br.write("Is Foggy? (from weather API): " + (params[4] == 1 ? "Y" : "N" + "\n"));
      br.write("Is Rainy? (from weather API): " + (params[5] == 1 ? "Y" : "N" + "\n"));
      br.write("Is Windy? (from weather API): " + (params[6] == 1 ? "Y" : "N" + "\n"));
      br.write("\n");
      br.write("\n");
      br.write("Input to Spark ML model: " + Arrays.toString(params) + "\n");
      br.write("\n");
      br.write("Prediction from Spark ML model: " + prediction + "\n");
      br.flush();
      br.close();

    } catch (Exception e) {
      e.printStackTrace();
    }
  }
Example #2
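Binds lookup values to a JDBC PreparedStatement, executes the query, and converts each result row into EventBean instances, applying optional column/row conversion hooks and JDBC performance logging along the way.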
  private synchronized List<EventBean> execute(
      PreparedStatement preparedStatement, Object[] lookupValuePerStream) {
    if (ExecutionPathDebugLog.isDebugEnabled && log.isInfoEnabled()) {
      log.info(".execute Executing prepared statement '" + preparedStatementText + "'");
    }

    boolean hasJDBCLogging = enableJDBCLogging && jdbcPerfLog.isInfoEnabled();

    // set parameters
    SQLInputParameterContext inputParameterContext = null;
    if (columnTypeConversionHook != null) {
      inputParameterContext = new SQLInputParameterContext();
    }

    int count = 1;
    Object[] parameters = null;
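    // When JDBC performance logging is enabled, keep the bound parameters so they can be logged with the query timing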
    if (hasJDBCLogging) {
      parameters = new Object[lookupValuePerStream.length];
    }
    for (int i = 0; i < lookupValuePerStream.length; i++) {
      try {
        Object parameter = lookupValuePerStream[i];
        if (ExecutionPathDebugLog.isDebugEnabled && log.isInfoEnabled()) {
          log.info(
              ".execute Setting parameter "
                  + count
                  + " to "
                  + parameter
                  + " typed "
                  + ((parameter == null) ? "null" : parameter.getClass()));
        }

        if (columnTypeConversionHook != null) {
          inputParameterContext.setParameterNumber(i + 1);
          inputParameterContext.setParameterValue(parameter);
          parameter = columnTypeConversionHook.getParameterValue(inputParameterContext);
        }

        setObject(preparedStatement, count, parameter);
        if (parameters != null) {
          parameters[i] = parameter;
        }
      } catch (SQLException ex) {
        throw new EPException("Error setting parameter " + count, ex);
      }

      count++;
    }

    // execute
    ResultSet resultSet;
    if (hasJDBCLogging) {
      long startTimeNS = System.nanoTime();
      long startTimeMS = System.currentTimeMillis();
      try {
        resultSet = preparedStatement.executeQuery();
      } catch (SQLException ex) {
        throw new EPException("Error executing statement '" + preparedStatementText + '\'', ex);
      }
      long endTimeNS = System.nanoTime();
      long endTimeMS = System.currentTimeMillis();
      jdbcPerfLog.info(
          "Statement '"
              + preparedStatementText
              + "' delta nanosec "
              + (endTimeNS - startTimeNS)
              + " delta msec "
              + (endTimeMS - startTimeMS)
              + " parameters "
              + Arrays.toString(parameters));
    } else {
      try {
        resultSet = preparedStatement.executeQuery();
      } catch (SQLException ex) {
        throw new EPException("Error executing statement '" + preparedStatementText + '\'', ex);
      }
    }

    // generate events for result set
    List<EventBean> rows = new LinkedList<EventBean>();
    try {
      SQLColumnValueContext valueContext = null;
      if (columnTypeConversionHook != null) {
        valueContext = new SQLColumnValueContext();
      }

      SQLOutputRowValueContext rowContext = null;
      if (outputRowConversionHook != null) {
        rowContext = new SQLOutputRowValueContext();
      }

      int rowNum = 0;
      while (resultSet.next()) {
        int colNum = 1;
        Map<String, Object> row = new HashMap<String, Object>();
        for (Map.Entry<String, DBOutputTypeDesc> entry : outputTypes.entrySet()) {
          String columnName = entry.getKey();

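          // Use the configured type binding for this column if present; otherwise fall back to ResultSet.getObject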
          Object value;
          DatabaseTypeBinding binding = entry.getValue().getOptionalBinding();
          if (binding != null) {
            value = binding.getValue(resultSet, columnName);
          } else {
            value = resultSet.getObject(columnName);
          }

          if (columnTypeConversionHook != null) {
            valueContext.setColumnName(columnName);
            valueContext.setColumnNumber(colNum);
            valueContext.setColumnValue(value);
            valueContext.setResultSet(resultSet);
            value = columnTypeConversionHook.getColumnValue(valueContext);
          }

          row.put(columnName, value);
          colNum++;
        }

        EventBean eventBeanRow = null;
        if (this.outputRowConversionHook == null) {
          eventBeanRow = eventAdapterService.adapterForTypedMap(row, eventType);
        } else {
          rowContext.setValues(row);
          rowContext.setRowNum(rowNum);
          rowContext.setResultSet(resultSet);
          Object rowData = outputRowConversionHook.getOutputRow(rowContext);
          if (rowData != null) {
            eventBeanRow =
                eventAdapterService.adapterForTypedBean(rowData, (BeanEventType) eventType);
          }
        }

        if (eventBeanRow != null) {
          rows.add(eventBeanRow);
          rowNum++;
        }
      }
    } catch (SQLException ex) {
      throw new EPException(
          "Error reading results for statement '" + preparedStatementText + '\'', ex);
    }

    if (enableJDBCLogging && jdbcPerfLog.isInfoEnabled()) {
      jdbcPerfLog.info("Statement '" + preparedStatementText + "' " + rows.size() + " rows");
    }

    try {
      resultSet.close();
    } catch (SQLException ex) {
      throw new EPException("Error closing statement '" + preparedStatementText + '\'', ex);
    }

    return rows;
  }