public void validate(Object obj, Errors errors, MemberService service) {

    Member member = (Member) obj;

    String escapedName = StringEscapeUtils.escapeJava(member.getFullName());
    String escapedPassword = StringEscapeUtils.escapeJavaScript(member.getPassword());
    String escapedEmailAddress = StringEscapeUtils.escapeJavaScript(member.getEmailAddress());
    String escapedTown = StringEscapeUtils.escapeJavaScript(member.getTown());
    String escapedUsername = StringEscapeUtils.escapeJavaScript(member.getUserName());

    if (escapedName == null || escapedName.length() < 10) {
      errors.reject("name", "Full name should be at least 10 characters long");
    }
    if (escapedPassword == null || escapedPassword.length() < 10) {
      errors.reject("password", "Password should consist of at least 10 symbols");
    }
    if (escapedEmailAddress == null || escapedEmailAddress.indexOf("@") < 0) {
      errors.reject("emailaddress", "Email address should be valid");
    }
    if (escapedTown == null || escapedTown.equals("")) {
      errors.reject("town", "Town shouldn't be empty");
    }
    if (escapedUsername == null || escapedUsername.length() < 8) {
      errors.reject("userName", "Username should be at least 8 characters long");
    }

    if (service.getMember(escapedUsername) > 0) {
      errors.reject("userNameExists", "user with provided username already exists");
    }
    member.setFullName(escapedName);
    member.setEmailAddress(escapedEmailAddress);
    member.setPassword(escapedPassword);
    member.setTown(escapedTown);
  }
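A note on the example above: StringEscapeUtils.escapeJava and escapeJavaScript replace control and non-ASCII characters with \uXXXX sequences, so the escaped value can be longer than what the user typed, which matters for length-based checks like the ones here. A minimal sketch, assuming commons-lang 2.x on the classpath (class and variable names are illustrative):

import org.apache.commons.lang.StringEscapeUtils;

public class EscapeLengthDemo {
  public static void main(String[] args) {
    String raw = "Zoë";                                  // 3 characters as typed
    String escaped = StringEscapeUtils.escapeJava(raw);  // "Zo\u00EB" -> 8 characters
    System.out.println(raw.length() + " vs " + escaped.length());
  }
}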
Example #2
  /**
   * Translates internal ANTLR exceptions into sane, Flume data-flow-configuration-specific
   * messages.
   */
  @Override
  public String getMessage() {
    if (re instanceof NoViableAltException) {
      NoViableAltException nvae = (NoViableAltException) re;
      String c = StringEscapeUtils.escapeJava("" + (char) nvae.c);
      return "Lexer error at char '"
          + c
          + "' at line "
          + nvae.line
          + " char "
          + nvae.charPositionInLine;
    }

    if (re instanceof MismatchedTokenException) {
      MismatchedTokenException mte = (MismatchedTokenException) re;
      String token = (mte.token == null) ? "\"\"" : mte.token.getText();

      return "Parser error: unexpected '"
          + token
          + "' at position "
          + mte.charPositionInLine
          + " line "
          + mte.line
          + ": '"
          + mte.input
          + "'";
    }

    return "Unknown RecognitionException: " + re.getMessage();
  }
Example #3
  // Configure and run Mapreduce job
  public void runMRJob(
      boolean quiet,
      boolean silent,
      Configuration conf,
      ArrayList<String> D_options,
      String out,
      Logger LOG,
      String field_separator,
      String queue_name,
      String[] args,
      String job,
      Tool tool)
      throws Exception {

    logConsole(quiet, silent, info, "Running Mapreduce job & Calling " + job);

    if (out.equals("-")) {
      // Disable output compression so the results can be read on stdout
      D_options.add("-Dmapreduce.output.fileoutputformat.compress=false");
    }

    try {
      conf.set("zk.connect.string", System.getenv("ZK_CONNECT_STRING"));
      conf.setBoolean("mapreduce.output.fileoutputformat.compress", true);
      conf.set("mapred.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
      conf.setInt("mapred.max.split.size", 256 * 1024 * 1024);
      conf.set("logdriver.output.field.separator", field_separator);
      conf.set("mapred.job.queue.name", StringEscapeUtils.escapeJava(queue_name));

      dOpts(D_options, silent, out, conf);

      // Now run JOB and send arguments
      LOG.info("Sending args to {}: {}", job, args);
      ToolRunner.run(conf, tool, args);
    } catch (IOException e) {
      if (e.toString().contains("Failed to find any Kerberos")) {
        logConsole(true, true, error, "No/bad Kerberos ticket - please authenticate.");
        System.exit(1);
      } else if (e.toString().contains("Permission denied")) {
        logConsole(true, true, error, "Permission denied.");
        System.err.println(
            "; Please go to https://go/itforms and fill out the Hadoop Onboarding Form "
                + "to get access to the requested data. Paste the following into the ticket to help with your request:\n"
                + "Error message: "
                + e);
        System.exit(1);
      } else if (e.toString().contains("quota") && e.toString().contains("exceeded")) {
        logConsole(true, true, error, "Disk quota Exceeded.");
        System.exit(1);
      }
      logConsole(
          true,
          true,
          error,
          "\n\tError running mapreduce job." + generalError() + "\n\tCommand stopped");
      e.printStackTrace();
      System.exit(1);
    }
  }
Example #4
 private String format(Event e) {
   Date d = new Date(e.getTimestamp());
   String data =
       String.format(
           "%s %s %s: %s\n",
           DateUtils.asISO8601(d),
           e.getPriority(),
           "log4j",
           StringEscapeUtils.escapeJava(new String(e.getBody())));
   return data;
 }
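The escaping above is what keeps a multi-line event body on a single formatted log line. A small illustration of that behaviour, again assuming commons-lang 2.x (the class name and sample body are made up):

import org.apache.commons.lang.StringEscapeUtils;

public class BodyEscapeDemo {
  public static void main(String[] args) {
    byte[] body = "first line\nsecond line".getBytes();
    // The newline becomes the two characters '\' and 'n', so the
    // formatted record stays on one line.
    System.out.println(StringEscapeUtils.escapeJava(new String(body)));
    // prints: first line\nsecond line
  }
}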
Example #5
 private void addResultItem(Entry<String, String> mapping, boolean hasMore) {
   switch (getFormat()) {
     case JSON:
        List<String> line = new ArrayList<>();
        line.add(
            String.format(
                "\"%s\":\"%s\"",
                StringEscapeUtils.escapeJava(mapping.getKey()),
                StringEscapeUtils.escapeJava(mapping.getValue())));
        if (hasMore) {
          // Arrays.asList returns a fixed-size list, so build a mutable list
          // before appending the trailing entry.
          line.add("");
        }
       output.addResultItem(line);
       break;
     case TEXT:
       output.addResultItem(
           Arrays.asList(
               String.format(
                   "%s successfully saved as %s", mapping.getKey(), mapping.getValue())));
       break;
     default:
       output.addResultItem(Arrays.asList(mapping.getKey(), mapping.getValue()));
   }
 }
Example #6
  /** 5. String escaping */
  @Test
  public void test5() {
    System.out.println(StringEscapeUtils.escapeCsv("测试测试哦")); // "测试测试哦"
    System.out.println(StringEscapeUtils.escapeCsv("测试,测试哦")); // "\"测试,测试哦\""
    System.out.println(StringEscapeUtils.escapeCsv("测试\n测试哦")); // "\"测试\n测试哦\""

    System.out.println(StringEscapeUtils.escapeHtml("测试测试哦")); // non-ASCII characters become numeric HTML entities
    System.out.println(
        StringEscapeUtils.escapeJava(
            "\"rensaninng\"，欢迎您！")); // "\"rensaninng\"\uFF0C\u6B22\u8FCE\u60A8\uFF01"

    // System.out.println(StringEscapeUtils.escapeEcmaScript("测试'测试哦"));//"\u6D4B\u8BD5\'\u6D4B\u8BD5\u54E6"
    System.out.println(
        StringEscapeUtils.escapeXml(
            "<tt>\"bread\" & \"butter\"</tt>")); // "&lt;tt&gt;&quot;bread&quot; &amp; &quot;butter&quot;&lt;/tt&gt;"
  }
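To complement the expected outputs noted in the comments above, here is a small round-trip sketch (illustrative only, assuming commons-lang 2.x) showing that unescapeJava reverses escapeJava:

import org.apache.commons.lang.StringEscapeUtils;

public class RoundTripDemo {
  public static void main(String[] args) {
    String original = "line1\nline2\t\"quoted\"";
    String escaped = StringEscapeUtils.escapeJava(original);    // line1\nline2\t\"quoted\"
    String restored = StringEscapeUtils.unescapeJava(escaped);  // back to the original value
    System.out.println(escaped);
    System.out.println(original.equals(restored));              // true
  }
}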
Example #7
  @Test
  public final void testCtasWithOptions() throws Exception {
    ResultSet res = executeFile("CtasWithOptions.sql");
    res.close();

    ResultSet res2 = executeQuery();
    resultSetToString(res2);
    String tableName = res2.getMetaData().getTableName(1);
    res2.close();

    TableDesc desc = client.getTableDesc(CatalogUtil.normalizeIdentifier(tableName));
    assertNotNull(desc);
    assertTrue("CSV".equalsIgnoreCase(desc.getMeta().getStoreType()));

    KeyValueSet options = desc.getMeta().getOptions();
    assertNotNull(options);
    assertEquals(
        StringEscapeUtils.escapeJava("\u0001"), options.get(StorageConstants.TEXT_DELIMITER));
  }
Example #8
 /** Adds a file to the script */
 private void addFile2Script() {
   if (this.file.exists() && this.file.length() > 0) {
     if (this.script == null) {
       initScript();
     }
     if (this.getCampos() == null) {
        throw new IllegalArgumentException("Campos has a null value.");
     }
     String sentencia =
         "LOAD DATA INFILE '"
             + StringEscapeUtils.escapeJava(this.file.getAbsolutePath())
             + "' "
             + this.modo
             + " INTO TABLE  "
             + this.table
             + " ("
             + this.getCampos()
             + ");";
     this.script.addSentencia(sentencia);
   }
 }
Example #9
 public void checkValidName(final String name) throws KettleException {
   // potentially problematic characters in filesystem/repository metastores
   char[] fileSystemReservedChars =
       new char[] {'\\', '/', ':', '*', '?', '"', '<', '>', '|', '\t', '\r', '\n'};
   if (StringUtils.isBlank(name)) {
     throw new KettleException(
         BaseMessages.getString(
             PKG,
             isSharedDimension()
                 ? "ModelAnnotation.SharedDimensionMissingName.Message"
                 : "ModelAnnotation.ModelAnnotationGroupMissingName.Message"));
   }
   if (StringUtils.indexOfAny(name, fileSystemReservedChars) >= 0
       || StringUtils.startsWith(name, ".")) {
     StringBuilder sb = new StringBuilder();
     boolean first = true;
     for (char ch : fileSystemReservedChars) {
       if (first) {
         first = false;
       } else {
         sb.append(" ");
       }
       if (Character.isWhitespace(ch)) {
         sb.append(StringEscapeUtils.escapeJava(Character.toString(ch)));
       } else {
         sb.append(ch);
       }
     }
     throw new KettleException(
         BaseMessages.getString(
             PKG,
             isSharedDimension()
                 ? "ModelAnnotation.MetaStoreInvalidName.SharedDimension.Message"
                 : "ModelAnnotation.MetaStoreInvalidName.Message",
             sb.toString()));
   }
 }
Example #10
 public static String toJsonString(String value) {
   if (value == null || value.length() <= 0) {
     return "";
   }
   return org.apache.commons.lang.StringEscapeUtils.escapeJava(value);
 }
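The helper above relies on escapeJava producing output that is also legal inside a JSON string literal: double quotes, backslashes, control characters, and non-ASCII characters all come out as escape sequences JSON accepts. A hedged illustration, not taken from the original project:

import org.apache.commons.lang.StringEscapeUtils;

public class JsonEscapeDemo {
  public static void main(String[] args) {
    String value = "He said \"hi\"\nLine 2";
    // Escaped quotes, newlines and unicode escapes are all valid JSON escapes.
    String json = "{\"message\":\"" + StringEscapeUtils.escapeJava(value) + "\"}";
    System.out.println(json);  // {"message":"He said \"hi\"\nLine 2"}
  }
}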
Example #11
 private String normalize(String str) {
   if (StringUtils.isEmpty(str)) {
     return null;
   }
   return StringEscapeUtils.escapeJava(str.toLowerCase());
 }
Example #12
  // Run Pig Locally
  public void runPigLocal(
      Map<String, String> params,
      String out,
      String tmp,
      final boolean quiet,
      final boolean silent,
      Configuration conf,
      String queue_name,
      String additional_jars,
      File pig_tmp,
      ArrayList<String> D_options,
      String PIG_DIR,
      FileSystem fs)
      throws IllegalArgumentException, IOException {
    // Create a local temp directory to hold the data to sort
    final File local_tmp = Files.createTempDir();
    local_tmp.deleteOnExit();

    Runtime.getRuntime()
        .addShutdownHook(
            new Thread(
                new Runnable() {
                  @Override
                  public void run() {
                    try {
                      logConsole(quiet, silent, warn, "Deleting tmp files in local tmp");
                      delete(local_tmp);
                    } catch (IOException e) {
                      // Best-effort cleanup; a failing shutdown hook should not abort shutdown.
                      e.printStackTrace();
                    }
                  }
                }));

    // Set input parameter for pig job
    params.put("tmpdir", local_tmp.toString() + "/" + tmp);

    // If out is '-', write the results to stdout
    String pigout;
    if (out.equals("-")) {
      params.put("out", local_tmp + "/" + tmp + "/final");
      pigout = local_tmp + "/" + tmp + "/final";
    } else {
      params.put("out", local_tmp + "/" + StringEscapeUtils.escapeJava(out));
      pigout = StringEscapeUtils.escapeJava(out);
    }

    // Copy the tmp folder from HDFS to the local tmp directory, and delete the remote folder
    fs.copyToLocalFile(true, new Path(tmp), new Path(local_tmp + "/" + tmp));

    try {
      logConsole(quiet, silent, info, "Running PIG Command");
      conf.set("mapred.job.queue.name", queue_name);
      conf.set("pig.additional.jars", additional_jars);
      conf.set("pig.exec.reducers.bytes.per.reducer", Integer.toString(100 * 1000 * 1000));
      conf.set("pig.logfile", pig_tmp.toString());
      conf.set("hadoopversion", "23");
      // PIG temp directory set to be able to delete all temp files/directories
      conf.set("pig.temp.dir", local_tmp.getAbsolutePath());

      // Setting output separator for logdriver
      String DEFAULT_OUTPUT_SEPARATOR = "\t";
      Charset UTF_8 = Charset.forName("UTF-8");
      String outputSeparator =
          conf.get("logdriver.output.field.separator", DEFAULT_OUTPUT_SEPARATOR);
      byte[] bytes = outputSeparator.getBytes(UTF_8);
      if (bytes.length != 1) {
        System.err.println(
            ";******************** The output separator must be a single byte in UTF-8. ******************** ");
        System.exit(1);
      }
      conf.set("logdriver.output.field.separator", Byte.toString(bytes[0]));

      dOpts(D_options, silent, out, conf);

      PigServer pigServer = new PigServer(ExecType.LOCAL, conf);
      UserGroupInformation.setConfiguration(new Configuration(false));
      pigServer.registerScript(PIG_DIR + "/formatAndSortLocal.pg", params);
    } catch (Exception e) {
      e.printStackTrace();
      System.exit(1);
    }

    logConsole(quiet, silent, warn, "PIG Job Completed.");

    if (out.equals("-")) {
      System.out.println(";#################### DATA RESULTS ####################");
      try {
        File results = new File(pigout);
        String[] resultList = results.list();

        // Find the files in the directory, open and printout results
        for (int i = 0; i < resultList.length; i++) {
          if (resultList[i].contains("part-") && !resultList[i].contains(".crc")) {
            try (BufferedReader br =
                new BufferedReader(new FileReader(new File(pigout + "/" + resultList[i])))) {
              String line;
              while ((line = br.readLine()) != null) {
                System.out.println(line);
              }
            }
          }
        }
        System.out.println(";#################### END OF RESULTS ####################");
      } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
      }
    } else {
      fs.copyFromLocalFile(
          new Path(local_tmp + "/" + StringEscapeUtils.escapeJava(out)), new Path(pigout));
      System.out.println(
          ";#################### Done. Search results are in " + pigout + " ####################");
    }
  }
Example #13
  // Run Pig Remotely
  public void runPigRemote(
      Map<String, String> params,
      String out,
      String tmp,
      boolean quiet,
      boolean silent,
      Configuration conf,
      String queue_name,
      String additional_jars,
      File pig_tmp,
      ArrayList<String> D_options,
      String PIG_DIR,
      FileSystem fs) {
    // Set input parameter for pig job - calling Pig directly
    params.put("tmpdir", StringEscapeUtils.escapeJava(tmp));

    // If out is '-', write the results to stdout
    String pigout;
    if (out.equals("-")) {
      params.put("out", tmp + "/final");
      pigout = tmp + "/final";
    } else {
      params.put("out", StringEscapeUtils.escapeJava(out));
      pigout = StringEscapeUtils.escapeJava(out);
    }

    try {
      logConsole(quiet, silent, info, "Running PIG Command");
      conf.set("mapred.job.queue.name", queue_name);
      conf.set("pig.additional.jars", additional_jars);
      conf.set("pig.exec.reducers.bytes.per.reducer", Integer.toString(100 * 1000 * 1000));
      conf.set("pig.logfile", pig_tmp.toString());
      conf.set("hadoopversion", "23");
      // PIG temp directory set to be able to delete all temp files/directories
      conf.set("pig.temp.dir", tmp);

      // Setting output separator for logdriver
      String DEFAULT_OUTPUT_SEPARATOR = "\t";
      Charset UTF_8 = Charset.forName("UTF-8");
      String outputSeparator =
          conf.get("logdriver.output.field.separator", DEFAULT_OUTPUT_SEPARATOR);
      byte[] bytes = outputSeparator.getBytes(UTF_8);
      if (bytes.length != 1) {
        logConsole(true, true, error, "The output separator must be a single byte in UTF-8.");
        System.exit(1);
      }
      conf.set("logdriver.output.field.separator", Byte.toString(bytes[0]));

      dOpts(D_options, silent, out, conf);

      PigServer pigServer = new PigServer(ExecType.MAPREDUCE, conf);
      pigServer.registerScript(PIG_DIR + "/formatAndSort.pg", params);
    } catch (Exception e) {
      e.printStackTrace();
      System.exit(1);
    }

    logConsole(quiet, silent, warn, "PIG Job Completed.");
    if (out.equals("-")) {
      System.out.println(";#################### DATA RESULTS ####################");
      try {
        // Create filter to find files with the results from PIG job
        PathFilter filter =
            new PathFilter() {
              public boolean accept(Path file) {
                return file.getName().contains("part-");
              }
            };

        // Find the files in the directory, open and printout results
        FileStatus[] status = fs.listStatus(new Path(tmp + "/final"), filter);
        for (int i = 0; i < status.length; i++) {
          try (BufferedReader br =
              new BufferedReader(new InputStreamReader(fs.open(status[i].getPath())))) {
            String line;
            while ((line = br.readLine()) != null) {
              System.out.println(line);
            }
          }
        }
        System.out.println(";#################### END OF RESULTS ####################");
      } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
      }
    } else {
      System.out.println(
          ";#################### Done. Search results are in " + pigout + " ####################");
    }
  }
Example #14
 public static String escapeJava(String string) {
   return string == null ? null : StringEscapeUtils.escapeJava(string);
 }
Example #15
  @Override
  public final CatalogProtos.TableDescProto getTable(String databaseName, final String tableName)
      throws CatalogException {
    org.apache.hadoop.hive.ql.metadata.Table table = null;
    HiveCatalogStoreClientPool.HiveCatalogStoreClient client = null;
    Path path = null;
    String storeType = null;
    org.apache.tajo.catalog.Schema schema = null;
    KeyValueSet options = null;
    TableStats stats = null;
    PartitionMethodDesc partitions = null;

    //////////////////////////////////
    // set tajo table schema.
    //////////////////////////////////
    try {
      // get hive table schema
      try {
        client = clientPool.getClient();
        table = HiveCatalogUtil.getTable(client.getHiveClient(), databaseName, tableName);
        path = table.getPath();
      } catch (NoSuchObjectException nsoe) {
        throw new UndefinedTableException(tableName);
      } catch (Exception e) {
        throw new TajoInternalError(e);
      }

      // convert HiveCatalogStore field schema into tajo field schema.
      schema = new org.apache.tajo.catalog.Schema();

      List<FieldSchema> fieldSchemaList = table.getCols();
      boolean isPartitionKey = false;
      for (FieldSchema eachField : fieldSchemaList) {
        isPartitionKey = false;

        if (table.getPartitionKeys() != null) {
          for (FieldSchema partitionKey : table.getPartitionKeys()) {
            if (partitionKey.getName().equals(eachField.getName())) {
              isPartitionKey = true;
            }
          }
        }

        if (!isPartitionKey) {
          String fieldName =
              databaseName
                  + CatalogConstants.IDENTIFIER_DELIMITER
                  + tableName
                  + CatalogConstants.IDENTIFIER_DELIMITER
                  + eachField.getName();
          TajoDataTypes.Type dataType =
              HiveCatalogUtil.getTajoFieldType(eachField.getType().toString());
          schema.addColumn(fieldName, dataType);
        }
      }

      // validate field schema.
      HiveCatalogUtil.validateSchema(table);

      stats = new TableStats();
      options = new KeyValueSet();
      options.putAll(table.getParameters());
      options.remove("EXTERNAL");

      Properties properties = table.getMetadata();
      if (properties != null) {
        // set field delimiter
        String fieldDelimiter = "", nullFormat = "";
        if (properties.getProperty(serdeConstants.FIELD_DELIM) != null) {
          fieldDelimiter = properties.getProperty(serdeConstants.FIELD_DELIM);
        } else {
          // If the Hive table uses the default row format delimiter, Properties doesn't
          // contain it, so Tajo must set it explicitly:
          fieldDelimiter = "\u0001";
        }

        // set null format
        if (properties.getProperty(serdeConstants.SERIALIZATION_NULL_FORMAT) != null) {
          nullFormat = properties.getProperty(serdeConstants.SERIALIZATION_NULL_FORMAT);
        } else {
          nullFormat = "\\N";
        }
        options.remove(serdeConstants.SERIALIZATION_NULL_FORMAT);

        // set file output format
        String fileOutputformat =
            properties.getProperty(hive_metastoreConstants.FILE_OUTPUT_FORMAT);
        storeType = HiveCatalogUtil.getStoreType(fileOutputformat);

        if (storeType.equalsIgnoreCase("TEXT")) {
          options.set(
              StorageConstants.TEXT_DELIMITER, StringEscapeUtils.escapeJava(fieldDelimiter));
          options.set(StorageConstants.TEXT_NULL, StringEscapeUtils.escapeJava(nullFormat));
        } else if (storeType.equals("RCFILE")) {
          options.set(StorageConstants.RCFILE_NULL, StringEscapeUtils.escapeJava(nullFormat));
          String serde = properties.getProperty(serdeConstants.SERIALIZATION_LIB);
          if (LazyBinaryColumnarSerDe.class.getName().equals(serde)) {
            options.set(StorageConstants.RCFILE_SERDE, StorageConstants.DEFAULT_BINARY_SERDE);
          } else if (ColumnarSerDe.class.getName().equals(serde)) {
            options.set(StorageConstants.RCFILE_SERDE, StorageConstants.DEFAULT_TEXT_SERDE);
          }
        } else if (storeType.equals("SEQUENCEFILE")) {
          options.set(
              StorageConstants.SEQUENCEFILE_DELIMITER,
              StringEscapeUtils.escapeJava(fieldDelimiter));
          options.set(StorageConstants.SEQUENCEFILE_NULL, StringEscapeUtils.escapeJava(nullFormat));
          String serde = properties.getProperty(serdeConstants.SERIALIZATION_LIB);
          if (LazyBinarySerDe.class.getName().equals(serde)) {
            options.set(StorageConstants.SEQUENCEFILE_SERDE, StorageConstants.DEFAULT_BINARY_SERDE);
          } else if (LazySimpleSerDe.class.getName().equals(serde)) {
            options.set(StorageConstants.SEQUENCEFILE_SERDE, StorageConstants.DEFAULT_TEXT_SERDE);
          }
        }

        // set data size
        long totalSize = 0;
        if (properties.getProperty("totalSize") != null) {
          totalSize = Long.parseLong(properties.getProperty("totalSize"));
        } else {
          try {
            FileSystem fs = path.getFileSystem(conf);
            if (fs.exists(path)) {
              totalSize = fs.getContentSummary(path).getLength();
            }
          } catch (IOException ioe) {
            throw new TajoInternalError(ioe);
          }
        }
        stats.setNumBytes(totalSize);
      }

      // set partition keys
      List<FieldSchema> partitionKeys = table.getPartitionKeys();

      if (null != partitionKeys) {
        org.apache.tajo.catalog.Schema expressionSchema = new org.apache.tajo.catalog.Schema();
        StringBuilder sb = new StringBuilder();
        if (partitionKeys.size() > 0) {
          for (int i = 0; i < partitionKeys.size(); i++) {
            FieldSchema fieldSchema = partitionKeys.get(i);
            TajoDataTypes.Type dataType =
                HiveCatalogUtil.getTajoFieldType(fieldSchema.getType().toString());
            String fieldName =
                databaseName
                    + CatalogConstants.IDENTIFIER_DELIMITER
                    + tableName
                    + CatalogConstants.IDENTIFIER_DELIMITER
                    + fieldSchema.getName();
            expressionSchema.addColumn(new Column(fieldName, dataType));
            if (i > 0) {
              sb.append(",");
            }
            sb.append(fieldSchema.getName());
          }
          partitions =
              new PartitionMethodDesc(
                  databaseName, tableName, PartitionType.COLUMN, sb.toString(), expressionSchema);
        }
      }
    } finally {
      if (client != null) client.release();
    }
    TableMeta meta = new TableMeta(storeType, options);
    TableDesc tableDesc = new TableDesc(databaseName + "." + tableName, schema, meta, path.toUri());
    if (table.getTableType().equals(TableType.EXTERNAL_TABLE)) {
      tableDesc.setExternal(true);
    }
    if (stats != null) {
      tableDesc.setStats(stats);
    }
    if (partitions != null) {
      tableDesc.setPartitionMethod(partitions);
    }
    return tableDesc.getProto();
  }
Example #16
        @Override
        public void textEdited(
            User user, SPath filePath, int offset, String replacedText, String text) {
          /*
           * Delete whitespace from the text because we don't want to count
           * it; otherwise we would count quite a number of characters the
           * user never actually typed, e.g. when Eclipse automatically
           * starts lines with tabs or spaces.
           */
          int textLength = StringUtils.deleteWhitespace(text).length();

          EditEvent event = new EditEvent(System.currentTimeMillis(), textLength);

          /*
           * If the edit's text length exceeds the paste threshold, record it
           * as a possible paste for the user who made it, and also store the
           * number of characters that were "pasted" or auto-generated.
           */
          if (textLength > pasteThreshold) {
            Integer currentPasteCount = pastes.get(user);
            if (currentPasteCount == null) {
              currentPasteCount = 0;
            }
            pastes.put(user, currentPasteCount + 1);

            Integer currentPasteChars = pastesCharCount.get(user);
            if (currentPasteChars == null) {
              currentPasteChars = 0;
            }
            pastesCharCount.put(user, currentPasteChars + textLength);
          }

          if (log.isTraceEnabled()) {
            log.trace(
                String.format(
                    "Received chars written from %s " + "(whitespaces omitted): %s [%s]",
                    user, textLength, StringEscapeUtils.escapeJava(text)));
          }

          if (textLength > 0) {
            if (user.isLocal()) {
              /*
               * accumulate the written chars of the local user and store
               * the time and text length of this Activity
               */
              addToCharsWritten(textLength);
              localEvents.add(event);
            } else {
              /*
               * Store all remote text edits for future comparison. Because
               * these edits are remote, we need to determine who made each
               * edit and increase that user's edited-character count. The
               * total edit count increases by one for each TextEditActivity
               * received.
               */
              remoteEvents.add(event);
              Integer currentCharCount = remoteCharCount.get(user);
              if (currentCharCount == null) {
                currentCharCount = 0;
              }
              remoteCharCount.put(user, currentCharCount + textLength);
            }
          }
        }
Example #17
 private String escapeCell(String cell) {
   return "\"" + StringEscapeUtils.escapeJava(cell) + "\"";
 }
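Worth noting (not part of the original snippet): wrapping escapeJava output in quotes produces Java-style escaping, which differs from RFC 4180 CSV quoting where embedded quotes are doubled; commons-lang offers escapeCsv for the latter. A minimal comparison sketch:

import org.apache.commons.lang.StringEscapeUtils;

public class CellEscapeDemo {
  public static void main(String[] args) {
    String cell = "5\" pipe, brass";
    // Java-style escaping inside manually added quotes, as in escapeCell() above:
    System.out.println("\"" + StringEscapeUtils.escapeJava(cell) + "\"");  // "5\" pipe, brass"
    // RFC 4180-style CSV quoting (embedded quotes doubled):
    System.out.println(StringEscapeUtils.escapeCsv(cell));                 // "5"" pipe, brass"
  }
}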
Example #18
 protected static String escape(String string) {
   return (string == null) ? "" : StringEscapeUtils.escapeJava(string);
 }
Example #19
  // Initialize Pig Job and call appropriate type of Pig Job (remote/local)
  public void runPig(
      boolean silent,
      boolean quiet,
      long foundresults,
      long size,
      String tmp,
      String out,
      ArrayList<String> D_options,
      String queue_name,
      String date_format,
      String field_separator,
      File pig_tmp,
      FileSystem fs,
      Configuration conf,
      boolean forcelocal,
      boolean forceremote)
      throws Exception {
    // If the sort type is not forced, choose local or remote sorting based on the size of
    // the results
    if (!forceremote && !forcelocal) {
      if (size > 256 * 1024 * 1024) {
        forceremote = true;
      } else {
        forcelocal = true;
      }
    }

    if (forceremote) {
      logConsole(
          quiet,
          silent,
          warn,
          "Found Results="
              + foundresults
              + ". Results are "
              + (100 * size / (1024 * 1024) / 100)
              + " MB. Using remote sort...");
    } else {
      logConsole(
          quiet,
          silent,
          warn,
          "Found Results="
              + foundresults
              + ". Results are "
              + (100 * size / (1024 * 1024) / 100)
              + " MB. Using local sort...");
    }

    String LOGDRIVER_HOME = System.getenv("LOGDRIVER_HOME");

    // Convert field separator to hex used for calling PIG
    char[] chars = field_separator.toCharArray();
    String field_separator_in_hex = String.format("%1x", (int) chars[0]);

    // Add the required parameters for running pig
    Map<String, String> params = new HashMap<String, String>();
    params.put("dateFormat", StringEscapeUtils.escapeJava(date_format));
    params.put("fs", StringEscapeUtils.escapeJava(field_separator_in_hex));

    // Set variables to be used for calling Pig script
    String PIG_DIR = LOGDRIVER_HOME + "/pig";

    // Get the list of additional jars we'll need for PIG
    String additional_jars = LOGDRIVER_HOME + "/" + getLogdriverJar(LOGDRIVER_HOME);

    if (forceremote) {
      runPigRemote(
          params,
          out,
          tmp,
          quiet,
          silent,
          conf,
          queue_name,
          additional_jars,
          pig_tmp,
          D_options,
          PIG_DIR,
          fs);
    } else {
      runPigLocal(
          params,
          out,
          tmp,
          quiet,
          silent,
          conf,
          queue_name,
          additional_jars,
          pig_tmp,
          D_options,
          PIG_DIR,
          fs);
    }
  }