Example #1
 private static void processCLIStmt(String query) {
   try {
     cliClient_.executeCLIStmt(query);
   } catch (Exception e) {
     // Report the failure but keep the CLI session alive.
     System.err.println("Exception: " + e.getMessage());
     e.printStackTrace(System.err);
   }
 }
Example #2
  private static void processServerQuery(String query) {
    // Nothing to do without an open server connection.
    if (!isConnected()) return;

    try {
      cliClient_.executeQueryOnServer(query);
    } catch (Exception e) {
      System.err.println("Exception: " + e.getMessage());
      e.printStackTrace(System.err);
    }
  }
Example #3
  public int processReader(BufferedReader r) {
    String line;
    StringBuilder qsb = new StringBuilder();

    // Accumulate the entire input, preserving line breaks.
    try {
      while ((line = r.readLine()) != null) {
        qsb.append(line).append('\n');
      }
    } catch (IOException e) {
      e.printStackTrace();
    }

    // Hand the buffered text to the line processor; non-zero means failure.
    try {
      return processLine(qsb.toString());
    } catch (Exception e) {
      e.printStackTrace();
      return 1;
    }
  }
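
A minimal call-site sketch for processReader; the enclosing instance cli and the file name script.q are hypothetical. It opens a script, runs it, and surfaces the return code:

  BufferedReader reader = new BufferedReader(new FileReader("script.q")); // hypothetical file
  try {
    int rc = cli.processReader(reader); // 0 on success, 1 on failure
    System.exit(rc);
  } finally {
    reader.close();
  }
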
Example #4
 public static void include_file(String file) {
   try {
     MRQLParser old_parser = parser;
     boolean old_interactive = Config.interactive;
     try {
       // Try the local file system first.
       parser = new MRQLParser(new MRQLLex(new FileInputStream(file)));
     } catch (Exception e) {
       // Fall back to whatever FileSystem the path resolves to (e.g. HDFS).
       Path path = new Path(file);
       FileSystem fs = path.getFileSystem(conf);
       parser = new MRQLParser(new MRQLLex(fs.open(path)));
     }
     try {
       Config.interactive = false;
       parser.parse();
     } finally {
       // Restore the previous parser state even if parsing fails.
       Config.interactive = old_interactive;
       parser = old_parser;
     }
   } catch (Exception ex) {
     ex.printStackTrace(System.err);
     throw new Error(ex);
   }
 }
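
The local-then-distributed open in include_file is a reusable pattern on its own; a standalone sketch of the same fallback, assuming a Hadoop Configuration named conf is in scope:

 InputStream in;
 try {
   // Prefer the local file system...
   in = new FileInputStream(file);
 } catch (FileNotFoundException e) {
   // ...then fall back to the FileSystem the path resolves to (e.g. HDFS).
   Path path = new Path(file);
   in = path.getFileSystem(conf).open(path);
 }
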
Example #5
  // Establish a Thrift connection to a Cassandra instance.
  public static void connect(String server, int port) {
    TSocket socket = new TSocket(server, port);

    // Close any previous transport before replacing it.
    if (transport_ != null) transport_.close();

    transport_ = socket;

    TBinaryProtocol binaryProtocol = new TBinaryProtocol(transport_, false, false);
    Cassandra.Client cassandraClient = new Cassandra.Client(binaryProtocol);

    try {
      transport_.open();
    } catch (Exception e) {
      // Should move this to Log4J as well probably...
      System.err.println("Exception: " + e.getMessage());
      e.printStackTrace();
      return; // do not report a connection that failed to open
    }

    thriftClient_ = cassandraClient;
    cliClient_ = new CliClient(css_, thriftClient_);

    css_.out.printf("Connected to %s/%d\n", server, port);
  }
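
A hypothetical call site, assuming the classic Cassandra Thrift RPC port 9160 and an illustrative CLI statement:

  connect("localhost", 9160);          // open the Thrift transport
  processCLIStmt("show cluster name"); // then issue CLI statements
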
Example #6
 public static void main(String[] args) throws Exception {
   Config.hadoop_mode = false;
   if (args.length == 2 && args[0].equals("args")) { // needed for mrql.flink script
     args = args[1].substring(1).split("!");
   }
   for (String arg : args) {
     Config.hadoop_mode |= arg.equals("-local") || arg.equals("-dist");
     Config.bsp_mode |= arg.equals("-bsp");
     Config.spark_mode |= arg.equals("-spark");
     Config.flink_mode |= arg.equals("-flink");
   }
   Config.map_reduce_mode = !Config.bsp_mode && !Config.spark_mode && !Config.flink_mode;
   initialize_evaluator();
   if (Config.hadoop_mode) {
     conf = Evaluator.evaluator.new_configuration();
     GenericOptionsParser gop = new GenericOptionsParser(conf, args);
     conf = gop.getConfiguration();
     args = gop.getRemainingArgs();
   }
   Config.parse_args(args, conf);
   Config.hadoop_mode = Config.local_mode || Config.distributed_mode;
   if (!Config.info) {
     for (Enumeration en = LogManager.getCurrentLoggers(); en.hasMoreElements(); )
       ((Logger) en.nextElement()).setLevel(Level.WARN);
     LogManager.getRootLogger().setLevel(Level.WARN);
   }
   Evaluator.evaluator.init(conf);
   new TopLevel();
   System.out.print("Apache MRQL version " + version + " (");
   if (Config.compile_functional_arguments) System.out.print("compiled ");
   else System.out.print("interpreted ");
   if (Config.hadoop_mode) {
     if (Config.local_mode) System.out.print("local ");
     else if (Config.distributed_mode) System.out.print("distributed ");
     if (Config.spark_mode) System.out.println("Spark mode using " + Config.nodes + " tasks)");
     else if (Config.flink_mode)
       System.out.println("Flink mode using " + Config.nodes + " tasks)");
     else if (Config.bsp_mode)
       System.out.println("Hama BSP mode over " + Config.nodes + " BSP tasks)");
     else if (Config.nodes > 0)
       System.out.println("Hadoop MapReduce mode with " + Config.nodes + " reducers)");
     else if (!Config.local_mode)
       System.out.println("Hadoop MapReduce mode with 1 reducer, use -nodes to change it)");
     else System.out.println("Hadoop MapReduce mode)");
   } else if (Config.bsp_mode) System.out.println("in-memory BSP mode)");
   else System.out.println("in-memory Java mode)");
   if (Config.interactive) {
     System.out.println("Type quit to exit");
     ConsoleReader reader = new ConsoleReader();
     reader.setBellEnabled(false);
     History history = new History(new File(System.getProperty("user.home") + "/.mrqlhistory"));
     reader.setHistory(history);
     reader.setUseHistory(false);
     try {
       loop:
       while (true) {
         String line = "";
         String s = "";
         try {
           if (Config.hadoop_mode && Config.bsp_mode) Config.write(Plan.conf);
            // Keep reading until the accumulated input contains a ';' terminator.
            do {
              s = reader.readLine("> ");
              if (s != null && (s.equals("quit") || s.equals("exit"))) break loop;
              if (s != null) line += " " + s;
            } while (s == null || s.indexOf(";") <= 0);
            line = line.substring(1); // drop the leading space added above
           history.addToHistory(line);
           parser = new MRQLParser(new MRQLLex(new StringReader(line)));
           MRQLLex.reset();
           parser.parse();
         } catch (EOFException x) {
           break;
         } catch (Exception x) {
           if (x.getMessage() != null) System.out.println(x);
         } catch (Error x) {
           System.out.println(x);
         }
       }
     } finally {
       if (Config.hadoop_mode) {
         Plan.clean();
         Evaluator.evaluator.shutdown(Plan.conf);
       }
       if (Config.compile_functional_arguments) Compiler.clean();
     }
   } else
     try {
       if (Config.hadoop_mode && Config.bsp_mode) Config.write(Plan.conf);
       try {
         parser = new MRQLParser(new MRQLLex(new FileInputStream(query_file)));
       } catch (Exception e) {
         // when the query file is in HDFS
         Path path = new Path(query_file);
         FileSystem fs = path.getFileSystem(conf);
         parser = new MRQLParser(new MRQLLex(fs.open(path)));
       }
       parser.parse();
     } finally {
       if (Config.hadoop_mode) {
         Plan.clean();
         Evaluator.evaluator.shutdown(Plan.conf);
       }
       if (Config.compile_functional_arguments) Compiler.clean();
     }
 }
Example #7
  public int processCmd(String cmd) throws TException, HiveException {
    SessionState ss = SessionState.get();
    ss.setiscli(true); // mark this session as coming from the CLI

    String cmd_trimmed = cmd.trim();
    String[] tokens = cmd_trimmed.split("\\s+");
    String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
    int ret = 0;

    if (tokens[0].equalsIgnoreCase("delete")) {
      List<String> nexttoken = Arrays.asList("jar", "file", "from");
      if (tokens.length < 2 || !nexttoken.contains(tokens[1].toLowerCase())) {
        String errorMessage =
            "\nif delete resource:\n"
                + "Usage: delete [FILE|JAR] <value> [<value>]*\n"
                + "if delete table rows:\n"
                + "Usage: delete from tableName [where searchCondition]";
        console.printError(errorMessage);
        ret = 1;

        return ret;
      }
    }

    if (tokens[0].equalsIgnoreCase("dfs") || tokens[0].equalsIgnoreCase("zktest")) {
      String errorMessage = "\ntdw hive does not support the " + tokens[0].toLowerCase() + " operation\n";
      throw new HiveException(errorMessage);
    }

    if (cmd_trimmed.equalsIgnoreCase("quit") || cmd_trimmed.equalsIgnoreCase("exit")) {
      System.exit(0);
    } else if (cmd_trimmed.startsWith("!")) {

      String shell_cmd = cmd_trimmed.substring(1);

      try {
        Process executor = Runtime.getRuntime().exec(shell_cmd);
        StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, ss.out);
        StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, ss.err);

        outPrinter.start();
        errPrinter.start();

        ret = executor.waitFor();
        if (ret != 0) {
          console.printError("Command failed with exit code = " + ret);
        }
      } catch (Exception e) {
        console.printError(
            "Exception raised from Shell command " + e.getLocalizedMessage(),
            org.apache.hadoop.util.StringUtils.stringifyException(e));
        ret = 1;
      }

    } else if (tokens[0].equalsIgnoreCase("list")) {

      SessionState.ResourceType t;
      if (tokens.length < 2 || (t = SessionState.find_resource_type(tokens[1])) == null) {
        console.printError(
            "Usage: list ["
                + StringUtils.join(SessionState.ResourceType.values(), "|")
                + "] [<value> [<value>]*]");
        ret = 1;
      } else {
        List<String> filter = null;
        if (tokens.length >= 3) {
          // Only the arguments after "list <type>" belong in the filter;
          // copying in place would leave stale trailing entries.
          filter = Arrays.asList(Arrays.copyOfRange(tokens, 2, tokens.length));
        }
        Set<String> s = ss.list_resource(t, filter);
        if (s != null && !s.isEmpty()) ss.out.println(StringUtils.join(s, "\n"));
      }

    } else {
      CommandProcessor proc = CommandProcessorFactory.get(tokens);
      if (proc != null) {
        if (proc instanceof Driver) {
          Driver qp = (Driver) proc;
          PrintStream out = ss.out;
          long start = System.currentTimeMillis();

          try {
            ret = qp.run(cmd);
          } catch (Exception e1) {
            e1.printStackTrace();
            ret = 1; // treat an unexpected failure as a failed command
          }
          if (ret != 0) {
            qp.close();
            return ret;
          }

          Vector<String> res = new Vector<String>();
          try {
            while (qp.getResults(res)) {
              for (String r : res) {
                out.println(r);
              }
              res.clear();
              if (out.checkError()) {
                break;
              }
            }
          } catch (IOException e) {
            console.printError(
                "Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
                "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
            ret = 1;
          }

          int cret = qp.close();
          if (ret == 0) {
            ret = cret;
          }

          long end = System.currentTimeMillis();
          if (end > start) {
            double timeTaken = (double) (end - start) / 1000.0;
            console.printInfo("Time taken: " + timeTaken + " seconds", null);
          }

        } else {
          try {
            ret = proc.run(cmd_1);
          } catch (Exception e) {
            e.printStackTrace();
            ret = 1; // surface the failure to the caller
          }
        }
      }
    }

    return ret;
  }
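
A hedged driver sketch for processCmd, assuming a CliDriver instance can be constructed directly and that "show tables" is a representative command:

  try {
    CliDriver cli = new CliDriver();        // hypothetical instantiation
    int rc = cli.processCmd("show tables"); // 0 means success
    if (rc != 0) {
      System.err.println("Command failed with exit code " + rc);
    }
  } catch (Exception e) {
    e.printStackTrace();
  }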